# typed: strict
# frozen_string_literal: true
require "sorbet-runtime"
require_relative "struct"
module LunchMoney
extend T::Sig
class Split < T::Struct
prop :date, String
prop :category_id, Integer
prop :notes, T.nilable(String)
prop :amount, T.any(Integer, String)
end
end
|
package estimatecost
import (
"github.com/Appliscale/perun/parameters"
"github.com/Appliscale/perun/stack/stack_mocks"
"github.com/aws/aws-sdk-go/service/cloudformation"
"github.com/golang/mock/gomock"
"github.com/stretchr/testify/assert"
"io/ioutil"
"testing"
)
func TestEstimateCosts(t *testing.T) {
templatePath := "../validator/test_resources/test_template.yaml"
ctx := stack_mocks.SetupContext(t, []string{"cmd", "estimate-cost", templatePath})
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
mockAWSPI := stack_mocks.NewMockCloudFormationAPI(mockCtrl)
ctx.CloudFormation = mockAWSPI
templateBodyBytes, err := ioutil.ReadFile(templatePath)
assert.NoError(t, err)
templateBody := string(templateBodyBytes)
templateParameters, err := parameters.ResolveParameters(ctx)
assert.NoError(t, err)
url := "url"
mockAWSPI.
EXPECT().
EstimateTemplateCost(&cloudformation.EstimateTemplateCostInput{
TemplateBody: &templateBody,
Parameters: templateParameters,
}).
Times(1).
Return(&cloudformation.EstimateTemplateCostOutput{Url: &url}, nil)
EstimateCosts(ctx)
}
|
HTML Renderer Core
=============
HTML-Renderer ported to run on .NET Core (original project: https://github.com/ArthurHub/HTML-Renderer) |
---
layout: watch
title: TLP4 - 07/05/2020 - M20200507_074406_TLP_4T.jpg
date: 2020-05-07 07:44:06
permalink: /2020/05/07/watch/M20200507_074406_TLP_4
capture: TLP4/2020/202005/20200506/M20200507_074406_TLP_4T.jpg
---
|
package uy.kohesive.iac.model.aws.clients
import com.amazonaws.services.budgets.AbstractAWSBudgets
import com.amazonaws.services.budgets.AWSBudgets
import com.amazonaws.services.budgets.model.*
import uy.kohesive.iac.model.aws.IacContext
import uy.kohesive.iac.model.aws.proxy.makeProxy
open class BaseDeferredAWSBudgets(val context: IacContext) : AbstractAWSBudgets(), AWSBudgets {
override fun createBudget(request: CreateBudgetRequest): CreateBudgetResult {
return with (context) {
request.registerWithAutoName()
CreateBudgetResult().registerWithSameNameAs(request)
}
}
override fun createNotification(request: CreateNotificationRequest): CreateNotificationResult {
return with (context) {
request.registerWithAutoName()
CreateNotificationResult().registerWithSameNameAs(request)
}
}
override fun createSubscriber(request: CreateSubscriberRequest): CreateSubscriberResult {
return with (context) {
request.registerWithAutoName()
CreateSubscriberResult().registerWithSameNameAs(request)
}
}
}
class DeferredAWSBudgets(context: IacContext) : BaseDeferredAWSBudgets(context)
|
// Coordinate Types
use core::{convert::TryFrom, ops::*};
#[repr(C)]
#[derive(Debug, Copy, Clone, Default, PartialEq)]
pub struct Point {
pub x: isize,
pub y: isize,
}
impl Point {
#[inline]
pub const fn new(x: isize, y: isize) -> Self {
Self { x, y }
}
#[inline]
pub const fn x(&self) -> isize {
self.x
}
#[inline]
pub const fn y(&self) -> isize {
self.y
}
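    /// Visits every grid point on the straight line from `self` to `other`
    /// (inclusive of both endpoints), calling `f` for each point. This is the
    /// classic integer Bresenham line-drawing algorithm.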
pub fn line_to<F>(&self, other: Point, mut f: F)
where
F: FnMut(Self),
{
let c0 = *self;
let c1 = other;
let d = Point::new(
if c1.x > c0.x {
c1.x - c0.x
} else {
c0.x - c1.x
},
if c1.y > c0.y {
c1.y - c0.y
} else {
c0.y - c1.y
},
);
let s = Self::new(
if c1.x > c0.x { 1 } else { -1 },
if c1.y > c0.y { 1 } else { -1 },
);
let mut c0 = c0;
let mut e = d.x - d.y;
loop {
f(c0);
if c0.x == c1.x && c0.y == c1.y {
break;
}
let e2 = e + e;
if e2 > -d.y {
e -= d.y;
c0.x += s.x;
}
if e2 < d.x {
e += d.x;
c0.y += s.y;
}
}
}
#[inline]
pub fn is_within(self, rect: Rect) -> bool {
if let Ok(coords) = Coordinates::from_rect(rect) {
coords.left <= self.x
&& coords.right > self.x
&& coords.top <= self.y
&& coords.bottom > self.y
} else {
false
}
}
}
impl Add<Self> for Point {
type Output = Self;
fn add(self, rhs: Self) -> Self {
Point {
x: self.x + rhs.x,
y: self.y + rhs.y,
}
}
}
impl Add<Size> for Point {
type Output = Self;
fn add(self, rhs: Size) -> Self {
Point {
x: self.x + rhs.width,
y: self.y + rhs.height,
}
}
}
impl Add<isize> for Point {
type Output = Self;
fn add(self, rhs: isize) -> Self {
Point {
x: self.x + rhs,
y: self.y + rhs,
}
}
}
impl AddAssign for Point {
fn add_assign(&mut self, rhs: Self) {
*self = Self {
x: self.x + rhs.x,
y: self.y + rhs.y,
}
}
}
impl Sub<Self> for Point {
type Output = Self;
fn sub(self, rhs: Self) -> Self {
Point {
x: self.x - rhs.x,
y: self.y - rhs.y,
}
}
}
impl Sub<Size> for Point {
type Output = Self;
fn sub(self, rhs: Size) -> Self {
Point {
x: self.x - rhs.width,
y: self.y - rhs.height,
}
}
}
impl Sub<isize> for Point {
type Output = Self;
fn sub(self, rhs: isize) -> Self {
Point {
x: self.x - rhs,
y: self.y - rhs,
}
}
}
impl SubAssign for Point {
fn sub_assign(&mut self, rhs: Self) {
*self = Self {
x: self.x - rhs.x,
y: self.y - rhs.y,
}
}
}
#[repr(C)]
#[derive(Debug, Copy, Clone, Default, PartialEq)]
pub struct Size {
pub width: isize,
pub height: isize,
}
impl Size {
#[inline]
pub const fn new(width: isize, height: isize) -> Self {
Self { width, height }
}
#[inline]
pub const fn width(&self) -> isize {
self.width
}
#[inline]
pub const fn height(&self) -> isize {
self.height
}
}
impl Add<Self> for Size {
type Output = Self;
fn add(self, rhs: Self) -> Self {
Size {
width: self.width + rhs.width,
height: self.height + rhs.height,
}
}
}
impl Add<EdgeInsets> for Size {
type Output = Self;
fn add(self, rhs: EdgeInsets) -> Self {
Size {
width: self.width + rhs.left + rhs.right,
height: self.height + rhs.top + rhs.bottom,
}
}
}
impl AddAssign<Self> for Size {
fn add_assign(&mut self, rhs: Self) {
*self = Self {
width: self.width + rhs.width,
height: self.height + rhs.height,
}
}
}
impl AddAssign<EdgeInsets> for Size {
fn add_assign(&mut self, rhs: EdgeInsets) {
*self = Self {
width: self.width + rhs.left + rhs.right,
height: self.height + rhs.top + rhs.bottom,
}
}
}
impl Sub<Self> for Size {
type Output = Self;
fn sub(self, rhs: Self) -> Self {
Size {
width: self.width - rhs.width,
height: self.height - rhs.height,
}
}
}
impl Sub<EdgeInsets> for Size {
type Output = Self;
fn sub(self, rhs: EdgeInsets) -> Self {
Size {
            width: self.width - (rhs.left + rhs.right),
height: self.height - (rhs.top + rhs.bottom),
}
}
}
impl SubAssign for Size {
fn sub_assign(&mut self, rhs: Self) {
*self = Self {
width: self.width - rhs.width,
height: self.height - rhs.height,
}
}
}
#[repr(C)]
#[derive(Debug, Copy, Clone, Default, PartialEq)]
pub struct Rect {
pub origin: Point,
pub size: Size,
}
impl Rect {
#[inline]
pub const fn new(x: isize, y: isize, width: isize, height: isize) -> Self {
Self {
origin: Point { x, y },
size: Size { width, height },
}
}
#[inline]
pub const fn origin(&self) -> Point {
self.origin
}
#[inline]
pub const fn x(&self) -> isize {
self.origin.x
}
#[inline]
pub const fn y(&self) -> isize {
self.origin.y
}
#[inline]
pub const fn size(&self) -> Size {
self.size
}
#[inline]
pub const fn width(&self) -> isize {
self.size.width
}
#[inline]
pub const fn height(&self) -> isize {
self.size.height
}
#[inline]
pub fn insets_by(self, insets: EdgeInsets) -> Self {
Rect {
origin: Point {
x: self.origin.x + insets.left,
y: self.origin.y + insets.top,
},
size: Size {
width: self.size.width - (insets.left + insets.right),
height: self.size.height - (insets.top + insets.bottom),
},
}
}
pub fn is_within_rect(self, rhs: Self) -> bool {
let cl = match Coordinates::from_rect(self) {
Ok(coords) => coords,
Err(_) => return false,
};
let cr = match Coordinates::from_rect(rhs) {
Ok(coords) => coords,
Err(_) => return false,
};
cl.left < cr.right && cr.left < cl.right && cl.top < cr.bottom && cr.top < cl.bottom
}
pub fn center(&self) -> Point {
Point::new(
self.origin.x + self.size.width / 2,
self.origin.y + self.size.height / 2,
)
}
}
impl From<Size> for Rect {
fn from(size: Size) -> Self {
Rect {
origin: Point::new(0, 0),
size,
}
}
}
impl Add<Point> for Rect {
type Output = Self;
fn add(self, rhs: Point) -> Self::Output {
Self {
origin: self.origin + rhs,
size: self.size,
}
}
}
impl Sub<Point> for Rect {
type Output = Self;
fn sub(self, rhs: Point) -> Self::Output {
Self {
origin: self.origin - rhs,
size: self.size,
}
}
}
impl Add<Size> for Rect {
type Output = Self;
fn add(self, rhs: Size) -> Self::Output {
Self {
origin: self.origin,
size: self.size + rhs,
}
}
}
impl Sub<Size> for Rect {
type Output = Self;
fn sub(self, rhs: Size) -> Self::Output {
Self {
origin: self.origin,
size: self.size - rhs,
}
}
}
impl Add<EdgeInsets> for Rect {
type Output = Self;
fn add(self, rhs: EdgeInsets) -> Self::Output {
Rect {
origin: Point {
x: self.origin.x - rhs.left,
y: self.origin.y - rhs.top,
},
size: Size {
width: self.size.width + (rhs.left + rhs.right),
height: self.size.height + (rhs.top + rhs.bottom),
},
}
}
}
impl Sub<EdgeInsets> for Rect {
type Output = Self;
fn sub(self, rhs: EdgeInsets) -> Self::Output {
self.insets_by(rhs)
}
}
#[repr(C)]
#[derive(Debug, Copy, Clone, Default, PartialEq)]
pub struct Coordinates {
pub left: isize,
pub top: isize,
pub right: isize,
pub bottom: isize,
}
impl Coordinates {
#[inline]
pub const fn new(left: isize, top: isize, right: isize, bottom: isize) -> Self {
Self {
left,
top,
right,
bottom,
}
}
#[inline]
pub fn from_two(c1: Point, c2: Point) -> Self {
Self {
left: isize::min(c1.x, c2.x),
top: isize::min(c1.y, c2.y),
right: isize::max(c1.x, c2.x),
bottom: isize::max(c1.y, c2.y),
}
}
#[inline]
pub fn left_top(&self) -> Point {
Point::new(self.left, self.top)
}
#[inline]
pub fn right_bottom(&self) -> Point {
Point::new(self.right, self.bottom)
}
#[inline]
pub fn left_bottom(&self) -> Point {
Point::new(self.left, self.bottom)
}
#[inline]
pub fn right_top(&self) -> Point {
Point::new(self.right, self.top)
}
#[inline]
pub fn size(&self) -> Size {
Size::new(self.right - self.left, self.bottom - self.top)
}
#[inline]
pub fn comprehensive(&self, other: Self) -> Self {
Self {
left: isize::min(self.left, other.left),
top: isize::min(self.top, other.top),
right: isize::max(self.right, other.right),
bottom: isize::max(self.bottom, other.bottom),
}
}
#[inline]
pub fn from_rect(rect: Rect) -> Result<Coordinates, ()> {
if rect.size.width == 0 || rect.size.height == 0 {
Err(())
} else {
Ok(unsafe { Self::from_rect_unchecked(rect) })
}
}
#[inline]
pub unsafe fn from_rect_unchecked(rect: Rect) -> Coordinates {
let left: isize;
let right: isize;
if rect.size.width > 0 {
left = rect.origin.x;
right = left + rect.size.width;
} else {
right = rect.origin.x;
left = right + rect.size.width;
}
let top: isize;
let bottom: isize;
if rect.size.height > 0 {
top = rect.origin.y;
bottom = top + rect.size.height;
} else {
bottom = rect.origin.y;
top = bottom + rect.size.height;
}
Self {
left,
top,
right,
bottom,
}
}
}
impl Add for Coordinates {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
self.comprehensive(rhs)
}
}
impl AddAssign for Coordinates {
fn add_assign(&mut self, rhs: Self) {
*self = self.comprehensive(rhs)
}
}
impl TryFrom<Rect> for Coordinates {
type Error = ();
fn try_from(value: Rect) -> Result<Self, Self::Error> {
Self::from_rect(value)
}
}
impl From<Coordinates> for Rect {
    fn from(coords: Coordinates) -> Rect {
        Rect {
            origin: coords.left_top(),
            size: coords.size(),
}
}
}
#[repr(C)]
#[derive(Debug, Copy, Clone, Default, PartialEq)]
pub struct EdgeInsets {
pub top: isize,
pub left: isize,
pub bottom: isize,
pub right: isize,
}
impl EdgeInsets {
#[inline]
pub const fn new(top: isize, left: isize, bottom: isize, right: isize) -> Self {
Self {
top,
left,
bottom,
right,
}
}
#[inline]
pub const fn padding_each(value: isize) -> Self {
Self {
top: value,
left: value,
bottom: value,
right: value,
}
}
}
impl Add for EdgeInsets {
type Output = Self;
fn add(self, rhs: Self) -> Self {
Self {
top: self.top + rhs.top,
left: self.left + rhs.left,
bottom: self.bottom + rhs.bottom,
right: self.right + rhs.right,
}
}
}
impl AddAssign for EdgeInsets {
fn add_assign(&mut self, rhs: Self) {
*self = Self {
top: self.top + rhs.top,
left: self.left + rhs.left,
bottom: self.bottom + rhs.bottom,
right: self.right + rhs.right,
}
}
}
impl Sub for EdgeInsets {
type Output = Self;
fn sub(self, rhs: Self) -> Self {
Self {
top: self.top - rhs.top,
left: self.left - rhs.left,
bottom: self.bottom - rhs.bottom,
right: self.right - rhs.right,
}
}
}
impl SubAssign for EdgeInsets {
fn sub_assign(&mut self, rhs: Self) {
*self = Self {
top: self.top - rhs.top,
left: self.left - rhs.left,
bottom: self.bottom - rhs.bottom,
right: self.right - rhs.right,
}
}
}
|
# Contributing
- If you have a suggestion, an error to report, or want to help the project as a whole,
open an Issue and make the request and issue type clear in the title.
- Keep the EOL style consistent with the repository.
- Fork, branch, test, pull request is the recommended way to contribute; a typical command sequence is sketched below.
- Keep your fork up to date so it is ready to be merged when the pull request is submitted.
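For reference, here is a minimal sketch of that fork/branch/test/pull-request flow from the command line. The URLs and the branch name below are placeholders, not taken from this project:

    # Fork the repository on GitHub first, then:
    git clone https://github.com/<your-user>/<project>.git
    cd <project>
    git remote add upstream https://github.com/<original-owner>/<project>.git
    git checkout -b my-feature            # do the work on a branch
    # ...make your changes and run the tests...
    git fetch upstream                    # keep the fork up to date
    git rebase upstream/master
    git push origin my-feature            # then open a pull request on GitHub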
|
require "bookmark_tag"
describe BookmarkTag do
describe ".create" do
it "creates a link between a bookmark and a tag" do
bookmark = Bookmark.create(url: "http://www.makersacademy.com", title: "Makers Academy")
tag = Tag.create(content: "test tag")
bookmark_tag = BookmarkTag.create(bookmark_id: bookmark.id, tag_id: tag.id)
expect(bookmark_tag).to be_a BookmarkTag
expect(bookmark_tag.tag_id).to eq tag.id
expect(bookmark_tag.bookmark_id).to eq bookmark.id
end
end
end
|
# poker-simulator
Attempt to write a poker simulator (for bots) in Go
I've just started developing this, so don't expect anything working.
Requires golang 1.2
## How to run

    # Set your GOPATH env variable
    export GOPATH=~/projects/poker-simulator
    go run src/main.go
|
package orm
import (
"time"
"github.com/hashwing/log"
)
// Timer timer table
type Timer struct {
ID string `xorm:"timer_id" json:"timer_id"`
TaskID string `xorm:"task_id" json:"task_id"`
UserID string `xorm:"user_id" json:"user_id"`
Name string `xorm:"timer_name" json:"timer_name"`
Start int `xorm:"timer_start" json:"timer_start"`
Interval int `xorm:"timer_interval" json:"timer_interval"`
Surplus int `xorm:"-" json:"timer_surplus"`
Repeat int `xorm:"timer_repeat" json:"timer_repeat"`
Status bool `xorm:"timer_status" json:"timer_status"`
Created time.Time `xorm:"created" json:"created"`
}
// CreateTimer create timer
func CreateTimer(t *Timer) error {
_, err := MysqlDB.Table("ansible_timer").Insert(t)
if err != nil {
log.Error(err)
}
return err
}
// FindTimers find timers
func FindTimers(uid string) (*[]Timer, error) {
var timers []Timer
err := MysqlDB.Table("ansible_timer").Where("user_id=?", uid).Find(&timers)
if err != nil {
log.Error(err)
return nil, err
}
return &timers, err
}
// GetTimer get timer
func GetTimer(tid string) (bool, *Timer, error) {
var timer Timer
res, err := MysqlDB.Table("ansible_timer").Where("timer_id=?", tid).Get(&timer)
if err != nil {
log.Error(err)
return false, nil, err
}
return res, &timer, err
}
// UpdateTimerStatus updates the timer status
func UpdateTimerStatus(t *Timer) error {
_, err := MysqlDB.Table("ansible_timer").Cols("timer_status").Where("timer_id=?", t.ID).Update(t)
if err != nil {
log.Error(err)
}
return err
}
// UpdateTimerRun updates the timer's repeat count and start time when it runs
func UpdateTimerRun(t *Timer) error {
_, err := MysqlDB.Table("ansible_timer").Cols("timer_repeat", "timer_start").Where("timer_id=?", t.ID).Update(t)
if err != nil {
log.Error(err)
}
return err
}
// UpdateTimer update timer
func UpdateTimer(t *Timer) error {
_, err := MysqlDB.Table("ansible_timer").Where("timer_id=?", t.ID).Update(t)
if err != nil {
log.Error(err)
}
return err
}
// UpdateTimerStart update timer start
func UpdateTimerStart(t *Timer) error {
_, err := MysqlDB.Table("ansible_timer").Cols("timer_status", "timer_start").Where("timer_id=?", t.ID).Update(t)
if err != nil {
log.Error(err)
}
return err
}
// DelTimer deletes a timer
func DelTimer(tid string) error {
timer := new(Timer)
_, err := MysqlDB.Table("ansible_timer").Where("timer_id=?", tid).Delete(timer)
if err != nil {
log.Error(err)
return err
}
return nil
}
|
// flow-typed signature: 47721ffa32ca5cd4485c526ef0833ebb
// flow-typed version: <<STUB>>/react-router-native-stack_v^0.0.11/flow_v0.73.0
/**
* This is an autogenerated libdef stub for:
*
* 'react-router-native-stack'
*
* Fill this stub out by replacing all the `any` types.
*
* Once filled out, we encourage you to share your work with the
* community by sending a pull request to:
* https://github.com/flowtype/flow-typed
*/
declare module 'react-router-native-stack' {
declare module.exports: any;
}
/**
* We include stubs for each file inside this npm package in case you need to
* require those files directly. Feel free to delete any files that aren't
* needed.
*/
declare module 'react-router-native-stack/lib/animationTypes' {
declare module.exports: any;
}
declare module 'react-router-native-stack/lib/findFirstMatch' {
declare module.exports: any;
}
declare module 'react-router-native-stack/lib/getDimension' {
declare module.exports: any;
}
declare module 'react-router-native-stack/lib/getDuration' {
declare module.exports: any;
}
declare module 'react-router-native-stack/lib/getEasing' {
declare module.exports: any;
}
declare module 'react-router-native-stack/lib/getTransforms' {
declare module.exports: any;
}
declare module 'react-router-native-stack/lib/index' {
declare module.exports: any;
}
declare module 'react-router-native-stack/lib/Stack' {
declare module.exports: any;
}
declare module 'react-router-native-stack/lib/StackTransitioner' {
declare module.exports: any;
}
declare module 'react-router-native-stack/lib/styles' {
declare module.exports: any;
}
declare module 'react-router-native-stack/lib/transitionTypes' {
declare module.exports: any;
}
// Filename aliases
declare module 'react-router-native-stack/lib/animationTypes.js' {
declare module.exports: $Exports<'react-router-native-stack/lib/animationTypes'>;
}
declare module 'react-router-native-stack/lib/findFirstMatch.js' {
declare module.exports: $Exports<'react-router-native-stack/lib/findFirstMatch'>;
}
declare module 'react-router-native-stack/lib/getDimension.js' {
declare module.exports: $Exports<'react-router-native-stack/lib/getDimension'>;
}
declare module 'react-router-native-stack/lib/getDuration.js' {
declare module.exports: $Exports<'react-router-native-stack/lib/getDuration'>;
}
declare module 'react-router-native-stack/lib/getEasing.js' {
declare module.exports: $Exports<'react-router-native-stack/lib/getEasing'>;
}
declare module 'react-router-native-stack/lib/getTransforms.js' {
declare module.exports: $Exports<'react-router-native-stack/lib/getTransforms'>;
}
declare module 'react-router-native-stack/lib/index.js' {
declare module.exports: $Exports<'react-router-native-stack/lib/index'>;
}
declare module 'react-router-native-stack/lib/Stack.js' {
declare module.exports: $Exports<'react-router-native-stack/lib/Stack'>;
}
declare module 'react-router-native-stack/lib/StackTransitioner.js' {
declare module.exports: $Exports<'react-router-native-stack/lib/StackTransitioner'>;
}
declare module 'react-router-native-stack/lib/styles.js' {
declare module.exports: $Exports<'react-router-native-stack/lib/styles'>;
}
declare module 'react-router-native-stack/lib/transitionTypes.js' {
declare module.exports: $Exports<'react-router-native-stack/lib/transitionTypes'>;
}
|
package backend
import "imperial-splendour-launcher/backend/customErrors"
func (a *API) GoToDownloadPage() error {
a.logger.Infof("Navigating to %s", downloadPageUrl)
if err := a.browser.OpenURL(downloadPageUrl); err != nil {
a.logger.Warnf("Could not open website: %v", err)
return customErrors.Website
}
return nil
}
|
mod graph;
mod tasks;
mod test;
mod unstable_features;
mod worker;
use crate::config::Config;
use crate::crates::Crate;
use crate::experiments::{Experiment, Mode};
use crate::prelude::*;
use crate::results::{TestResult, WriteResults};
use crate::runner::graph::build_graph;
use crate::runner::worker::{DiskSpaceWatcher, Worker};
use crossbeam_utils::thread::{scope, ScopedJoinHandle};
use rustwide::logging::LogStorage;
use rustwide::Workspace;
use std::collections::HashMap;
use std::path::Path;
use std::sync::{Condvar, Mutex};
use std::time::Duration;
const DISK_SPACE_WATCHER_INTERVAL: Duration = Duration::from_secs(300);
const DISK_SPACE_WATCHER_THRESHOLD: f32 = 0.85;
#[derive(Debug, Fail)]
#[fail(display = "overridden task result to {}", _0)]
pub struct OverrideResult(TestResult);
struct RunnerStateInner {
prepare_logs: HashMap<Crate, LogStorage>,
}
struct RunnerState {
inner: Mutex<RunnerStateInner>,
}
impl RunnerState {
fn new() -> Self {
RunnerState {
inner: Mutex::new(RunnerStateInner {
prepare_logs: HashMap::new(),
}),
}
}
fn lock(&self) -> std::sync::MutexGuard<RunnerStateInner> {
self.inner.lock().unwrap()
}
}
pub fn run_ex<DB: WriteResults + Sync>(
ex: &Experiment,
workspace: &Workspace,
crates: &[Crate],
db: &DB,
threads_count: usize,
config: &Config,
) -> Fallible<()> {
if !rustwide::cmd::docker_running(workspace) {
return Err(err_msg("docker is not running"));
}
info!("computing the tasks graph...");
let graph = Mutex::new(build_graph(ex, crates, config));
let parked_threads = Condvar::new();
info!("uninstalling toolchains...");
// Clean out all the toolchains currently installed. This minimizes the
// amount of disk space used by the base system, letting the task execution
// proceed slightly faster than it would otherwise.
for tc in workspace.installed_toolchains()? {
// But don't uninstall it if we're going to reinstall in a couple lines.
if !ex.toolchains.iter().any(|t| tc == t.source) {
tc.uninstall(workspace)?;
}
}
info!("preparing the execution...");
for tc in &ex.toolchains {
tc.install(workspace)?;
if ex.mode == Mode::Clippy {
tc.add_component(workspace, "clippy")?;
}
}
info!("running tasks in {} threads...", threads_count);
let state = RunnerState::new();
let workers = (0..threads_count)
.map(|i| {
Worker::new(
format!("worker-{}", i),
workspace,
ex,
config,
&graph,
&state,
db,
&parked_threads,
)
})
.collect::<Vec<_>>();
let disk_watcher = DiskSpaceWatcher::new(
DISK_SPACE_WATCHER_INTERVAL,
DISK_SPACE_WATCHER_THRESHOLD,
&workers,
);
scope(|scope| -> Fallible<()> {
let mut threads = Vec::new();
for worker in &workers {
let join =
scope
.builder()
.name(worker.name().into())
.spawn(move || match worker.run() {
Ok(()) => Ok(()),
Err(r) => {
log::warn!("worker {} failed: {:?}", worker.name(), r);
Err(r)
}
})?;
threads.push(join);
}
let disk_watcher_thread =
scope
.builder()
.name("disk-space-watcher".into())
.spawn(|| {
disk_watcher.run();
Ok(())
})?;
let clean_exit = join_threads(threads.drain(..));
disk_watcher.stop();
let disk_watcher_clean_exit = join_threads(std::iter::once(disk_watcher_thread));
if clean_exit && disk_watcher_clean_exit {
Ok(())
} else {
bail!("some threads returned an error");
}
})?;
// Only the root node must be present
let mut g = graph.lock().unwrap();
assert!(g.next_task(ex, db, "master").is_finished());
assert_eq!(g.pending_crates_count(), 0);
Ok(())
}
fn join_threads<'a, I>(iter: I) -> bool
where
I: Iterator<Item = ScopedJoinHandle<'a, Fallible<()>>>,
{
let mut clean_exit = true;
for thread in iter {
match thread.join() {
Ok(Ok(())) => {}
Ok(Err(err)) => {
crate::utils::report_failure(&err);
clean_exit = false;
}
Err(panic) => {
crate::utils::report_panic(&panic);
clean_exit = false;
}
}
}
clean_exit
}
pub fn dump_dot(ex: &Experiment, crates: &[Crate], config: &Config, dest: &Path) -> Fallible<()> {
info!("computing the tasks graph...");
let graph = build_graph(ex, crates, config);
info!("dumping the tasks graph...");
::std::fs::write(dest, format!("{:?}", graph.generate_dot()).as_bytes())?;
info!("tasks graph available in {}", dest.to_string_lossy());
Ok(())
}
|
<?php
namespace App\Http\Controllers;
use App\Models\category;
use Illuminate\Http\Request;
use Illuminate\Http\Response;
use Illuminate\Support\Carbon;
use Illuminate\Support\Facades\Auth;
use Illuminate\Support\Facades\DB;
use Illuminate\Database\Eloquent\SoftDeletes;
class CategoryController extends Controller
{
public function __construct(){
$this->middleware('auth');
}
public function AllCat(){
$categories = category::latest()->paginate(3);
$trashCat = category::onlyTrashed()->latest()->paginate(5);
// $categories = DB::table('categories')->latest()->paginate(5);
return view('admin.category.index',compact('categories','trashCat'));
}
public function AddCat(Request $request){
$validation = $request->validate([
"category_name" => "required|unique:categories|max:225",
],
[
'category_name.required'=>'Please input Category Name',
'category_name.max' => 'Category name must be less than 255 characters',
]);
// category::insert([
// 'category_name' => $request->category_name,
// 'user_id' => Auth::user()->id,
// 'created_at'=>Carbon::now()
// ]);
//Adding data
$categories = new category;
$categories->category_name = $request->category_name;
$categories->user_id = Auth::user()->id;
$categories->save();
return Redirect()->back()->with('success','Category inserted successfully');
}
public function Edit($id){
$categories = category::find($id);
return view('admin.category.edit',compact('categories'));
}
public function Update(Request $request , $id){
$update = category::find($id)->update([
'category_name'=>$request->category_name,
'user_id' => Auth::user()->id
]);
return Redirect()->route('all.category')->with('success','Category updated successfully');
}
public function softDelete($id){
$delete = category::find($id);
if($delete != null){
$delete->delete();
return Redirect()->back()->with('success','Category deleted successfully 👌🔥');
}else{
return Redirect()->back()->with('failed','Failed to delete');
}
// return redirect()->with('success','Category deleted successfully 👌🔥');
}
public function Restore($id){
$restore = category::withTrashed()->find($id)->restore();
if($restore != null){
return Redirect()->back()->with('success','Category has been successfully restored👌🔥');
}
else{
return Redirect()->back()->with('failed','🔴🔴🔴 Something went wrong');
}
}
public function Pdelete($id){
$pdelete = category::onlyTrashed()->find($id)->forceDelete();
if($pdelete != null){
return Redirect()->back()->with('success','RECORD HAS BEEN PERMANENTLY DELETED');
}
else{
return Redirect()->back()->with('failed','🔴🔴🔴');
}
}
}
|
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.androiddevchallenge
import androidx.lifecycle.LiveData
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.ViewModel
import com.example.androiddevchallenge.extensions.remainingDays
import com.example.androiddevchallenge.extensions.remainingHours
import com.example.androiddevchallenge.extensions.remainingMinutes
import com.example.androiddevchallenge.extensions.remainingSeconds
import java.util.Calendar
import java.util.Timer
import java.util.TimerTask
import kotlin.math.roundToInt
class MainViewModel : ViewModel() {
companion object {
const val COUNTDOWN_INTERVAL = 1000L
}
private val birthDay = Calendar.getInstance().apply {
set(Calendar.MONTH, Calendar.MARCH)
set(Calendar.DAY_OF_MONTH, 18)
set(Calendar.HOUR_OF_DAY, 0)
set(Calendar.MINUTE, 0)
set(Calendar.SECOND, 0)
}
private val _days = MutableLiveData("0")
private val _daysChanged = MutableLiveData(false)
private val _hours = MutableLiveData("0")
private val _minutes = MutableLiveData("0")
private val _seconds = MutableLiveData("0")
val days: LiveData<String> = _days
val daysChanged: LiveData<Boolean> = _daysChanged
val hours: LiveData<String> = _hours
val minutes: LiveData<String> = _minutes
val seconds: LiveData<String> = _seconds
init {
val timerTask = object : TimerTask() {
override fun run() {
val interval = birthDay.timeInMillis - System.currentTimeMillis()
_days.postValue(interval.remainingDays().roundToInt().toString())
_daysChanged.postValue(true)
_hours.postValue(interval.remainingHours().roundToInt().toString())
_minutes.postValue(interval.remainingMinutes().roundToInt().toString())
_seconds.postValue(interval.remainingSeconds().roundToInt().toString())
}
}
Timer().schedule(timerTask, 0, COUNTDOWN_INTERVAL)
}
}
|
export enum QLogon {
LOG_ON_SERVICE_USER = 'LOG_ON_SERVICE_USER',
LOG_ON_CURRENT_USER = 'LOG_ON_CURRENT_USER'
} |
%% @author Maas-Maarten Zeeman <[email protected]>
%% @copyright 2014 Maas-Maarten Zeeman
%%
%% @doc exometer_rrets, a rrets based persistency layer for exometer.
%%
%% Copyright 2014 Maas-Maarten Zeeman
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(exometer_report_rrets).
-author("Maas-Maarten Zeeman <[email protected]>").
-behaviour(exometer_report).
%% exometer_report callback API
-export([
exometer_init/1,
exometer_info/2,
exometer_cast/2,
exometer_call/3,
exometer_report/5,
exometer_subscribe/5,
exometer_unsubscribe/4,
exometer_terminate/2,
exometer_newentry/2,
exometer_setopts/4
]).
-include_lib("exometer_core/include/exometer.hrl").
-record(state, {
storage :: rrets:storage()
}).
exometer_init(Opts) ->
RRetsArgs = get_opt(rrets_args, Opts),
case rrets:open(RRetsArgs) of
{error, _Reason}=Error ->
lager:error("Error triggered while opening. Error: ~p.", [Error]),
Error;
{ok, Storage} ->
{ok, #state{storage=Storage}}
end.
exometer_subscribe(_Metric, _DataPoint, _Extra, _Interval, State) ->
{ok, State}.
exometer_unsubscribe(_Metric, _DataPoint, _Extra, State) ->
{ok, State}.
exometer_report(Metric, DataPoint, _Extra, Value, #state{storage=Storage}=State) ->
Entry = {Metric, DataPoint, Value},
rrets:log(Storage, Entry),
{ok, State}.
exometer_call(info, _From, #state{storage=Storage}=State) ->
Info = rrets:info(Storage),
{reply, Info, State};
exometer_call(Unknown, From, State) ->
lager:info("Unknown call ~p from ~p", [Unknown, From]),
{ok, State}.
exometer_cast(sync, #state{storage=Storage}=State) ->
rrets:sync(Storage),
{ok, State};
exometer_cast(Unknown, State) ->
lager:info("Unknown cast: ~p", [Unknown]),
{ok, State}.
exometer_info(Unknown, State) ->
lager:info("Unknown info: ~p", [Unknown]),
{ok, State}.
exometer_newentry(_Entry, State) ->
{ok, State}.
exometer_setopts(_Metric, _Options, _Status, State) ->
{ok, State}.
exometer_terminate(_, #state{storage=Storage}) ->
ok = rrets:close(Storage).
%%
%% Helpers
%%
get_opt(K, Opts) ->
case lists:keyfind(K, 1, Opts) of
{_, V} -> V;
false -> error({required, K})
end.
|
<?php if ( ! defined('BASEPATH')) exit('No direct script access allowed');
class about extends CI_Controller {
public function __construct()
{
parent :: __construct();
$this->load->model('admin/master_model');
}
function index($menuid)
{
$user_id = $this->session->userdata('UserID');
$check=$this->master_model->auth_read($user_id, $menuid);
if($check==false)
{
redirect('admin/admin_login/redirectNoAuthUser');
}else
{
$data['data_about']=$this->master_model->select_in('ms_about', '*', "WHERE ID=1");
$this->load->view('admin/about/main', $data);
}
}
function insert($menuid)
{
$this->load->view('admin/about/insert');
}
function update_process($menuid, $ID)
{
$name=$this->input->post('name');
//$ID=$this->master_model->mst_last_id('ms_about');
$data=array
(
'name' => $name,
'note' => $this->input->post('desc'),
);
$this->db->where('ID',$ID);
$this->db->update('ms_about', $data);
redirect('admin/about/index/'.$menuid);
}
function edit($menuid, $ID)
{
$data['data_edit']=$this->master_model->mst_data_edit('ms_about', $ID);
$this->load->view('admin/about/edit', $data);
}
function next($menuid, $ID)
{
$ID=$ID+1;
redirect('admin/about/edit/'.$menuid.'/'.$ID);
}
function previous($menuid, $ID)
{
$ID=$ID-1;
redirect('admin/about/edit/'.$menuid.'/'.$ID);
}
function edit_process($menuid, $ID)
{
$name=$this->input->post('name');
$name_e=$this->input->post('name_e');
$data=array
(
'name' => $name,
'name_e'=> $name_e,
'note' => $this->input->post('desc'),
'note_e'=> $this->input->post('desc2'),
);
$this->db->where('ID', $ID);
$this->db->update('ms_about', $data);
redirect('admin/about/index/'.$menuid);
}
function update($menuid)
{
$ID = $this->input->post('ID');
$sort = $this->input->post('sort');
for($a=0 ; $a < count($ID) ; $a++)
{
$this->master_model->mst_update('ms_about', "sort='$sort[$a]'", $ID[$a]);
}
redirect('admin/about/index/'.$menuid);
}
function publish($menuid, $ID, $publish)
{
if($publish==0)
{
$this->db->where('ID', $ID);
$this->db->set('publish','1');
$this->db->update('ms_about');
}else
{
$this->db->where('ID', $ID);
$this->db->set('publish','0');
$this->db->update('ms_about');
}
redirect('admin/about/index/'.$menuid);
}
}
?> |
namespace OpenCvSharp
{
#if LANG_JP
/// <summary>
/// cvHoughLines2で用いる、ハフ変換(直線検出)の種類
/// </summary>
#else
/// <summary>
/// The Hough transform variant
/// </summary>
#endif
public enum HoughLinesMethod : int
{
#if LANG_JP
/// <summary>
/// 標準的ハフ変換.
/// 全ての線分は2つの浮動小数点値 (ρ, θ)で表される.ここでρ は点(0,0) から線分までの距離,θ はx軸と線分の法線が成す角度.
/// そのため,行列(作成されるシーケンス)は,CV_32FC2 タイプとなる.
/// [CV_HOUGH_STANDARD]
/// </summary>
#else
/// <summary>
/// Classical or standard Hough transform.
/// Every line is represented by two floating-point numbers (ρ, θ), where ρ is a distance between (0,0) point and the line,
/// and θ is the angle between x-axis and the normal to the line.
/// Thus, the matrix must be (the created sequence will be) of CV_32FC2 type.
/// [CV_HOUGH_STANDARD]
/// </summary>
#endif
Standard = CvConst.CV_HOUGH_STANDARD,
#if LANG_JP
/// <summary>
/// 確率的ハフ変換(画像に長い線が少ない場合に有効).
/// 全ての線を返すのではなく,線分を返す. 全ての線分は始点と終点で表され,行列(作成されるシーケンス)は,CV_32SC4 タイプとなる.
/// [CV_HOUGH_PROBABILISTIC]
/// </summary>
#else
/// <summary>
/// Probabilistic Hough transform (more efficient in case if picture contains a few long linear segments).
/// It returns line segments rather than the whole lines. Every segment is represented by starting and ending points,
/// and the matrix must be (the created sequence will be) of CV_32SC4 type.
/// [CV_HOUGH_PROBABILISTIC]
/// </summary>
#endif
Probabilistic = CvConst.CV_HOUGH_PROBABILISTIC,
#if LANG_JP
/// <summary>
/// マルチスケール型の古典的ハフ変換. 線は CV_HOUGH_STANDARD と同様の方法でエンコードされる.
/// [CV_HOUGH_MULTI_SCALE]
/// </summary>
#else
/// <summary>
/// Multi-scale variant of classical Hough transform. The lines are encoded the same way as in HoughLinesMethod.Standard.
/// [CV_HOUGH_MULTI_SCALE]
/// </summary>
#endif
MultiScale = CvConst.CV_HOUGH_MULTI_SCALE,
}
}
|
#!/bin/bash
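# Sanity-check SSH connectivity by running a trivial command ("ls") on every
# host in the cluster; StrictHostKeyChecking is disabled so new host keys are
# accepted automatically.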
HOSTS=(master1 master2 master3 infra1 infra2 infra3 app1 app2 app3 app4 app5 stor1 stor2 stor3 stor4)
for host in "${HOSTS[@]}"
do
echo $host
ssh -o StrictHostKeyChecking=no $host "ls"
done
|
#import ssd1306
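# MicroPython doorbell for an ESP board: an HC-SR04 ultrasonic sensor (trigger
# pin 13, echo pin 12) detects a visitor closer than distanceLimit mm and
# publishes the measured distance to the Doorbell/STATE MQTT topic, while
# motion messages received on Motion/EntryMotion/STATE turn the white LED
# (pin 5) on for roughly motionLedDuration seconds.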
from hcsr04 import HCSR04
from umqttsimple import MQTTClient
from machine import Pin,PWM
import time
import ubinascii
import machine
import micropython
import network
import esp
import gc
from config import *
esp.osdebug(None)
gc.collect()
mqtt_server = '192.168.17.5'
topic_sub = b'Motion/EntryMotion/STATE'
topic_pub = b'Doorbell/STATE'
motionFlag = 0
motionLedDuration = 10
ledWhite = machine.Pin(5, machine.Pin.OUT)
doorbellFlag = 0
blinkCounter = 3
distanceLimit = 400
sensor = HCSR04(trigger_pin=13, echo_pin=12,echo_timeout_us=1000000)
wlan=network.WLAN(network.STA_IF)
#====================================
def connectWiFi(ID,password):
i=0
wlan.active(True)
wlan.disconnect()
wlan.connect(ID, password)
while(wlan.ifconfig()[0]=='0.0.0.0'):
i = i + 1
time.sleep(1)
if (i > 20):
break
return True
#====================================
def sub_cb(topic, msg):
global motionFlag
print('sub rcv:')
print((topic, msg))
motionFlag = motionLedDuration
#====================================
def connect_and_subscribe():
global client_id, mqtt_server, topic_sub
client = MQTTClient(client_id, mqtt_server)
client.set_callback(sub_cb)
client.connect()
client.subscribe(topic_sub)
print('Connected to %s MQTT broker, subscribed to %s topic' % (mqtt_server, topic_sub))
return client
#====================================
def restart_and_reconnect():
print('Failed to connect to MQTT broker. Reconnecting...')
time.sleep(10)
machine.reset()
#====================================
def processMotionLed():
global motionFlag
print('led delay = {0}'.format(motionFlag))
if (motionFlag > 0):
if (motionFlag == motionLedDuration):
ledWhite.value(1)
print('led ON = {0}'.format(motionFlag))
elif (motionFlag == 1):
ledWhite.value(0)
print('led OFF = {0}'.format(motionFlag))
if (motionFlag > 0):
motionFlag = motionFlag - 1
#====================================
def processDoorbell():
global motionFlag , doorbellFlag
doorbellFlag = 1
print('<<DoorBell>>')
client.publish(topic_pub, str(distance))
motionFlag = 0
ledWhite.value(0)
i = 0
while i < blinkCounter:
ledWhite.value(1)
time.sleep(0.6)
ledWhite.value(0)
time.sleep(0.3)
i = i + 1
doorbellFlag = 0
#=====Starting point===============================
print("Start Execution\n")
connectWiFi(SSID,PSW)
client_id = ubinascii.hexlify(machine.unique_id())
if not wlan.isconnected():
print('connecting to network...' + SSID)
connectWiFi(SSID, PSW)
time.sleep(2)
print("Wifi connected\n")
try:
client = connect_and_subscribe()
print("MQTT connected\n")
except OSError as e:
print("MQTT error\n")
restart_and_reconnect()
while True:
try:
client.check_msg()
#if (time.time() - last_message) > message_interval:
distance = sensor.distance_mm()
if (distance > 0 and distance < distanceLimit and doorbellFlag == 0):
processDoorbell()
print('distance = {0}'.format(distance))
processMotionLed()
#last_message = time.time()
#counter += 1
time.sleep(1)
except OSError as e:
restart_and_reconnect()
|
<?php $__env->startComponent('mail::message'); ?>
# Introduction
Hi Users! This is the response message.
Please don't reply.
Thanks,
<?php echo e(config('app.name')); ?>
<?php echo $__env->renderComponent(); ?>
<?php /**PATH /var/www/html/uLaravel/Thewayshop/resources/views/emails/welcome.blade.php ENDPATH**/ ?> |
<?php
namespace App\Console\Commands;
use App\Repositories\WeatherRepositoryInterface;
use Illuminate\Console\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
class AddWeatherCommand extends Command
{
private $weatherRepository;
public function __construct(WeatherRepositoryInterface $weatherRepository)
{
$this->weatherRepository = $weatherRepository;
parent::__construct();
}
protected $signature = 'weather:add';
public function execute(InputInterface $input, OutputInterface $output)
{
$city = $this->ask('City');
$temperature = $this->ask('Temperature');
$tempFeeling = $this->ask('Feeling temperature');
$humidity = $this->ask('Humidity');
$windSpeed = $this->ask('Wind speed');
$windDirection = $this->ask('Wind direction');
$pressure = $this->ask('Pressure');
$precipitation = $this->ask('Precipitation');
$data = [
'city' => $city,
'temperature' => $temperature,
'temperature_feeling' => $tempFeeling,
'humidity' => $humidity,
'wind_speed' => $windSpeed,
'wind_direction' => $windDirection,
'pressure' => $pressure,
'precipitation' =>$precipitation
];
$this->weatherRepository->addWeather($data);
}
}
|
package main
import (
"github.com/thaniri/folrep/controller"
"net/http"
"time"
)
func main() {
controller := controller.New()
webApp := &http.Server{
Addr: "0.0.0.0:8080",
Handler: controller,
WriteTimeout: 15 * time.Second,
ReadTimeout: 15 * time.Second,
IdleTimeout: 60 * time.Second,
}
	log.Fatal(webApp.ListenAndServe())
}
|
/**
* Define the steps in the sample.
*
* @author Shin Feng
* @date 2018.2.27
* @version 1.0.0
*
*/
const { Given, When, Then } = require('cucumber');
Given(/^a request data: (.*) for (.*) interface$/, function(data, api) {
this.setApi(api);
this.setData(data);
});
Given(/^a request data from "(.*)" for (.*) interface$/, function(file, api) {
this.setApi(api);
this.readFile(file);
});
When(/^I send a (.*) request$/, function(type, callback) {
this.sendRequest(type, callback);
});
Then(/^the response should be (.*)$/, function(response) {
this.verifyResponse(response);
});
Then(/^the response "(.*)" should be (.*)$/, function(param, value) {
this.verifyJsonResponseParam(param, value);
});
Given(/^(.*)接口:发送数据“(.*)”$/, function(api, data) {
this.setApi(api);
this.setData(data);
});
Given(/^(.*)接口:发送数据从文件“(.*)”$/, function(api, file) {
this.setApi(api);
this.readFile(file);
});
When(/^发送(.*)请求$/, function(type, callback) {
this.sendRequest(type, callback);
});
Then(/^响应应为:(.*)$/, function(response) {
this.verifyResponse(response);
});
Then(/^响应参数“(.*)”应为:(.*)$/, function(param, value) {
this.verifyJsonResponseParam(param, value);
}); |
#pragma once
#include <stdio.h>
#include <tchar.h>
#include <stdlib.h>
#include <string.h>
#include <vector>
#include <set>
#include <deque>
#include <map>
#include <string>
#include <algorithm>
#include <functional>
#include <mutex>
#include <atomic>
#include "..\recompiler_core\build.h"
// Widgets
#define WXUSINGDLL
#define wxUSE_EXCEPTIONS 0
#define wxUSE_RICHTEXT 1
#define wxNO_PNG_LIB
#include "wx/wxprec.h"
#include "wx/xrc/xmlres.h"
#include "wx/splitter.h"
#include "wx/aui/aui.h"
#include "wx/aui/auibar.h"
#include "wx/dcbuffer.h"
#include "wx/evtloop.h"
#include "wx/statusbr.h"
#include "wx/menu.h"
#include "wx/gdicmn.h"
#include "wx/treectrl.h"
#include "wx/toolbar.h"
#include "wx/spinctrl.h"
#include "wx/srchctrl.h"
#include "wx/combobox.h"
#include "wx/treectrl.h"
#include "wx/panel.h"
#include "wx/statline.h"
#include "wx/sizer.h"
#include "wx/splitter.h"
#include "wx/frame.h"
#include "wx/aui/aui.h"
#include "wx/artprov.h"
#include "wx/laywin.h"
#include "wx/bmpcbox.h"
#include "wx/xrc/xmlres.h"
#include "wx/image.h"
#include "wx/collpane.h"
#include "wx/dcbuffer.h"
#include "wx/tglbtn.h"
#include "wx/clipbrd.h"
#include "wx/fileconf.h"
#include "wx/datectrl.h"
#include "wx/listbase.h"
#include "wx/listctrl.h"
#include "wx/grid.h"
#include "wx/colordlg.h"
#include "wx/filepicker.h"
#include "wx/numdlg.h"
#include "wx/html/htmlwin.h"
#include "wx/clrpicker.h"
#include "wx/ribbon/toolbar.h"
#include "wx/valnum.h"
//#include "wx/treelist.h"
#include "wx/dataview.h"
#include "wx/renderer.h"
#include "wx/richtext/richtextctrl.h"
#include "wx/filename.h"
#include "wx/stdpaths.h"
#include "wx/wfstream.h"
#include "wx/filedlg.h"
#include "wx/process.h"
#include "wx/datetime.h"
#include "wx/accel.h"
// Backend
#include "../recompiler_core/build.h"
// Crap
#include "config.h"
#include "app.h"
#include "bitmaps.h"
#include "logWindow.h"
template< typename T >
const T& TemplateClamp(const T& v, const T& a, const T& b)
{
if (v <= a) return a;
if (v >= b) return b;
return v;
}
namespace tools
{
class Project;
class ProjectImage;
class MemoryView;
enum class ValueViewMode
{
Auto,
Hex,
};
static const uint64 INVALID_ADDRESS = ~(uint64)0;
//-----------------------------------------------------------------------------
/// Navigation type
enum class NavigationType
{
LocalStart,
LocalEnd,
LocalStepBack,
LocalStepIn,
GlobalStart,
GlobalEnd,
GlobalStepBack,
GlobalStepIn,
HorizontalPrev,
HorizontalNext,
ToggleBreakpoint,
RunForward,
RunBackward,
SyncPos,
HistoryBack,
HistoryForward,
Follow,
ReverseFollow,
Advance,
};
// call tree navigation
class INavigationHelper
{
public:
virtual ~INavigationHelper() {};
virtual std::shared_ptr<ProjectImage> GetCurrentImage() { return nullptr; }
virtual MemoryView* GetCurrentMemoryView() { return nullptr; }
virtual bool NavigateToFrame(const TraceFrameID id) { return false; };
virtual bool NavigateToCodeAddress(const uint64 id, const bool addToHistory) { return false; };
virtual bool NavigateToMemoryAddress(const uint64 memoryAddress) { return false; };
virtual bool Navigate(const NavigationType type) { return false; };
};
//-----------------------------------------------------------------------------
} // tools |
using System;
using System.Collections.Generic;
using Devdog.General.ThirdParty.UniLinq;
using System.Text;
namespace Devdog.General.UI
{
public interface IUIWindowInputHandler
{
}
}
|
<?php
declare(strict_types=1);
namespace Tulia\Cms\Content\Type\Infrastructure\Framework\Form\Service;
use Tulia\Cms\Content\Type\Domain\WriteModel\Service\Configuration;
use Tulia\Cms\Content\Type\Infrastructure\Framework\Form\ContentTypeFormDescriptor;
use Tulia\Component\Templating\View;
/**
* @author Adam Banaszkiewicz
*/
class LayoutBuilder
{
private LayoutTypeBuilderRegistry $builderRegistry;
private Configuration $config;
public function __construct(LayoutTypeBuilderRegistry $builderRegistry, Configuration $config)
{
$this->builderRegistry = $builderRegistry;
$this->config = $config;
}
public function build(ContentTypeFormDescriptor $formDescriptor): View
{
$type = $formDescriptor->getContentType();
return $this->builderRegistry
->get($this->config->getLayoutBuilder($type->getType()))
->editorView($type, $formDescriptor->getFormView(), $formDescriptor->getViewContext());
}
}
|
# Automatically logout after 30 minutes idle at command prompt (unless in X).
if [ -z "$DISPLAY" ]; then
TMOUT=1800
readonly TMOUT
export TMOUT
fi
|
class Symbol
# Returns the Symbol as a lowerCamelCase String.
def to_soap_key
to_s.to_soap_key.lower_camelcase
end
end |
# natapp
A Python/Tornado/MongoDB tool (with caching) for exposing an intranet service to the public internet.
# Usage
1. conf.txt is the configuration file:
port is the externally exposed port
url_base is the public (internet-facing) address of the deployment, used by the intranet side to request and relay data
url_target is the original intranet URL that requests are forwarded to, in the form http[s]://[IP]:[Port]/
2. MongoDB is the default database.
3. On the public host, start service.py.
4. On the intranet host, start consumer.py.
5. The program also includes a semi-automated crawler feature; feel free to get in touch if you are interested.
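For illustration only, the three settings above might look like this in conf.txt; the key=value syntax and the example values are assumptions, not taken from the project:

    # hypothetical conf.txt sketch -- check the file shipped with the project for the real format
    port=8080
    url_base=http://public.example.com:8080/
    url_target=http://192.168.1.10:80/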
|
#include <stdio.h>
#include <stdlib.h>
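/*
 * Test driver for maxSeq(). Judging from the cases below, maxSeq(array, n)
 * should return the length of the longest strictly increasing contiguous run
 * in the array (0 for an empty array).
 */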
int main (){
size_t maxSeq(int * array, size_t n);
int array1[]={1,2,3};
int array2[]={1};
int array4[]={0};
int array5[]={};
int array6[]={-1,-2,-3};
int array7[]={4294967293,4294967294,4294967295};
int array8[]={-2,-1,0,1,2};
int array9[]={1,2,2,3,4,5};
int array10[]={4294967295};
if(maxSeq(array1,3)!=3){
printf("Failed on 1,2,3\n");
exit(EXIT_FAILURE);}
if(maxSeq(array2,1)!=1){
printf("Failed on 1\n");
exit(EXIT_FAILURE);}
if(maxSeq(array4,1)!=1){
printf("Failed on 0\n");
exit(EXIT_FAILURE);}
if(maxSeq(array5,0)!=0){
printf("Failed on Null\n");
exit(EXIT_FAILURE);}
if(maxSeq(array6,3)!=1){
printf("Failed on -1,-2,-3\n");
exit(EXIT_FAILURE);}
if(maxSeq(array7,3)!=3){
printf("array7 failed\n");
exit(EXIT_FAILURE);}
if(maxSeq(array8,5)!=5){
printf("Failed on -2,-1,0,1,2\n");
exit(EXIT_FAILURE);}
if(maxSeq(array9,6)!=4){
printf("Failed on 1,2,2,3,4,5 b\n");
exit(EXIT_FAILURE);}
if(maxSeq(array10,1)!=1){
printf("Failed on big");
exit(EXIT_FAILURE);}
return(EXIT_SUCCESS);
}
|
<?php
namespace App\Controllers;
use Bright\Controller\Controller;
use Bright\View\View;
/**
* Home controller
*/
class Home extends Controller
{
public function index()
{
View::renderTemplate('index.php', [
'name' => 'Bright Framework',
'author' => 'Asyraf Hussin'
]);
}
public function test($any)
{
echo "Test from home controller ".$any;
}
/**
* Before filter
*
* @return void
*/
protected function before()
{
//echo "(before) ";
//return false;
}
/**
* After filter
*
* @return void
*/
protected function after()
{
//echo " (after)";
}
/**
* Show the index page
*
* @return void
*/
public function indexAction()
{
// View::render('index.php', [
// 'name' => 'Bright Framework',
// 'author' => 'Asyraf Hussin'
// ]);
// Render with twig
View::renderTemplate('index.php', [
'name' => 'Bright Framework',
'author' => 'Asyraf Hussin'
]);
}
}
|
from django.shortcuts import render, get_object_or_404
from .models import Kategor, Tovar, Tovar_inphoto, Tovar_img
from django.views.generic.edit import FormView
from django.contrib.auth.forms import UserCreationForm
from django.http import JsonResponse
from django.utils.http import is_safe_url
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth import REDIRECT_FIELD_NAME, login as auth_login, logout as auth_logout
from django.utils.decorators import method_decorator
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
from django.views.generic import FormView, RedirectView
from django.http import HttpResponseRedirect
from django.views.generic.base import View
from django.contrib.auth import logout
from django.http import HttpResponse
from django.shortcuts import render_to_response
from cart.forms import CartAddProductForm
from .forms import TovarForm
from .forms import ContactForm
from django.core.mail import send_mail,BadHeaderError
def index(request):
return render(request, 'shop/index.html', {})
def info(request):
return render(request, 'shop/info_gl_str.html', {})
def log1(request):
return render(request, 'shop/login_menu.html', {})
def login(request):
    email = None
    password = None
    if request.method == 'POST':
        # Submitted form fields live in request.POST in Django (request.form is Flask's API).
        email = request.POST.get("email")
        password = request.POST.get("passwd")
    return render(request, 'shop/login.html', {'email': email, 'password': password})
class RegisterFormView(FormView):
form_class = UserCreationForm
    # URL the user is redirected to after a successful registration.
    # Here it points to the login page for registered users.
    success_url = "/log1/"
    # Template used when rendering this view.
    template_name = "shop/login_menu.html"
def form_valid(self, form):
        # Create the user if the form data was entered correctly.
        form.save()
        # Call the base-class method.
        return super(RegisterFormView, self).form_valid(form)
# Function for setting the session key.
# Django uses it to determine whether the user has logged in.
class LoginView(FormView):
"""
Provides the ability to login as a user with a username and password
"""
success_url = '/'
form_class = AuthenticationForm
redirect_field_name = REDIRECT_FIELD_NAME
template_name = "shop/login.html"
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.request = None
@method_decorator(sensitive_post_parameters('password'))
@method_decorator(csrf_protect)
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
# Sets a test cookie to make sure the user has cookies enabled
request.session.set_test_cookie()
return super(LoginView, self).dispatch(request, *args, **kwargs)
def form_valid(self, form):
auth_login(self.request, form.get_user())
# If the test cookie worked, go ahead and
# delete it since its no longer needed
if self.request.session.test_cookie_worked():
self.request.session.delete_test_cookie()
return super(LoginView, self).form_valid(form)
class LogoutView(View):
def get(self, request):
        # Log out the user who requested this view.
        logout(request)
        # Then redirect the user to the home page.
return HttpResponseRedirect("/")
def ProductList(request, category_slug=None):
category = None
categories = Kategor.objects.all()
products = Tovar.objects.filter(tovar_available=True)
if category_slug:
category = get_object_or_404(Kategor, kategory_slug=category_slug)
products = products.filter(kategory_id=category)
if 'phone_name' in request.GET:
products = products.filter(tovar_name__icontains=request.GET['phone_name'])
checki = request.GET.get('checki', False)
    if checki:
        try:
            # Collect the 25 filter checkbox parameters (check0..check24) into one ordered list.
            z = [request.GET.get('check{}'.format(i), False) for i in range(25)]
a0 = Tovar_inphoto.objects.filter(tovarinphoto_proizv__in=[z[0], z[1], z[2], z[6]])
list_a0 = []
for i in a0.values_list('tovarinphoto_proizv', flat=True).order_by('tovarinphoto_proizv'):
list_a0.append(i)
a1 = Tovar_inphoto.objects.filter(tovarinphoto_diagon__in=[z[3], z[4], z[5]])
list_a1 = []
for i in a1.values_list('tovarinphoto_proizv', flat=True).order_by('tovarinphoto_proizv'):
list_a1.append(i)
a2 = Tovar_inphoto.objects.filter(tovarinphoto_ram__in=[z[7], z[8], z[9], z[10]])
list_a2 = []
for i in a2.values_list('tovarinphoto_proizv', flat=True).order_by('tovarinphoto_proizv'):
list_a2.append(i)
a3 = Tovar_inphoto.objects.filter(
tovarinphoto_osnkamera__in=[z[11], z[12], z[13], z[14], z[15], z[16], z[17]])
list_a3 = []
for i in a3.values_list('tovarinphoto_proizv', flat=True).order_by('tovarinphoto_proizv'):
list_a3.append(i)
a4 = Tovar_inphoto.objects.filter(tovarinphoto_opsystem__in=[z[18], z[19], z[20]])
list_a4 = []
for i in a4.values_list('tovarinphoto_proizv', flat=True).order_by('tovarinphoto_proizv'):
list_a4.append(i)
a5 = Tovar_inphoto.objects.filter(tovarinphoto_cpu__in=[z[21], z[22], z[23], z[24]])
list_a5 = []
for i in a5.values_list('tovarinphoto_proizv', flat=True).order_by('tovarinphoto_proizv'):
list_a5.append(i)
list_all_p = []
list_all_proizv = [list_a0, list_a1, list_a2, list_a3, list_a4, list_a5]
for i in list_all_proizv:
if len(i) != 0:
list_all_p.append(set(i))
else:
pass
iter_a = list_all_p[0]
for i in list_all_p:
iter_a &= i
iter_a = list(iter_a)
list_tovar = []
for i in Tovar_inphoto.objects.filter(tovarinphoto_proizv__in=iter_a):
list_tovar.append(i.tovar_id.tovar_name)
products = Tovar.objects.filter(tovar_name__in=list_tovar)
except IndexError:
pass
return render(request, 'shop/smartfons.html', {
'category': category,
'categories': categories,
'products': products
})
def get_category_list(max_results=0, starts_with=''):
cat_list = []
if starts_with:
cat_list = Tovar.objects.filter(tovar_name__icontains=starts_with)
if max_results > 0:
if len(cat_list) > max_results:
cat_list = cat_list[:max_results]
return cat_list
def suggest_category(request):
cat_list = []
starts_with = ''
if request.method == 'GET':
starts_with = request.GET['suggestion']
cat_list = get_category_list(8, starts_with)
return render(request, 'shop/category_list.html', {'cat_list': cat_list})
# Product page
def ProductDetail(request, id, slug):
product = get_object_or_404(Tovar, id=id, tovar_slug=slug, tovar_available=True)
har = Tovar_inphoto.objects.get(id=id)
fot = har.phototovar.all()[0]
cart_product_form = CartAddProductForm()
return render(request, 'shop/harakteriskick.html', {'product': product, 'har': har,
'cart_product_form': cart_product_form,
'fot': fot})
# def ProductDetail(request, id, slug):
# product = get_object_or_404(Tovar, id=id, tovar_slug=slug, tovar_available=True)
# har = Tovar_inphoto.objects.get(id=id)
# cart_product_form = CartAddProductForm()
# if request.method == "GET":
# form = TovarForm(request.GET.get("phone_name", None))
# print(request.GET['phone_name'])
# if form:
# product = Tovar_inphoto.objects.filter(tovarinphoto_info__icontains=form)
# return render(request, 'shop/harakteriskick.html', {'product': product, 'har': har,
# 'cart_product_form': cart_product_form,
# 'form': form})
# else:
# form = TovarForm()
# return render(request, 'shop/harakteriskick.html', {'product': product, 'har': har,
# 'cart_product_form': cart_product_form,
# 'form': form})
# def ProductDetail(request, id, slug):
# product = get_object_or_404(Tovar, id=id, slug=slug, available=True)
# cart_product_form = CartAddProductForm()
# return render_to_response('shop/product/detail.html',
# {'product': product,
# 'cart_product_form': cart_product_form})
# Feedback (contact) form view
def contactform(request):
    if request.method == 'POST':
        form = ContactForm(request.POST)
        # If the form is filled in correctly, collect all of the values entered by the user
        if form.is_valid():
            subject = form.cleaned_data['subject']
            sender = form.cleaned_data['sender']
            message = form.cleaned_data['message']
            copy = form.cleaned_data['copy']
            recipients = ['[email protected]']
            # If the user asked for a copy, add them to the recipient list
            if copy:
                recipients.append(sender)
            try:
                send_mail(subject, message, '[email protected]', recipients)
            except BadHeaderError:  # Protects against header-injection attacks
                return HttpResponse('Invalid header found')
            # Redirect to another page once the message has been sent
            return HttpResponseRedirect('/thanks/')
    else:
        form = ContactForm()
    # Render the form in the template
    return render(request, 'contact.html', {'form': form})
def thanks(request):
    thanks = 'thanks'
    return render(request, 'thanks.html', {'thanks': thanks})
|
# -*-perl-*- hey - emacs - this is a perl file
# src/tools/msvc/pgflex.pl
# silence flex bleatings about file path style
$ENV{CYGWIN} = 'nodosfilewarning';
use strict;
use File::Basename;
# assume we are in the postgres source root
require 'src/tools/msvc/buildenv.pl' if -e 'src/tools/msvc/buildenv.pl';
system('flex -V > NUL');
if ($? != 0)
{
print "WARNING! flex install not found, attempting to build without\n";
exit 0;
}
my $input = shift;
if ($input !~ /\.l$/)
{
print "Input must be a .l file\n";
exit 1;
}
elsif (!-e $input)
{
print "Input file $input not found\n";
exit 1;
}
(my $output = $input) =~ s/\.l$/.c/;
# get flex flags from make file
my $makefile = dirname($input) . "/Makefile";
my ($mf, $make);
open($mf,$makefile);
local $/ = undef;
$make=<$mf>;
close($mf);
my $flexflags = ($make =~ /^\s*FLEXFLAGS\s*=\s*(\S.*)/m ? $1 : '');
system("flex $flexflags -o$output $input");
if ($? == 0)
{
# For non-reentrant scanners we need to fix up the yywrap macro definition
# to keep the MS compiler happy.
# For reentrant scanners (like the core scanner) we do not
# need to (and must not) change the yywrap definition.
my $lfile;
open($lfile,$input) || die "opening $input for reading: $!";
my $lcode = <$lfile>;
close($lfile);
if ($lcode !~ /\%option\sreentrant/)
{
my $cfile;
open($cfile,$output) || die "opening $output for reading: $!";
my $ccode = <$cfile>;
close($cfile);
$ccode =~ s/yywrap\(n\)/yywrap()/;
open($cfile,">$output") || die "opening $output for reading: $!";
print $cfile $ccode;
close($cfile);
}
exit 0;
}
else
{
exit $? >> 8;
}
|
/** \file
* Implementation for \ref include/cqasm-v1-parse-helper.hpp "cqasm-v1-parse-helper.hpp".
*/
#include "cqasm-v1-parse-helper.hpp"
#include "cqasm-v1-parser.hpp"
#include "cqasm-v1-lexer.hpp"
namespace cqasm {
namespace v1 {
namespace parser {
/**
* Parse the given file.
*/
ParseResult parse_file(const std::string &filename) {
return std::move(ParseHelper(filename, "", true).result);
}
/**
* Parse using the given file pointer.
*/
ParseResult parse_file(FILE *file, const std::string &filename) {
return std::move(ParseHelper(filename, file).result);
}
/**
* Parse the given string. A filename may be given in addition for use within
* error messages.
*/
ParseResult parse_string(const std::string &data, const std::string &filename) {
return std::move(ParseHelper(filename, data, false).result);
}
/**
* Parse a string or file with flex/bison. If use_file is set, the file
* specified by filename is read and data is ignored. Otherwise, filename
* is used only for error messages, and data is read instead. Don't use
* this directly, use parse().
*/
ParseHelper::ParseHelper(
const std::string &filename,
const std::string &data,
bool use_file
) : filename(filename) {
// Create the scanner.
if (!construct()) return;
// Open the file or pass the data buffer to flex.
if (use_file) {
fptr = fopen(filename.c_str(), "r");
if (!fptr) {
std::ostringstream sb;
sb << "Failed to open input file " << filename << ": "
<< strerror(errno);
push_error(sb.str());
return;
}
cqasm_v1set_in(fptr, (yyscan_t)scanner);
} else {
buf = cqasm_v1_scan_string(data.c_str(), (yyscan_t)scanner);
}
// Do the actual parsing.
parse();
}
/**
* Construct the analyzer internals for the given filename, and analyze
* the file.
*/
ParseHelper::ParseHelper(
const std::string &filename,
FILE *fptr
) : filename(filename) {
// Create the scanner.
if (!construct()) return;
// Open the file or pass the data buffer to flex.
cqasm_v1set_in(fptr, (yyscan_t)scanner);
// Do the actual parsing.
parse();
}
/**
* Initializes the scanner. Returns whether this was successful.
*/
bool ParseHelper::construct() {
int retcode = cqasm_v1lex_init((yyscan_t*)&scanner);
if (retcode) {
std::ostringstream sb;
sb << "Failed to construct scanner: " << strerror(retcode);
push_error(sb.str());
return false;
} else {
return true;
}
}
/**
* Does the actual parsing.
*/
void ParseHelper::parse() {
int retcode = cqasm_v1parse((yyscan_t) scanner, *this);
if (retcode == 2) {
std::ostringstream sb;
sb << "Out of memory while parsing " << filename;
push_error(sb.str());
return;
} else if (retcode) {
std::ostringstream sb;
sb << "Failed to parse " << filename;
push_error(sb.str());
return;
}
if (result.errors.empty() && !result.root.is_well_formed()) {
std::cerr << *result.root;
throw std::runtime_error("internal error: no parse errors returned, but AST is incomplete. AST was dumped.");
}
}
/**
* Destroys the analyzer.
*/
ParseHelper::~ParseHelper() {
if (fptr) {
fclose(fptr);
}
if (buf) {
cqasm_v1_delete_buffer((YY_BUFFER_STATE)buf, (yyscan_t)scanner);
}
if (scanner) {
cqasm_v1lex_destroy((yyscan_t)scanner);
}
}
/**
* Pushes an error.
*/
void ParseHelper::push_error(const std::string &error) {
result.errors.push_back(error);
}
} // namespace parser
} // namespace v1
} // namespace cqasm
|
@extends('layout.master')
@section('title')
Home: Schedule
@stop
@include('layout.navi')
@section('content')
<br>
<a href="{{ route('schedule.create') }}" class="button">Create New Schedule</a>
<a href="{{ route('venue.index')}}" class="button">View Venues</a>
<br><br>
<table cellpadding="0" cellspacing="0" border="0">
<tbody>
@foreach($schedule as $schedule)
<tr>
<td class="col-sched">{{ $schedule["hometeam"]->display_name }}</td>
<td class="col-sched">{{ $schedule["venue"]->display_name }} <br>
<!-- format then display -->
{{ date('F d, Y', strtotime($schedule->match_date)) }} <br>
{{ date('h:i A', strtotime($schedule->match_time)) }}
</td>
<td class="col-sched">{{ $schedule["awayteam"]->display_name }}</td>
</tr>
@endforeach
</tbody>
</table>
@endsection |
#include <stdio.h>
#include <stdlib.h>
struct CircularQueue {
int head;
int tail;
int *data;
int capacity;
int size;
};
typedef struct CircularQueue Queue;
struct MaybeInt {
int isJust;
int val;
};
Queue initialiseQueue() {
int capacity = 1;
int *d = (int *) malloc(capacity * sizeof(int));
return (Queue) {
.head = 0,
.tail = 0,
.data = d,
.capacity = capacity,
.size = 0
};
}
void freeQueue(Queue *pq) {
free(pq->data);
pq->data = NULL;
}
void printQueue(Queue const *pq) {
printf("Queue(head=%d, tail=%d, capacity=%d) : [", pq->head, pq->tail, pq->capacity);
for (int i = pq->head; i != pq->tail; i = (1 + i) % pq->capacity) {
printf("%d", pq->data[i]);
if ((i + 1) % pq->capacity != pq->tail) {
printf(", ");
}
}
printf("]\n");
}
void growQueue(Queue *pq) {
size_t byteCount = sizeof (int) * pq->capacity;
int *bigger = (int *) malloc(2 * byteCount);
for (int i = 0; i < pq->capacity; i++) {
bigger[i] = pq->data[(pq->head + i) % pq->capacity];
}
free(pq->data);
pq->head = 0;
pq->tail = pq->capacity;
pq->capacity *= 2;
pq->data = bigger;
}
void enqueue(int val, Queue *pq) {
pq->data[pq->tail] = val;
pq->tail = (1 + pq->tail) % pq->capacity;
pq->size += 1;
// The main idea is to never use up all capacity.
// Grow immediately when it happens.
// As such, queues with `head == tail` denote empty queues,
// not "full" queues.
if (pq->tail == pq->head) {
growQueue(pq);
}
}
struct MaybeInt dequeue(Queue *pq) {
if (pq->head == pq->tail) {
return (struct MaybeInt) {
.isJust = 0,
.val = 0
};
} else {
struct MaybeInt result = {
.isJust = 1,
.val = pq->data[pq->head]
};
pq->head = (1 + pq->head) % pq->capacity;
pq->size -= 1;
return result;
}
}
// Not really interactive; problematic due to terminal buffering issues.
// Using a script file like ./queue-actions.txt as STDIN would work.
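// Illustrative input format, matching the commands handled below
// ('e <int>' enqueues, 'd' dequeues, anything else just prints the queue):
//   e 10
//   e 20
//   d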
void interact(Queue *pq) {
char cmd;
while (scanf("%c", &cmd) > 0) {
switch (cmd) {
case 'e':
{
int x;
scanf("%d\n", &x);
enqueue(x, pq);
printf("Enqueued: %d\n", x);
printQueue(pq);
break;
}
case 'd':
{
scanf("\n");
struct MaybeInt mb = dequeue(pq);
if (mb.isJust) {
printf("Dequeued: %d\n", mb.val);
} else {
printf("Dequeueing from empty queue.\n");
}
printQueue(pq);
break;
}
default:
printQueue(pq);
}
}
}
int main(int argc, char *argv[]) {
Queue q = initialiseQueue();
interact(&q);
freeQueue(&q);
return 0;
}
|
// Java program to find celebrity using
// stack data structure
import java.util.Stack;
class pg2
{
// Person with 2 is celebrity
static int MATRIX[][] = { { 0, 0, 1, 0 },
{ 0, 0, 1, 0 },
{ 0, 0, 0, 0 },
{ 0, 0, 1, 0 } };
// Returns true if a knows
// b, false otherwise
    static boolean knows(int a, int b)
    {
        return MATRIX[a][b] == 1;
    }
// Returns -1 if celebrity
// is not present. If present,
// returns id (value from 0 to n-1).
static int findCelebrity(int n)
{
Stack<Integer> st = new Stack<>();
int c;
// Step 1 :Push everybody
// onto stack
for (int i = 0; i < n; i++)
{
st.push(i);
}
while (st.size() > 1)
{
// Step 2 :Pop off top
// two persons from the
// stack, discard one
// person based on return
// status of knows(A, B).
int a = st.pop();
int b = st.pop();
// Step 3 : Push the
// remained person onto stack.
if (knows(a, b))
{
st.push(b);
}
else
st.push(a);
}
c = st.pop();
// Step 5 : Check if the last
// person is celebrity or not
for (int i = 0; i < n; i++)
{
// If any person doesn't
// know 'c' or 'a' doesn't
// know any person, return -1
if (i != c && (knows(c, i) ||
!knows(i, c)))
return -1;
}
return c;
}
// Driver Code
public static void main(String[] args)
{
int n = 4;
int result = findCelebrity(n);
if (result == -1)
{
System.out.println("No Celebrity");
}
else
System.out.println("Celebrity ID " +
result);
}
}
|
DROP DATABASE IF EXISTS ebid;
CREATE DATABASE ebid;
USE ebid;
CREATE TABLE products (
id int NOT NULL AUTO_INCREMENT,
item VARCHAR(255) NOT NULL,
min_cost FLOAT NOT NULL,
curr_bid FLOAT NOT NULL,
ends_in int NOT NULL,
image VARCHAR(255) NOT NULL,
PRIMARY KEY (id)
);
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Course::Announcement do
let!(:instance) { create(:instance) }
with_tenant(:instance) do
subject { Ability.new(user) }
let(:course) { create(:course) }
let!(:not_started_announcement) { create(:course_announcement, :not_started, course: course) }
let!(:ended_announcement) { create(:course_announcement, :ended, course: course) }
let!(:valid_announcement) { create(:course_announcement, course: course) }
context 'when the user is a Course Student' do
let(:user) { create(:course_student, course: course).user }
it { is_expected.to be_able_to(:show, valid_announcement) }
it { is_expected.to be_able_to(:show, ended_announcement) }
it { is_expected.not_to be_able_to(:show, not_started_announcement) }
it { is_expected.not_to be_able_to(:manage, valid_announcement) }
it 'sees the started announcements' do
expect(course.announcements.accessible_by(subject)).
to contain_exactly(valid_announcement, ended_announcement)
end
end
context 'when the user is a Course Staff' do
let(:user) { create(:course_manager, course: course).user }
it { is_expected.to be_able_to(:manage, valid_announcement) }
it { is_expected.to be_able_to(:manage, ended_announcement) }
it { is_expected.to be_able_to(:manage, not_started_announcement) }
it 'sees all announcements' do
expect(course.announcements.accessible_by(subject)).
to contain_exactly(not_started_announcement, valid_announcement, ended_announcement)
end
end
end
end
|
const controllers = require('./controllers');
const mid = require('./middleware');
const router = (app) => {
app.get('/getToken', mid.requiresSecure, controllers.Account.getToken);
app.get('/getPosts', mid.requiresSecure, controllers.Post.getPosts);
app.get('/getAllPosts', mid.requiresSecure, controllers.Post.getAllPosts);
app.get('/login', mid.requiresSecure, mid.requiresLogout, controllers.Account.loginPage);
app.post('/login', mid.requiresSecure, mid.requiresLogout, controllers.Account.login);
app.get('/changePass', mid.requiresSecure, mid.requiresLogin, controllers.Account.changePass);
app.post('/updatePassword', mid.requiresSecure, mid.requiresLogin,
controllers.Account.updatePassword);
app.get('/getAccountInfo', mid.requiresSecure, mid.requiresLogin,
controllers.Account.getAccountInfo);
app.post('/signup', mid.requiresSecure, mid.requiresLogout, controllers.Account.signup);
app.get('/logout', mid.requiresLogin, controllers.Account.logout);
app.get('/maker', mid.requiresLogin, controllers.Post.makerPage);
app.get('/roster', mid.requiresLogin, controllers.Post.rosterPage);
app.get('/search/:term', mid.requiresLogin, controllers.Post.searchPage);
app.get('/searchPosts/:term', mid.requiresSecure, controllers.Post.searchAllPosts);
app.get('/private', mid.requiresLogin, controllers.Post.privatePage);
app.post('/maker', mid.requiresLogin, controllers.Post.make);
app.get('/detail/:id', mid.requiresSecure, controllers.Post.detailPost);
app.get('/edit/:id', mid.requiresSecure, controllers.Post.editPost);
app.post('/editPost/:id', mid.requiresSecure, controllers.Post.finishEditPost);
app.get('/getPost/:id', mid.requiresSecure, controllers.Post.getPost);
app.get('/', mid.requiresSecure, mid.requiresLogout, controllers.Account.loginPage);
app.get('/*', (req, res) => {
res.render('notFound', { error: 'The page does not exist' });
});
};
module.exports = router;
|
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.iago.util
import java.security.cert.X509Certificate
import javax.net.ssl._
// A hostname verifier that accepts all hostnames.
class IgnorantHostnameVerifier extends HostnameVerifier {
def verify(hostname: String, session: SSLSession): Boolean = {
return true
}
}
// A trust manager that does not validate anything, cribbed from finagle.
class IgnorantTrustManager extends X509TrustManager {
def getAcceptedIssuers: Array[X509Certificate] = null
def checkClientTrusted(certs: Array[X509Certificate], authType: String) {
// Do nothing.
}
def checkServerTrusted(certs: Array[X509Certificate], authType: String) {
// Do nothing.
}
}
|
import unittest
import tempfile
import os
import sys
import hashlib
sys.path.insert(0, '..')
import rpTool
#WARNING: Need to copy a version of rpSBML locally
import rpSBML
#WARNING: Also need to copy cache and cache/cid_strc.pickle.gz
import rpCache
class TestRPextractsink(unittest.TestCase):
@classmethod
def setUpClass(self):
rpcache = rpCache.rpCache()
cidstrc = rpcache.getCIDstrc()
self.rpex = rpTool.rpExtractSink()
def test_genSink(self):
with tempfile.TemporaryDirectory() as tmp_output_folder:
self.rpex.genSink(os.path.join('data', 'model.xml'),
os.path.join(tmp_output_folder, 'sink.csv'),
True)
with open(os.path.join(tmp_output_folder, 'sink.csv'), 'rb') as sinkf:
self.assertEqual(hashlib.md5(sinkf.read()).hexdigest(), '3a00b9b8003dba014ea7c07c1534a9d6')
self.rpex.genSink(os.path.join('data', 'model.xml'),
os.path.join(tmp_output_folder, 'sink.csv'),
False)
with open(os.path.join(tmp_output_folder, 'sink.csv'), 'rb') as sinkf:
self.assertEqual(hashlib.md5(sinkf.read()).hexdigest(), '3a00b9b8003dba014ea7c07c1534a9d6')
|
#!/usr/bin/python
"""
A simple piece of python code that could convert the Brat annotation
tool format into Event Mention Detection format for easy evaluation. For
detailed features and usage please refer to the README file.
Author: Zhengzhong Liu ( [email protected] )
"""
import argparse
import logging
import sys
import os
import errno
import re
bratSpanMarker = "T"
bratEventMarker = "E"
bratAttMarker = "A"
bratAttMarkerBack = "M" # for backward compatibility
bratRelationMarker = "R"
outputCommentMarker = "#"
outputBodMarker = "#BeginOfDocument" # mark begin of a document
outputEodMarker = "#EndOfDocument" # mark end of a document
outputRelationMarker = "@" # append before relation
coreference_relation_name = "Coreference" # mark coreference
coreference_cluster_prefix = "C"
missingAttributePlaceholder = "NOT_ANNOTATED"
text_bounds = {} # all text bounds
events = {} # all events
atts = {} # all attributes
rels = {} # all relations
out = "converted"
out_ext = ".tbf" # short for token based format
engine_id = "brat_conversion"
inner_span_joiner = ","
inter_span_joiner = ";"
# brat_annotation_ext = ".tkn.ann"
# token_offset_ext = ".txt.tab"  # according to LDC2014R55
brat_annotation_ext = ".ann"
token_offset_ext = ".tab" # Simplest extensions
token_offset_fields = [2, 3]
annotation_on_source = False
logger = logging.getLogger()
def main():
global out
global out_ext
global engine_id
global annotation_on_source
global token_offset_ext
global brat_annotation_ext
global token_offset_fields
parser_description = (
"This converter converts Brat annotation files to one single token based event mention description file. It "
"accepts a single file name or a directory name that contains the Brat annotation output. If in toke mode, "
"the converter also requires token offset files that shares the same name with the annotation file, with "
"extension %s. The converter will search for the token file in the directory specified by '-t' argument"
% token_offset_ext)
parser = argparse.ArgumentParser(description=parser_description)
# Required arguments first.
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-d", "--dir", help="directory of the annotation files")
group.add_argument("-f", "--file", help="name of one annotation file")
# Optional arguments now.
parser.add_argument("-t", "--token_path", help="provide directory to search for the corresponding token files if "
"you use the token based format.")
parser.add_argument(
"-o", "--out",
help="output path, '" + out + "' in the current path by default")
parser.add_argument(
"-oe", "--ext",
help="output extension, '" + out_ext + "' by default")
parser.add_argument(
"-i", "--eid",
help="an engine id that will appears at each line of the output "
"file. '" + engine_id + "' will be used by default")
parser.add_argument(
"-w", "--overwrite", help="force overwrite existing output file",
action='store_true')
parser.add_argument(
"-of", "--offset_field", help="A pair of integer indicates which column we should "
"read the offset in the token mapping file, index starts"
"at 0, default value will be %s" % token_offset_fields
)
parser.add_argument(
"-te", "--token_table_extension",
help="any extension appended after docid of token table files. "
"Default is " + token_offset_ext)
parser.add_argument(
"-ae", "--annotation_extension",
help="any extension appended after docid of annotation files. "
"Default is " + brat_annotation_ext)
parser.add_argument(
"-b", "--debug", help="turn debug mode on", action="store_true")
parser.set_defaults(debug=False)
args = parser.parse_args()
stream_handler = logging.StreamHandler(sys.stderr)
logger.addHandler(stream_handler)
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
stream_handler.setFormatter(logging.Formatter('[%(levelname)s] %(asctime)s : %(message)s'))
if args.token_path is not None:
if not os.path.isdir(args.token_path):
logger.error("Token directory does not exists (or is not a directory) \n\n")
parser.print_help()
sys.exit(1)
else:
logger.info("Token directory not provided, will generate character based format.")
if args.token_table_extension is not None:
token_offset_ext = args.token_table_extension
if args.annotation_extension is not None:
brat_annotation_ext = args.annotation_extension
if args.offset_field is not None:
try:
token_offset_fields = [int(x) for x in args.offset_field.split(",")]
except ValueError as _:
logger.error("Should provide two integer with comma in between")
# Set default value to optional arguments.
if args.out is not None:
out = args.out
if args.ext is not None:
out_ext = args.ext
if args.eid is not None:
engine_id = args.eid
# Ensure output directory exists.
try:
head, tail = os.path.split(out)
if head != "":
os.makedirs(head)
except OSError:
(t, v, trace) = sys.exc_info()
if v.errno != errno.EEXIST:
raise
out_path = out + out_ext
if not args.overwrite and os.path.isfile(out_path):
logger.error(
"Output path [%s] already exists, "
"use '-w' flag to force overwrite" % out_path)
sys.exit(1)
out_file = open(out_path, 'w')
if args.dir is not None:
# parse directory
count = 0
for f in os.listdir(args.dir):
if f.endswith(brat_annotation_ext):
parse_annotation_file(
os.path.join(args.dir, f), args.token_path, out_file)
count += 1
logger.info("Finish converting %d files" % count)
elif args.file is not None:
# parse one annotation file
if args.file.endswith(brat_annotation_ext):
parse_annotation_file(args.file, args.token_path, out_file)
else:
logger.error("No annotations provided\n")
def clear():
text_bounds.clear() # all text bounds
events.clear() # all events
atts.clear() # all attributes
rels.clear() # all relations
def chop(s, begin):
if s.startswith(begin):
return s[len(begin):]
return s
def rchop(s, ending):
if s.endswith(ending):
return s[:-len(ending)]
return s
def parse_annotation_file(file_path, token_dir, of):
# Otherwise use the provided directory to search for it.
basename = os.path.basename(file_path)
logger.debug("Processing file " + basename)
is_token_mode = token_dir is not None
if os.path.isfile(file_path):
f = open(file_path)
text_id = rchop(os.path.basename(f.name), brat_annotation_ext)
logger.debug("Document id is " + text_id)
read_all_anno(f)
# Match from text bound to token ids.
        if is_token_mode:
            token_path = os.path.join(token_dir, basename[:-len(brat_annotation_ext)] + token_offset_ext)
            if not os.path.isfile(token_path):
                # Without the token file we cannot map text bounds to tokens for this document.
                logger.error("Token file %s not found, skipping document %s" % (token_path, basename))
                clear()
                return
            token_file = open(token_path)
            text_bound_id_2_token_id = get_text_bound_2_token_mapping(token_file)
eids = events.keys()
eids.sort(key=natural_order)
eid2sorted_tokens = {}
# Check relations.
filtered_rels = {}
for rel_name, relations in rels.iteritems():
filtered = []
for rel_id, a1, a2 in relations:
if a1 not in eids:
logger.warning("Removing relations %s with invented mention [%s] from doc %s."
% (rel_id, a1, basename))
continue
elif a2 not in eids:
logger.warning("Removing relations %s with invented mention [%s] from doc %s."
% (rel_id, a2, basename))
continue
else:
filtered.append((rel_id, a1, a2))
filtered_rels[rel_name] = filtered
# write begin of document
of.write(outputBodMarker + " " + text_id + "\n")
# write each mention in a line
for eid in eids:
event_type = events[eid][0][0]
text_bound_id = events[eid][0][1]
realis_status = missingAttributePlaceholder
if eid in atts:
att = atts[eid]
if "Realis" in att:
realis_status = att["Realis"][1]
text_bound = text_bounds[text_bound_id]
if is_token_mode and text_bound_id not in text_bound_id_2_token_id:
logger.warning("Cannot find corresponding token for text bound [%s] - [%s] in document [%s]" %
(text_bound_id, text_bound[2], text_id))
logger.warning("The corresponding text bound will be ignored")
continue
if is_token_mode:
span_tuple = tuple(sorted(text_bound_id_2_token_id[text_bound_id], key=natural_order))
span_str = inner_span_joiner.join(span_tuple)
else:
span_tuple = tuple(text_bounds[text_bound_id][1])
span_str = inter_span_joiner.join(
str(span[0]) + inner_span_joiner + str(span[1]) for span in span_tuple)
text = text_bound[2]
eid2sorted_tokens[eid] = tuple(span_tuple)
of.write("%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % (
engine_id, text_id, eid, span_str, text, event_type, realis_status))
for rel_name, relations in filtered_rels.iteritems():
if rel_name != coreference_relation_name:
for relation in relations:
if relation[1] in eids and relation[2] in eids:
of.write(
"%s%s\t%s\t%s,%s\n" % (
outputRelationMarker, rel_name, relation[0], relation[1],
relation[2]))
else:
logger.debug("Resolving coreference")
resolved_coref_chains = resolve_transitive_closure_and_duplicates(relations, eid2sorted_tokens)
for chain in resolved_coref_chains:
of.write("%s%s\t%s%s\t%s\n" % (
outputRelationMarker, rel_name, coreference_cluster_prefix, chain[0], ",".join(chain[1])))
# write end of sentence
of.write(outputEodMarker + "\n")
else:
# the missing file will be skipped but others will still be done
logger.error("Annotation path %s not found. Will still try to process other annotation files." % file_path)
clear()
def transitive_merge(clusters):
merged_clusters = []
merged_i = -1
merged_j = -1
for i in range(0, len(clusters) - 1):
for j in range(i + 1, len(clusters)):
if len(clusters[i].intersection(clusters[j])) != 0:
union = clusters[i].union(clusters[j])
merged_clusters.append(union)
merged_i = i
merged_j = j
break
else:
continue
break
if merged_i != -1:
merged_clusters.extend(clusters[: merged_i])
merged_clusters.extend(clusters[merged_i + 1: merged_j])
merged_clusters.extend(clusters[merged_j + 1:])
else:
merged_clusters.extend(clusters)
return merged_clusters
def natural_order(key):
convert = lambda text: int(text) if text.isdigit() else text
return [convert(c) for c in re.split('([0-9]+)', key)]
def resolve_transitive_closure_and_duplicates(coref_relations, eid2span):
"""
    Resolve the transitive closure of the coreference relations and remove mentions with duplicate spans.
    :param coref_relations: Raw coreference relations read from the annotation file.
    :param eid2span: Map from event id to its span representation.
    :return: A list of (cluster id, deduplicated mentions) pairs.
"""
clusters = []
for coref_rel in coref_relations:
mention1 = coref_rel[1]
mention2 = coref_rel[2]
for raw_cluster_mentions in clusters:
if mention1 in raw_cluster_mentions or mention2 in raw_cluster_mentions:
raw_cluster_mentions.add(mention1)
raw_cluster_mentions.add(mention2)
break
else:
new_cluster = {mention1, mention2}
clusters.append(new_cluster)
while True:
merged = transitive_merge(clusters)
if len(merged) == len(clusters):
# if no new merges found
break
clusters = merged
# add some cluster id and check for mention span duplicates
clusters_with_id = []
id = 0
for raw_cluster_mentions in clusters:
cluster_span_control = set()
deduplicated_cluster_mentions = []
for mention in raw_cluster_mentions:
span = eid2span[mention]
if span not in cluster_span_control:
deduplicated_cluster_mentions.append(mention)
else:
logger.warning("Removing duplicated annotations from annotation file: [%s]" % mention)
cluster_span_control.add(span)
clusters_with_id.append((id, deduplicated_cluster_mentions))
id += 1
return clusters_with_id
def get_text_bound_2_token_mapping(token_file):
text_bound_id_2_token_id = {}
is_first_line = True
for tokenLine in token_file:
# We assume no whitespaces within fields.
fields = tokenLine.rstrip().split("\t")
if len(fields) <= token_offset_fields[1]:
if is_first_line:
# The first one might just be a header.
logger.info("Ignoring the token file header.")
else:
logger.error("Token files only have %s fields, are you setting "
"the correct token offset fields?" % len(fields))
exit(1)
is_first_line = False
# Important! we need to make sure that which offsets we are based on.
token_span = (int(fields[token_offset_fields[0]]), int(fields[token_offset_fields[1]]))
# One token maps to multiple text bound is possible
for text_bound_id in find_corresponding_text_bound(token_span):
if text_bound_id not in text_bound_id_2_token_id:
text_bound_id_2_token_id[text_bound_id] = []
text_bound_id_2_token_id[text_bound_id].append(fields[0])
return text_bound_id_2_token_id
def find_corresponding_text_bound(token_span):
text_bound_ids = []
for text_bound_id, text_bound in text_bounds.iteritems():
for ann_span in text_bound[1]:
if covers(ann_span, token_span):
text_bound_ids.append(text_bound_id)
elif covers(token_span, ann_span):
text_bound_ids.append(text_bound_id)
return text_bound_ids
def covers(covering_span, covered_span):
if covering_span[0] <= covered_span[0] and covering_span[1] >= covered_span[1]:
return True
return False
def parse_span(all_span_str):
span_strs = all_span_str.split(";")
spans = []
for span_str in span_strs:
span = span_str.split()
spans.append((int(span[0]), int(span[1])))
return spans
def parse_text_bound(fields):
if len(fields) != 3:
logger.error(
"Incorrect number of fields in a text bound annotation, the process will try "
"to continue but you should check the ann input.")
logger.error(fields)
tid = fields[0]
type_span = fields[1].split(" ", 1)
tb_type = type_span[0]
spans = parse_span(type_span[1])
if len(fields) < 3:
return tid, (tb_type, spans, "-") # partial hack to avoid incorrect field problem.
text = fields[2]
return tid, (tb_type, spans, text)
def parse_event(fields):
eid = fields[0]
trigger_and_roles = fields[1].split()
trigger = trigger_and_roles[0].split(":")
roles = []
for rolesStr in trigger_and_roles[1:]:
role = rolesStr.split(":")
roles.append(role)
return eid, (trigger, roles)
def parse_attribute(fields):
aid = fields[0]
value = fields[1].split()
att_name = value[0]
target_id = value[1]
target_value = True # binary
if len(value) == 3: # multi-valued
target_value = value[2]
return aid, target_id, att_name, target_value
def parse_relation(fields):
"""
    Assumes all relations are binary; argument names are discarded.
    :param fields: fields corresponding to one Brat line, separated by tabs
:return: relation id, relation name, arg1 and arg2
"""
rel, a1, a2 = fields[1].split(" ")
rel_id = fields[0]
return rel_id, rel, a1.split(":")[1], a2.split(":")[1]
def read_all_anno(f):
for line in f:
        if line.startswith(outputCommentMarker):
            # Skip comment lines.
            continue
fields = line.rstrip().split("\t", 2)
if line.startswith(bratSpanMarker):
text_bound = parse_text_bound(fields)
text_bounds[text_bound[0]] = text_bound[1]
if line.startswith(bratEventMarker):
event = parse_event(fields)
events[event[0]] = event[1]
if line.startswith(bratAttMarker) or line.startswith(bratAttMarkerBack):
aid, target_id, att_name, target_value = parse_attribute(fields)
if target_id in atts:
atts[target_id][att_name] = (aid, target_value)
else:
atts[target_id] = {}
atts[target_id][att_name] = (aid, target_value)
if line.startswith(bratRelationMarker):
rel_id, rel_name, a1, a2 = parse_relation(fields)
if rel_name not in rels:
rels[rel_name] = []
rels[rel_name].append((rel_id, a1, a2))
if __name__ == "__main__":
main()
|
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { MatCardTitle, MatCardContent ,MatCard } from '@angular/material';
import {FormBuilder, FormGroup, Validators ,ReactiveFormsModule,FormsModule} from '@angular/forms';
import { Router, RouterModule } from '@angular/router';
import { RouterTestingModule } from '@angular/router/testing';
import {Http, HttpModule} from '@angular/http';
import { SettingsComponent } from './settings.component';
import {PsersonService} from './person.service';
import { UserService } from '../providers/user.service';
describe('SettingsComponent', () => {
let component: SettingsComponent;
let fixture: ComponentFixture<SettingsComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
imports: [FormsModule,ReactiveFormsModule,RouterModule,RouterTestingModule, HttpModule],
declarations: [ SettingsComponent, MatCardTitle, MatCardContent ,MatCard],
providers : [PsersonService, UserService]
})
.compileComponents();
}));
beforeEach(() => {
// fixture = TestBed.createComponent(SettingsComponent);
// component = fixture.componentInstance;
// fixture.detectChanges();
});
it('should create', () => {
expect(true).toBeTruthy();
});
it('should create user profile data', () => {
expect(true).toBeTruthy();
});
it('change profile data', () => {
expect(true).toBeTruthy();
});
it('invalid data check', () => {
expect(true).toBeTruthy();
});
});
|
package types
import (
"testing"
)
func TestCoins(t *testing.T) {
coins := Coins{
Coin{"GAS", 1},
Coin{"MINERAL", 1},
Coin{"TREE", 1},
}
	if !coins.IsValid() {
		t.Fatal("Expected coins to be valid")
	}
if !coins.IsPositive() {
t.Fatalf("Expected coins to be positive: %v", coins)
}
negCoins := coins.Negative()
if negCoins.IsPositive() {
t.Fatalf("Expected neg coins to not be positive: %v", negCoins)
}
sumCoins := coins.Plus(negCoins)
if len(sumCoins) != 0 {
t.Fatal("Expected 0 coins")
}
}
func TestCoinsBadSort(t *testing.T) {
coins := Coins{
Coin{"TREE", 1},
Coin{"GAS", 1},
Coin{"MINERAL", 1},
}
if coins.IsValid() {
t.Fatal("Coins are not sorted")
}
}
func TestCoinsBadAmount(t *testing.T) {
coins := Coins{
Coin{"GAS", 1},
Coin{"TREE", 0},
Coin{"MINERAL", 1},
}
if coins.IsValid() {
t.Fatal("Coins cannot include 0 amounts")
}
}
func TestCoinsDuplicate(t *testing.T) {
coins := Coins{
Coin{"GAS", 1},
Coin{"GAS", 1},
Coin{"MINERAL", 1},
}
if coins.IsValid() {
t.Fatal("Duplicate coin")
}
}
|
use strict;
use warnings;
use utf8;
use Test::More;
use Kossy;
subtest 'JSON' => sub {
is_deeply make_request('application/json', '{"hoge":"fuga"}')->parameters()->as_hashref_multi, {
hoge => ['fuga'],
};
};
subtest 'UrlEncoded' => sub {
is_deeply make_request('application/x-www-form-urlencoded', 'xxx=yyy')->parameters()->as_hashref_multi, {
xxx => ['yyy'],
};
};
subtest 'MultiPart' => sub {
my $content = <<'...';
--BOUNDARY
Content-Disposition: form-data; name="xxx"
Content-Type: text/plain
yyy
--BOUNDARY
Content-Disposition: form-data; name="yappo"; filename="osawa.txt"
Content-Type: text/plain
SHOGUN
--BOUNDARY--
...
$content =~ s/\r\n/\n/g;
$content =~ s/\n/\r\n/g;
my $req = make_request('multipart/form-data; boundary=BOUNDARY', $content);
is_deeply $req->parameters()->as_hashref_multi, {
xxx => ['yyy'],
};
is slurp($req->upload('yappo')), 'SHOGUN';
is $req->upload('yappo')->filename, 'osawa.txt';
isa_ok $req->upload('yappo')->headers, 'HTTP::Headers';
};
subtest 'OctetStream' => sub {
my $content = 'hogehoge';
my $req = make_request('application/octet-stream', $content);
is $req->content, 'hogehoge';
is 0+($req->parameters->keys), 0;
is 0+($req->uploads->keys), 0;
};
done_testing;
sub make_request {
my ($content_type, $content) = @_;
open my $input, '<', \$content;
my $req = Kossy::Request->new(
+{
'psgi.input' => $input,
CONTENT_TYPE => $content_type,
CONTENT_LENGTH => length($content),
QUERY_STRING => '',
'kossy.request.parse_json_body' => 1,
},
);
return $req;
}
sub slurp {
my $up = shift;
open my $fh, "<", $up->path or die "$!";
scalar do { local $/; <$fh> };
}
|
export const isNotEmpty = (
  input: string,
message: string = 'Required',
): string | undefined => (!input ? message : undefined)
export const returnFilter = (
oldMap: {[key: number]: string},
selectedIdentifier: number,
selectedValue: string,
): {[key: number]: string} => {
let filterMap = {
...oldMap,
}
if (!!filterMap[selectedIdentifier]) {
delete filterMap[selectedIdentifier]
} else {
filterMap[selectedIdentifier] = selectedValue
}
return filterMap
}
|
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using DevFramework.Core.Infrastructure.Domain;
namespace DevFramework.Domain.Model
{
public class Request : BaseEntityWithTypeId<long>
{
public int UserId { get; set; }
public long DistanceId { get; set; }
public User User { get; set; }
public Distance Distance { get; set; }
}
} |
package com.angcyo.widget.text
import android.content.Context
import android.graphics.Canvas
import android.util.AttributeSet
import android.view.MotionEvent
import android.view.View
import androidx.annotation.ColorInt
import androidx.appcompat.widget.AppCompatTextView
import com.angcyo.widget.R
import com.angcyo.widget.base.spans
import com.angcyo.widget.span.IDrawableSpan
import com.angcyo.widget.span.IWeightSpan
/**
 * Custom Span support class
* Email:[email protected]
* @author angcyo
* @date 2020/01/08
*/
open class DslSpanTextView : AppCompatTextView {
    //extra states for drawables
val _extraState = mutableListOf<Int>()
var isInitExtraState: Boolean = false
var maxLineDelegate = MaxLineDelegate()
constructor(context: Context) : super(context) {
initAttribute(context, null)
}
constructor(context: Context, attrs: AttributeSet?) : super(context, attrs) {
initAttribute(context, attrs)
}
constructor(context: Context, attrs: AttributeSet?, defStyleAttr: Int) : super(
context,
attrs,
defStyleAttr
) {
initAttribute(context, attrs)
}
private fun initAttribute(context: Context, attributeSet: AttributeSet?) {
val typedArray = context.obtainStyledAttributes(attributeSet, R.styleable.DslSpanTextView)
maxLineDelegate.apply {
maxShowLine = typedArray.getInt(R.styleable.DslSpanTextView_r_max_line, maxShowLine)
moreText = typedArray.getString(R.styleable.DslSpanTextView_r_more_text) ?: moreText
moreTextColor =
typedArray.getColor(R.styleable.DslSpanTextView_r_more_text_color, moreTextColor)
foldTextColor =
typedArray.getColor(R.styleable.DslSpanTextView_r_fold_text_color, foldTextColor)
foldText = typedArray.getString(R.styleable.DslSpanTextView_r_fold_text) ?: foldText
installSpanClickMethod = typedArray.getBoolean(
R.styleable.DslSpanTextView_r_install_span_click_method,
installSpanClickMethod
)
setMaxShowLine(this@DslSpanTextView, maxShowLine)
}
typedArray.recycle()
}
override fun onLayout(changed: Boolean, left: Int, top: Int, right: Int, bottom: Int) {
super.onLayout(changed, left, top, right, bottom)
maxLineDelegate.checkMaxShowLine(this)
}
override fun onSizeChanged(w: Int, h: Int, oldw: Int, oldh: Int) {
super.onSizeChanged(w, h, oldw, oldh)
}
    /**Set the maximum number of lines to show*/
fun setMaxShowLine(line: Int) {
maxLineDelegate.setMaxShowLine(this, line)
}
    /**The original (unfolded) text*/
fun getOriginText(): CharSequence? =
if (isEnableFoldLine()) maxLineDelegate._originText else text
override fun onDraw(canvas: Canvas) {
super.onDraw(canvas)
}
override fun dispatchTouchEvent(event: MotionEvent?): Boolean {
return super.dispatchTouchEvent(event)
}
    /**This method is triggered while the system [TextView] is still being constructed, before this class's members are initialized, which would otherwise cause an NPE.*/
override fun setText(text: CharSequence?, type: BufferType?) {
val bufferType = if (isEnableFoldLine()) {
if (type == null || type == BufferType.NORMAL) {
BufferType.SPANNABLE
} else {
type
}
} else {
type
}
super.setText(text, bufferType)
}
    /**Whether fold-line (collapsed) display is enabled*/
fun isEnableFoldLine() = maxLineDelegate != null && maxLineDelegate.maxShowLine > 0
override fun onCreateDrawableState(extraSpace: Int): IntArray {
if (!isInitExtraState) {
return super.onCreateDrawableState(extraSpace)
}
val state = super.onCreateDrawableState(extraSpace + _extraState.size)
if (_extraState.isNotEmpty()) {
View.mergeDrawableStates(state, _extraState.toIntArray())
}
return state
}
override fun drawableStateChanged() {
super.drawableStateChanged()
val state = onCreateDrawableState(0)
        //propagate the drawable state to embedded spans
spans { _, span ->
if (span is IDrawableSpan) {
span.setDrawableState(state)
}
}
}
fun setDrawableColor(@ColorInt color: Int) {
        //set the color of embedded spans
spans { _, span ->
if (span is IDrawableSpan) {
span.setDrawableColor(color)
}
}
invalidate()
}
    /**Add an extra drawable state*/
fun addDrawableState(state: Int) {
isInitExtraState = true
_extraState.add(state)
refreshDrawableState()
}
override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) {
val widthSize = MeasureSpec.getSize(widthMeasureSpec)
val widthMode = MeasureSpec.getMode(widthMeasureSpec)
val heightSize = MeasureSpec.getSize(heightMeasureSpec)
val heightMode = MeasureSpec.getMode(heightMeasureSpec)
_measureWeightSpan(widthSize, heightSize)
super.onMeasure(widthMeasureSpec, heightMeasureSpec)
}
fun _measureWeightSpan(widthSize: Int, heightSize: Int) {
        //provide weight support for embedded spans
spans { _, span ->
if (span is IWeightSpan) {
val width = widthSize - paddingLeft - paddingRight
val height = heightSize - paddingTop - paddingBottom
span.onMeasure(width, height)
}
}
}
} |
package com.frogsquare.firebase
object Common {
const val RC_GOOGLE: Int = 0x0004
const val RC_FACEBOOK: Int = 0x0005
const val INTENT_REQUEST_ID: Int = 0x0006
const val NOTIFICATION_REQUEST_ID: Int = 0x0007
const val PROGRESS_NOTIFICATION_ID: Int = 0x0008
const val COMPLETE_NOTIFICATION_ID: Int = 0x0009
const val DEFAULT_CHANNEL_ID: String = "default"
const val SHARED_PREFERENCE_NAME: String = "GDFirebase.sharedPreferences"
}
|
# arctan.py
import numpy as np
import torch
from torch import tensor
from adpulses import io, optimizers, metrics, penalties
if __name__ == "__main__":
import sys
if len(sys.argv) <= 1: # mode DEBUG
import os
os.chdir(os.path.dirname(os.path.abspath(__file__)))
m2pName = ('m2p.mat' if len(sys.argv) <= 1 else sys.argv[1])
p2mName = ('p2m.mat' if len(sys.argv) <= 2 else sys.argv[2])
gpuID = ('0' if len(sys.argv) <= 3 else sys.argv[3])
# %% load
if gpuID == '-1':
device, dtype = torch.device('cpu'), torch.float32
else:
device, dtype = torch.device('cuda:'+gpuID), torch.float32
target, cube, pulse, arg = io.m2p(m2pName, device=device, dtype=dtype)
def dflt_arg(k, v, fn):
return (fn(k) if ((k in arg.keys()) and (arg[k].size > 0)) else v)
f_c2r_np = lambda x, a: np.stack((x.real, x.imag), axis=a) # noqa:E731
f_t = (lambda x, device=device, dtype=dtype:
tensor(x[None, ...], device=device, dtype=dtype)) # noqa:E731
arg['doRelax'] = dflt_arg('doRelax', True, lambda k: bool(arg[k].item()))
b1Map = dflt_arg('b1Map', None, lambda k: f_t(f_c2r_np(arg[k], -2)))
b1Map_ = dflt_arg('b1Map_', None, lambda k: f_t(f_c2r_np(arg[k], -2)))
assert ((b1Map_ is None) or (b1Map is None))
arg['b1Map_'] = (b1Map_ if b1Map is None else cube.extract(b1Map))
arg['niter'] = dflt_arg('niter', 8, lambda k: arg[k].item())
arg['niter_gr'] = dflt_arg('niter_gr', 2, lambda k: arg[k].item())
arg['niter_rf'] = dflt_arg('niter_rf', 2, lambda k: arg[k].item())
eta = dflt_arg('eta', 4, lambda k: float(arg[k].item()))
print('eta: ', eta)
err_meth = dflt_arg('err_meth', 'l2xy', lambda k: arg[k].item())
pen_meth = dflt_arg('pen_meth', 'l2', lambda k: arg[k].item())
err_hash = {'null': metrics.err_null, 'l2': metrics.err_l2,
'l2xy': metrics.err_l2xy, 'ml2xy': metrics.err_ml2xy,
'l2z': metrics.err_l2z}
pen_hash = {'null': penalties.pen_null, 'l2': penalties.pen_l2}
fn_err, fn_pen = err_hash[err_meth], pen_hash[pen_meth]
# %% pulse design
kw = {k: arg[k] for k in ('b1Map_', 'niter', 'niter_gr', 'niter_rf',
'doRelax')}
pulse, optInfos = optimizers.arctanLBFGS(target, cube, pulse,
fn_err, fn_pen, eta=eta, **kw)
# %% saving
io.p2m(p2mName, pulse, {'optInfos': optInfos})
|
{-# LANGUAGE Safe #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Protolude.Bool (
whenM
, unlessM
, ifM
, guardM
, bool
, (&&^)
, (||^)
, (<&&>)
, (<||>)
) where
import Data.Bool (Bool(..), (&&), (||))
import Data.Function (flip)
import Control.Applicative(Applicative, liftA2)
import Control.Monad (Monad, MonadPlus, return, when, unless, guard, (>>=), (=<<))
bool :: a -> a -> Bool -> a
bool f t p = if p then t else f
whenM :: Monad m => m Bool -> m () -> m ()
whenM p m =
p >>= flip when m
unlessM :: Monad m => m Bool -> m () -> m ()
unlessM p m =
p >>= flip unless m
ifM :: Monad m => m Bool -> m a -> m a -> m a
ifM p x y = p >>= \b -> if b then x else y
guardM :: MonadPlus m => m Bool -> m ()
guardM f = guard =<< f
-- | The '||' operator lifted to a monad. If the first
-- argument evaluates to 'True' the second argument will not
-- be evaluated.
infixr 2 ||^ -- same as (||)
(||^) :: Monad m => m Bool -> m Bool -> m Bool
(||^) a b = ifM a (return True) b
infixr 2 <||>
-- | '||' lifted to an Applicative.
-- Unlike '||^' the operator is __not__ short-circuiting.
(<||>) :: Applicative a => a Bool -> a Bool -> a Bool
(<||>) = liftA2 (||)
{-# INLINE (<||>) #-}
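-- Illustrative difference between the two lifts: in IO, @pure True ||^ act@
-- never runs @act@, whereas @pure True <||> act@ always does.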
-- | The '&&' operator lifted to a monad. If the first
-- argument evaluates to 'False' the second argument will not
-- be evaluated.
infixr 3 &&^ -- same as (&&)
(&&^) :: Monad m => m Bool -> m Bool -> m Bool
(&&^) a b = ifM a b (return False)
infixr 3 <&&>
-- | '&&' lifted to an Applicative.
-- Unlike '&&^' the operator is __not__ short-circuiting.
(<&&>) :: Applicative a => a Bool -> a Bool -> a Bool
(<&&>) = liftA2 (&&)
{-# INLINE (<&&>) #-}
|
---
name: Nocoiner Tears
image: /assets/images/nocoiner-tears-mug.jpg
category: Mug
link: https://mfmerch.com/products/nocoiner-tears-mug
price: $18.00
---
|
<?php
namespace Wardenyarn\MawiApi;
use Wardenyarn\MawiApi\Exceptions\MawiApiException;
trait Documents
{
public function getDocuments($params = [])
{
return $this->getAll('/integration/xml/documents', 'document', $params);
}
public function getDocument(int $id)
{
return $this->apiCall('/integration/xml/document', ['id' => $id]);
}
public function downloadDocument(int $id, $file_dir)
{
$file_dir = rtrim($file_dir, '/');
if (! is_writable($file_dir)) {
throw new MawiApiException(sprintf('Document download directory "%s" is not writable', $file_dir));
}
$document = $this->getDocument($id);
$remote_file = urldecode($document->href);
$filename = basename(explode('?id=', $remote_file)[0]);
$local_file = fopen($file_dir.'/'.$filename, 'w');
$body = $this->http->get($remote_file, ['sink' => $local_file]);
fclose($local_file);
return $body->getStatusCode() === 200;
}
} |
from django.urls import path
from results_viewer.views import get_results_values
urlpatterns = [
path('results/<int:id>/values/', get_results_values),
] |
create or replace function graphql.primary_key_columns(entity regclass)
returns text[]
language sql
immutable
as
$$
select
coalesce(array_agg(pg_attribute.attname::text order by attrelid asc), '{}')
from
pg_index
join pg_attribute
on pg_attribute.attrelid = pg_index.indrelid
and pg_attribute.attnum = any(pg_index.indkey)
where
pg_index.indrelid = entity
and pg_index.indisprimary
$$;
|
export interface Match {
confidence: number;
name: string;
lang: string;
}
// @ts-ignore
export default (det, rec, confidence, name, lang): Match => ({
confidence,
name: name || rec.name(det),
lang,
});
|
/*=========================================================================
Program: ParaView
Module: pqContextMenuInterface.h
Copyright (c) 2005,2006 Sandia Corporation, Kitware Inc.
All rights reserved.
ParaView is a free software; you can redistribute it and/or modify it
under the terms of the ParaView license version 1.2.
See License_v1.2.txt for the full ParaView license.
A copy of this license can be obtained by contacting
Kitware Inc.
28 Corporate Drive
Clifton Park, NY 12065
USA
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
========================================================================*/
#ifndef pqContextMenuInterface_h
#define pqContextMenuInterface_h
#include "pqComponentsModule.h"
#include <QList>
#include <QObject>
/**
* @class pqContextMenuInterface
* @brief Interface class for plugins that add a context menu to pqViews.
*
* pqContextMenuInterface is the interface which plugins adding a context menu
* to pqViews should implement. One would typically use the `add_paraview_context_menu`
* CMake macro to ensure an instance of the class is created and registered with
* the pqPipelineContextMenuBehavior class (which is responsible for creating
* the context menu).
*/
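/*
 * Illustrative sketch only (not taken from the ParaView documentation): a
 * plugin class would typically derive from QObject and this interface,
 * declare Q_INTERFACES(pqContextMenuInterface), and implement something like
 *
 *   bool contextMenu(QMenu* menu, pqView*, const QPoint&, pqRepresentation*,
 *     const QList<unsigned int>&) const override
 *   {
 *     menu->addAction("My plugin action");
 *     return false; // let lower-priority interfaces contribute as well
 *   }
 *
 * together with an override of priority() to control ordering.
 */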
class QMenu;
class pqView;
class pqRepresentation;
class PQCOMPONENTS_EXPORT pqContextMenuInterface
{
public:
pqContextMenuInterface();
virtual ~pqContextMenuInterface();
/// This method is called when a context menu is requested,
/// usually by a right click in a pqView instance.
///
/// This method should return true if (a) the context is one
/// handled by this instance (and presumably it will modify
/// the provided QMenu); and (b) this instance should be the
/// last interface to contribute to the menu.
/// Returning false indicates the context is not one this
/// instance handles *or* that interfaces with a lower priority
/// may modify the menu.
///
/// Each registered interface is called in order of descending
/// priority until one returns true, so your implementation
/// should return false as quickly as possible.
///
/// If dataContext is a pqDataRepresentation and holds
/// multiblock data, the dataBlockContext is a list of
/// block IDs to which the menu actions should apply.
virtual bool contextMenu(QMenu* menu, pqView* viewContext, const QPoint& viewPoint,
pqRepresentation* dataContext, const QList<unsigned int>& dataBlockContext) const = 0;
/// This method's return value is used to set the precedence of the interface.
///
/// Interfaces with greater priority are invoked before others and may cause
/// menu-building to terminate early.
/// ParaView's default context-menu interface uses a priority of 0 and returns false.
///
/// If you wish to modify the default menu, assign a negative priority to your interface.
/// If you wish to override the default menu, assign a positive priority to your interface
/// and have `contextMenu()` return true.
virtual int priority() const { return -1; }
private:
Q_DISABLE_COPY(pqContextMenuInterface)
};
Q_DECLARE_INTERFACE(pqContextMenuInterface, "com.kitware/paraview/contextmenu")
#endif
|
# Vault-Ecto Sample
## Overview
This repo keeps track of the latest state of using Hashicorp Vault as
the source of short-term credentials for a Postgres database that is then used by
Ecto in an Elixir application.
## Components
### docker-compose.yml
The easiest way to ensure consistent environment for both Vault and Postgres is
to create a Docker network and containers for both.
### Bootstrap Configuration Scripts
We need to bootstrap Vault to 1) connect to Postgres, 2) issue short-lived
credentials, 3) revoke credentials. The standard Hashicorp Vault docs have
[instructions](https://learn.hashicorp.com/vault/developer/sm-dynamic-secrets)
but, here, we're just interested in ensuring it's configured.
### Sample Elixir + Ecto Application
We need a test bed for experimenting with Ecto using the limited credentials
issued by Vault.
## Latest state of Ecto/Vault readiness
### `ecto.create`
Since we're given a temporary username and password, and we want to ensure the
database is accessible each time, we should pre-create the database rather than
using `mix ecto.create` to do so. Later runs of `mix ecto.create` should
indicate that the database is already created.
### `ecto.drop`
Since we have a temporary username and password, we don't want it to have
permissions to drop the database. Deleting the database should be a deliberate
action outside of our application. Running `mix ecto.drop` should fail.
### `ecto.migrate`
Not yet tested.
### `Repo.init/2`
We have to talk to Vault in the `Repo.init/2` to get the temporary credentials.
We currently have a hack in place to ensure that when we run the Ecto mix tasks
that the necessary dependencies are started to ensure we _can_ talk to Vault.
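The shape of that callback is roughly the following (a minimal sketch; the
Vault client module and secret path are placeholders, not the names used in
this repo):

    def init(_type, config) do
      {:ok, creds} = MyApp.Vault.read("database/creds/my-role")
      {:ok, Keyword.merge(config, username: creds["username"], password: creds["password"])}
    end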
### connection pool
We have some issues here. We don't have a clean way to drain the connection pool,
re-retrieve the configuration from `Repo.init/2`, and then spin the connection
pool back up. Early evidence indicates the connections can stay live past the
password expiration, but the pool worker will fail when it attempts to reconnect
and will never re-request its configuration.
## Running
Set up the Vault and Postgres environments with
bin/setup
|
using System;
using Foundation;
using JavaScriptCore;
namespace XamarinMac {
[Protocol()]
interface IJsExporter: IJSExport {
[Export("fetchMountedVolumes:")]
void FetchMountedVolumes(JSValue jsOptions);
}
public class MyNativeBridge: NSObject, IJsExporter {
JSContext jsContext { get; set; }
public MyNativeBridge(JSContext newContext) : base() {
jsContext = newContext;
}
public void FetchMountedVolumes(JSValue jsOptions) {
NSMutableArray volumes = GetMountedVolumes();
JSValue[] args = new JSValue[] {JSValue.From(volumes, jsContext)};
// not callable see Bug #17550
// https://bugzilla.xamarin.com/show_bug.cgi?id=17550
JSValue jsValueCallback = jsOptions.GetProperty("callback");
// work-around for #17550
JSValue workAroundCallback = jsContext[(NSString)"render"];
workAroundCallback.Call(args);
}
public NSMutableArray GetMountedVolumes() {
NSMutableArray volumeList = new NSMutableArray();
NSArray volKeys = NSArray.FromNSObjects(
NSUrl.VolumeLocalizedNameKey,
NSUrl.VolumeTotalCapacityKey,
NSUrl.VolumeAvailableCapacityKey,
NSUrl.VolumeIsBrowsableKey,
NSUrl.VolumeURLKey,
NSUrl.VolumeUUIDStringKey
);
NSFileManager fileManager = new NSFileManager();
NSUrl[] volumeUrls = fileManager.GetMountedVolumes(volKeys, NSVolumeEnumerationOptions.None);
NSByteCountFormatter byteFormatter = new NSByteCountFormatter();
byteFormatter.CountStyle = NSByteCountFormatterCountStyle.File;
foreach(NSUrl volumeUrl in volumeUrls) {
NSError volUrlError;
NSObject volName;
NSObject volIdentifer;
NSObject volBrowsable;
NSObject volBytesAvailable;
NSObject volBytesTotal;
volumeUrl.TryGetResource(NSUrl.VolumeLocalizedNameKey, out volName, out volUrlError);
volumeUrl.TryGetResource(NSUrl.VolumeURLKey, out volIdentifer, out volUrlError);
volumeUrl.TryGetResource(NSUrl.VolumeIsBrowsableKey, out volBrowsable, out volUrlError);
volumeUrl.TryGetResource(NSUrl.VolumeAvailableCapacityKey, out volBytesAvailable, out volUrlError);
volumeUrl.TryGetResource(NSUrl.VolumeTotalCapacityKey, out volBytesTotal, out volUrlError);
NSNumber volBytesAvailableNum = (NSNumber)volBytesAvailable;
NSNumber volBytesTotalNum = (NSNumber)volBytesTotal;
byteFormatter.IncludesUnit = false;
byteFormatter.IncludesCount = true;
var volBytesAvailableCount = byteFormatter.Format(volBytesAvailableNum.LongValue);
var volBytesTotalCount = byteFormatter.Format(volBytesTotalNum.LongValue);
byteFormatter.IncludesUnit = true;
byteFormatter.IncludesCount = false;
var volBytesAvailableUnit = byteFormatter.Format(volBytesAvailableNum.LongValue);
var volBytesTotalUnit = byteFormatter.Format(volBytesTotalNum.LongValue);
NSNumber browsable = (NSNumber)volBrowsable;
if (browsable.BoolValue) {
volumeList.Add(new NSDictionary(
"name", volName,
"id", volIdentifer,
"bytesAvailableCount", volBytesAvailableCount,
"bytesAvailableUnit", volBytesAvailableUnit,
"bytesTotalCount", volBytesTotalCount,
"bytesTotalUnit", volBytesTotalUnit
));
}
}
return volumeList;
}
}
}
|
<#PSScriptInfo
.VERSION 1.6
.GUID 32a11a36-f91c-4241-a11f-af0cf3e90f38
.AUTHOR [email protected]
.Edited Blesson John
.COMPANYNAME
.COPYRIGHT
.TAGS
.LICENSEURI
.PROJECTURI
.ICONURI
.EXTERNALMODULEDEPENDENCIES
.REQUIREDSCRIPTS
.EXTERNALSCRIPTDEPENDENCIES
.RELEASENOTES
#>
<#
.DESCRIPTION
This script is used during unattended installs or to download kubectl on Windows
#>
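# Illustrative invocation (the script name depends on how this file is saved):
#   .\get-kubectl.ps1 -Downloadlocation "$env:USERPROFILE\kubectl"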
param(
$Downloadlocation = $env:TEMP
)
if (!(Test-Path $Downloadlocation))
{
New-Item -ItemType Directory $Downloadlocation -ErrorAction SilentlyContinue | out-null
}
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::tls12
$uri = "https://kubernetes.io/docs/tasks/tools/install-kubectl/"
Write-Host -ForegroundColor White "==>Getting download link from $uri"
$req = Invoke-WebRequest -UseBasicParsing -Uri $uri
try
{
Write-Host -ForegroundColor White "==>analyzing Downloadlink"
$downloadlink = ($req.Links | where href -Match "kubectl.exe").href
}
catch
{
Write-Warning "Error Parsing Link"
Break
}
Write-Host -ForegroundColor White "==>starting Download from $downloadlink using Bitstransfer"
Start-BitsTransfer $downloadlink -DisplayName "Getting KubeCTL from $downloadlink" -Destination $Downloadlocation
$Downloadfile = Join-Path $Downloadlocation "kubectl.exe"
Unblock-File $Downloadfile
Write-Host -ForegroundColor White "==>starting '$Downloadfile version'"
.$Downloadfile version
$Kube_Local = New-Item -ItemType directory "$($HOME)/.kube" -force
Write-Host
Write-Host -ForegroundColor Magenta "You can now start kubectl from $Downloadfile
copy your remote kubernetes cluster information to $($Kube_Local.fullname)/config"
|
#include<iostream>
using namespace std;
class Node{
public:
int data;
Node* next;
Node(int d){
data =d;
next =NULL;
}
};
class stack{
public:
Node* head=NULL;
void push(int data){
Node* n = new Node(data);
n->next = head;
head =n;
}
Node* begin(){
return head;
}
    void pop(){
        if(empty()) return; // guard against popping an empty stack
        Node* temp = head;
        head = head->next;
        temp->next = NULL;
        delete temp;
    }
int top(){
return head->data;
}
    // Insert an element at the bottom (tail) of the stack.
    void bottom(int data){
        Node* n = new Node(data);
        if(empty()){ head = n; return; }
        Node* temp = begin();
        while(temp->next != NULL) temp = temp->next;
        temp->next = n;
    }
bool empty(){
return (head == NULL);
}
};
int main(){
stack s;
s.push(5);
s.push(4);
s.push(3);
s.push(6);
s.pop();
Node* temp = s.begin();
while(temp!=NULL){
cout<<temp->data<<" ";
temp=temp->next;
}
return 0;
} |
/* Javascript eval */
var myfoo = eval("{ foo: 42 }");
result = eval("4*10+2")==42 && myfoo.foo==42;
//eval shall not alter calling global scope
var myfoo2 = eval("var myfoo; myfoo = {x:'test'}");
assert (myfoo.foo == 42, "myfoo should not be altered");
assert (myfoo2.x == "test", "myfoo2.x == \"test\"");
|
/**
* The MIT License (MIT)
*
* Copyright (c) 2019 vk.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
// *********************************************************************
// THIS FILE IS AUTO GENERATED!
// DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING.
// *********************************************************************
package com.vk.sdk.api.polls
import com.google.gson.reflect.TypeToken
import com.vk.api.sdk.requests.VKRequest
import com.vk.dto.common.id.UserId
import com.vk.sdk.api.GsonHolder
import com.vk.sdk.api.NewApiRequest
import com.vk.sdk.api.base.dto.BaseBoolInt
import com.vk.sdk.api.base.dto.BaseOkResponse
import com.vk.sdk.api.base.dto.BaseUploadServer
import com.vk.sdk.api.polls.dto.PollsBackground
import com.vk.sdk.api.polls.dto.PollsCreateBackgroundId
import com.vk.sdk.api.polls.dto.PollsEditBackgroundId
import com.vk.sdk.api.polls.dto.PollsGetByIdNameCase
import com.vk.sdk.api.polls.dto.PollsGetVotersNameCase
import com.vk.sdk.api.polls.dto.PollsPoll
import com.vk.sdk.api.polls.dto.PollsVoters
import com.vk.sdk.api.users.dto.UsersFields
import kotlin.Boolean
import kotlin.Int
import kotlin.String
import kotlin.collections.List
class PollsService {
/**
* Adds the current user's vote to the selected answer in the poll.
*
* @param pollId - Poll ID.
* @param answerIds
* @param ownerId - ID of the user or community that owns the poll. Use a negative value to
* designate a community ID.
* @param isBoard
* @return [VKRequest] with [BaseBoolInt]
*/
fun pollsAddVote(
pollId: Int,
answerIds: List<Int>,
ownerId: UserId? = null,
isBoard: Boolean? = null
): VKRequest<BaseBoolInt> = NewApiRequest("polls.addVote") {
GsonHolder.gson.fromJson(it, BaseBoolInt::class.java)
}
.apply {
addParam("poll_id", pollId)
addParam("answer_ids", answerIds)
ownerId?.let { addParam("owner_id", it) }
isBoard?.let { addParam("is_board", it) }
}
/**
* Creates polls that can be attached to the users' or communities' posts.
*
* @param question - question text
* @param isAnonymous - '1' - anonymous poll, participants list is hidden,, '0' - public poll,
* participants list is available,, Default value is '0'.
* @param isMultiple
* @param endDate
* @param ownerId - If a poll will be added to a communty it is required to send a negative
* group identifier. Current user by default.
* @param appId
* @param addAnswers - available answers list, for example_ " ["yes","no","maybe"]", There can
* be from 1 to 10 answers.
* @param photoId
* @param backgroundId
* @param disableUnvote
* @return [VKRequest] with [PollsPoll]
*/
fun pollsCreate(
question: String? = null,
isAnonymous: Boolean? = null,
isMultiple: Boolean? = null,
endDate: Int? = null,
ownerId: UserId? = null,
appId: Int? = null,
addAnswers: String? = null,
photoId: Int? = null,
backgroundId: PollsCreateBackgroundId? = null,
disableUnvote: Boolean? = null
): VKRequest<PollsPoll> = NewApiRequest("polls.create") {
GsonHolder.gson.fromJson(it, PollsPoll::class.java)
}
.apply {
question?.let { addParam("question", it) }
isAnonymous?.let { addParam("is_anonymous", it) }
isMultiple?.let { addParam("is_multiple", it) }
endDate?.let { addParam("end_date", it) }
ownerId?.let { addParam("owner_id", it) }
appId?.let { addParam("app_id", it) }
addAnswers?.let { addParam("add_answers", it) }
photoId?.let { addParam("photo_id", it) }
backgroundId?.let { addParam("background_id", it.value) }
disableUnvote?.let { addParam("disable_unvote", it) }
}
/**
* Deletes the current user's vote from the selected answer in the poll.
*
* @param pollId - Poll ID.
* @param answerId - Answer ID.
* @param ownerId - ID of the user or community that owns the poll. Use a negative value to
* designate a community ID.
* @param isBoard
* @return [VKRequest] with [BaseBoolInt]
*/
fun pollsDeleteVote(
pollId: Int,
answerId: Int,
ownerId: UserId? = null,
isBoard: Boolean? = null
): VKRequest<BaseBoolInt> = NewApiRequest("polls.deleteVote") {
GsonHolder.gson.fromJson(it, BaseBoolInt::class.java)
}
.apply {
addParam("poll_id", pollId)
addParam("answer_id", answerId)
ownerId?.let { addParam("owner_id", it) }
isBoard?.let { addParam("is_board", it) }
}
/**
* Edits created polls
*
* @param pollId - edited poll's id
* @param ownerId - poll owner id
* @param question - new question text
* @param addAnswers - answers list, for example_ , "["yes","no","maybe"]"
* @param editAnswers - object containing answers that need to be edited,, key - answer id,
* value - new answer text. Example_ {"382967099"_"option1", "382967103"_"option2"}"
* @param deleteAnswers - list of answer ids to be deleted. For example_ "[382967099,
* 382967103]"
* @param endDate
* @param photoId
* @param backgroundId
* @return [VKRequest] with [BaseOkResponse]
*/
fun pollsEdit(
pollId: Int,
ownerId: UserId? = null,
question: String? = null,
addAnswers: String? = null,
editAnswers: String? = null,
deleteAnswers: String? = null,
endDate: Int? = null,
photoId: Int? = null,
backgroundId: PollsEditBackgroundId? = null
): VKRequest<BaseOkResponse> = NewApiRequest("polls.edit") {
GsonHolder.gson.fromJson(it, BaseOkResponse::class.java)
}
.apply {
addParam("poll_id", pollId)
ownerId?.let { addParam("owner_id", it) }
question?.let { addParam("question", it) }
addAnswers?.let { addParam("add_answers", it) }
editAnswers?.let { addParam("edit_answers", it) }
deleteAnswers?.let { addParam("delete_answers", it) }
endDate?.let { addParam("end_date", it) }
photoId?.let { addParam("photo_id", it) }
backgroundId?.let { addParam("background_id", it.value) }
}
/**
* @return [VKRequest] with [Unit]
*/
fun pollsGetBackgrounds(): VKRequest<List<PollsBackground>> =
NewApiRequest("polls.getBackgrounds") {
val typeToken = object: TypeToken<List<PollsBackground>>() {}.type
GsonHolder.gson.fromJson<List<PollsBackground>>(it, typeToken)
}
/**
* Returns detailed information about a poll by its ID.
*
* @param pollId - Poll ID.
* @param ownerId - ID of the user or community that owns the poll. Use a negative value to
* designate a community ID.
* @param isBoard - '1' - poll is in a board, '0' - poll is on a wall. '0' by default.
* @param friendsCount
* @param fields
* @param nameCase
* @return [VKRequest] with [PollsPoll]
*/
fun pollsGetById(
pollId: Int,
ownerId: UserId? = null,
isBoard: Boolean? = null,
friendsCount: Int? = null,
fields: List<String>? = null,
nameCase: PollsGetByIdNameCase? = null
): VKRequest<PollsPoll> = NewApiRequest("polls.getById") {
GsonHolder.gson.fromJson(it, PollsPoll::class.java)
}
.apply {
addParam("poll_id", pollId)
ownerId?.let { addParam("owner_id", it) }
isBoard?.let { addParam("is_board", it) }
friendsCount?.let { addParam("friends_count", it) }
fields?.let { addParam("fields", it) }
nameCase?.let { addParam("name_case", it.value) }
}
/**
* @param ownerId
* @return [VKRequest] with [BaseUploadServer]
*/
fun pollsGetPhotoUploadServer(ownerId: UserId? = null): VKRequest<BaseUploadServer> =
NewApiRequest("polls.getPhotoUploadServer") {
GsonHolder.gson.fromJson(it, BaseUploadServer::class.java)
}
.apply {
ownerId?.let { addParam("owner_id", it) }
}
/**
* Returns a list of IDs of users who selected specific answers in the poll.
*
* @param pollId - Poll ID.
* @param answerIds - Answer IDs.
* @param ownerId - ID of the user or community that owns the poll. Use a negative value to
* designate a community ID.
* @param isBoard
* @param friendsOnly - '1' - to return only current user's friends, '0' - to return all users
* (default),
* @param offset - Offset needed to return a specific subset of voters. '0' - (default)
* @param count - Number of user IDs to return (if the 'friends_only' parameter is not set,
* maximum '1000', otherwise '10'). '100' - (default)
* @param fields - Profile fields to return. Sample values_ 'nickname', 'screen_name', 'sex',
* 'bdate (birthdate)', 'city', 'country', 'timezone', 'photo', 'photo_medium', 'photo_big',
* 'has_mobile', 'rate', 'contacts', 'education', 'online', 'counters'.
* @param nameCase - Case for declension of user name and surname_ , 'nom' - nominative
* (default) , 'gen' - genitive , 'dat' - dative , 'acc' - accusative , 'ins' - instrumental ,
* 'abl' - prepositional
* @return [VKRequest] with [Unit]
*/
fun pollsGetVoters(
pollId: Int,
answerIds: List<Int>,
ownerId: UserId? = null,
isBoard: Boolean? = null,
friendsOnly: Boolean? = null,
offset: Int? = null,
count: Int? = null,
fields: List<UsersFields>? = null,
nameCase: PollsGetVotersNameCase? = null
): VKRequest<List<PollsVoters>> = NewApiRequest("polls.getVoters") {
val typeToken = object: TypeToken<List<PollsVoters>>() {}.type
GsonHolder.gson.fromJson<List<PollsVoters>>(it, typeToken)
}
.apply {
addParam("poll_id", pollId)
addParam("answer_ids", answerIds)
ownerId?.let { addParam("owner_id", it) }
isBoard?.let { addParam("is_board", it) }
friendsOnly?.let { addParam("friends_only", it) }
offset?.let { addParam("offset", it) }
count?.let { addParam("count", it) }
val fieldsJsonConverted = fields?.map {
it.value
}
fieldsJsonConverted?.let { addParam("fields", it) }
nameCase?.let { addParam("name_case", it.value) }
}
/**
* @param photo
* @param hash
* @return [VKRequest] with [PollsBackground]
*/
fun pollsSavePhoto(photo: String, hash: String): VKRequest<PollsBackground> =
NewApiRequest("polls.savePhoto") {
GsonHolder.gson.fromJson(it, PollsBackground::class.java)
}
.apply {
addParam("photo", photo)
addParam("hash", hash)
}
}
|
// @flow
const R = require('ramda');
const { PubSub, withFilter } = require('graphql-subscriptions');
const { AmqpPubSub } = require('graphql-rabbitmq-subscriptions');
const { ForbiddenError } = require('apollo-server-express');
const logger = require('../logger');
const sqlClient = require('./sqlClient');
const { query } = require('../util/db');
const environmentSql = require('../resources/environment/sql');
const rabbitmqHost = process.env.RABBITMQ_HOST || 'broker';
const rabbitmqUsername = process.env.RABBITMQ_USERNAME || 'guest';
const rabbitmqPassword = process.env.RABBITMQ_PASSWORD || 'guest';
/* eslint-disable class-methods-use-this */
class LoggerConverter {
child() {
return {
debug: logger.debug,
trace: logger.silly,
error: logger.error,
};
}
  error(...args) {
    return logger.error(...args);
  }
  debug(...args) {
    return logger.debug(...args);
  }
  trace(...args) {
    return logger.silly(...args);
  }
}
/* eslint-enable class-methods-use-this */
const pubSub = new AmqpPubSub({
config: `amqp://${rabbitmqUsername}:${rabbitmqPassword}@${rabbitmqHost}`,
logger: new LoggerConverter(),
});
const createEnvironmentFilteredSubscriber = (events: string[]) => ({
// Allow publish functions to pass data without knowledge of query schema.
resolve: (payload: Object) => payload,
subscribe: async (rootValue: any, args: any, context: any, info: any) => {
const { environment } = args;
const {
credentials: {
role,
permissions: { projects },
},
} = context;
const rows = await query(sqlClient, environmentSql.selectEnvironmentById(environment));
const project = R.path([0, 'project'], rows);
if (role !== 'admin' && !R.contains(String(project), projects)) {
throw new ForbiddenError(`No access to project ${project}.`);
}
const filtered = withFilter(
() => pubSub.asyncIterator(events),
(
payload,
variables,
) => payload.environment === String(variables.environment),
);
return filtered(rootValue, args, context, info);
},
});
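// Example usage (illustrative only; the real event names depend on the GraphQL schema):
// const backupSubscriber = createEnvironmentFilteredSubscriber(['backup.created']);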
module.exports = {
pubSub,
createEnvironmentFilteredSubscriber,
};
|
//*********************************************************************
//xCAD
//Copyright(C) 2020 Xarial Pty Limited
//Product URL: https://www.xcad.net
//License: https://xcad.xarial.com/license/
//*********************************************************************
using SolidWorks.Interop.sldworks;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Xarial.XCad;
using Xarial.XCad.Base;
using Xarial.XCad.Base.Enums;
using Xarial.XCad.Documents;
using Xarial.XCad.Enums;
using Xarial.XCad.Features;
using Xarial.XCad.Geometry;
using Xarial.XCad.Geometry.Primitives;
using Xarial.XCad.Geometry.Structures;
using Xarial.XCad.Geometry.Wires;
using Xarial.XCad.Sketch;
using Xarial.XCad.SolidWorks;
using Xarial.XCad.SolidWorks.Documents;
using Xarial.XCad.SolidWorks.Features;
using Xarial.XCad.SolidWorks.Geometry;
using Xarial.XCad.SolidWorks.Geometry.Curves;
using Xarial.XCad.Toolkit;
using Xarial.XCad.Toolkit.Utils;
namespace StandAlone
{
public class MyLogger : IXLogger
{
public void Log(string msg, LoggerMessageSeverity_e severity = LoggerMessageSeverity_e.Information)
{
}
}
class Program
{
static void Main(string[] args)
{
try
{
var app = SwApplicationFactory.Create(Xarial.XCad.SolidWorks.Enums.SwVersion_e.Sw2020,
ApplicationState_e.Default);
//var app = SwApplicationFactory.FromProcess(Process.GetProcessesByName("SLDWORKS").First());
//CustomServices();
//Progress(app);
//SketchSegmentColors(app);
//CreateDrawingView(app);
//var sw = Activator.CreateInstance(Type.GetTypeFromProgID("SldWorks.Application")) as ISldWorks;
//sw.Visible = true;
//var app = SwApplication.FromPointer(sw);
//CreateSketchEntities(app);
//TraverseSelectedFaces(app);
//CreateSweepFromSelection(app);
//CreateTempGeometry(app);
//CreateSweepFromSelection(app);
}
catch
{
}
Console.ReadLine();
}
private static void CustomServices()
{
var app = SwApplicationFactory.PreCreate();
var svcColl = new ServiceCollection();
svcColl.AddOrReplace<IXLogger, MyLogger>();
app.CustomServices = svcColl;
app.Commit();
}
private static void Progress(IXApplication app)
{
using (var prg = app.CreateProgress())
{
for (int i = 0; i < 100; i++)
{
prg.Report((double)i / 100);
prg.SetStatus(i.ToString());
System.Threading.Thread.Sleep(100);
}
}
}
private static void SketchSegmentColors(IXApplication app)
{
var seg = app.Documents.Active.Selections.First() as IXSketchSegment;
var color = seg.Color;
seg.Color = System.Drawing.Color.Purple;
}
private static void CreateDrawingView(IXApplication app)
{
var partDoc = app.Documents.Active as IXDocument3D;
var view = partDoc.ModelViews[StandardViewType_e.Right];
var drw = app.Documents.NewDrawing();
var drwView = drw.Sheets.Active.DrawingViews.CreateModelViewBased(view);
}
private static void CreateSketchEntities(IXApplication app)
{
var sketch3D = app.Documents.Active.Features.PreCreate3DSketch();
var line = (IXSketchLine)sketch3D.Entities.PreCreateLine();
line.Color = System.Drawing.Color.Green;
line.StartCoordinate = new Point(0.1, 0.1, 0.1);
line.EndCoordinate = new Point(0.2, 0.2, 0.2);
sketch3D.Entities.AddRange(new IXSketchEntity[] { line });
app.Documents.Active.Features.Add(sketch3D);
var c = line.EndPoint.Coordinate;
sketch3D.IsEditing = true;
line.EndPoint.Coordinate = new Point(0.3, 0.3, 0.3);
sketch3D.IsEditing = false;
}
private static void TraverseSelectedFaces(IXApplication app)
{
foreach (var face in app.Documents.Active.Selections.OfType<IXFace>())
{
Console.WriteLine(face.Area);
}
}
private static void CreateSweepFromSelection(ISwApplication app)
{
var doc = app.Documents.Active;
var polyline = app.MemoryGeometryBuilder.WireBuilder.PreCreatePolyline();
polyline.Points = new Point[]
{
new Point(0, 0, 0),
new Point(0.01, 0.01, 0),
new Point(0.02, 0, 0),
new Point(0, 0, 0)
};
polyline.Commit();
var reg = app.MemoryGeometryBuilder.CreatePlanarSheet(
app.MemoryGeometryBuilder.CreateRegionFromSegments(polyline)).Bodies.First();
var pathSeg = app.Documents.Active.Selections.Last() as IXSketchSegment;
var pathCurve = pathSeg.Definition;
var sweep = app.MemoryGeometryBuilder.SolidBuilder.PreCreateSweep();
sweep.Profiles = new IXRegion[] { reg };
sweep.Path = pathCurve;
sweep.Commit();
var body = (sweep.Bodies.First() as ISwBody).Body;
(app.Documents.Active as ISwPart).Part.CreateFeatureFromBody3(body, false, 0);
}
private static void CreateTempGeometry(IXApplication app)
{
var sweepArc = app.MemoryGeometryBuilder.WireBuilder.PreCreateCircle();
sweepArc.Center = new Point(0, 0, 0);
sweepArc.Axis = new Vector(0, 0, 1);
sweepArc.Diameter = 0.01;
sweepArc.Commit();
var sweepLine = app.MemoryGeometryBuilder.WireBuilder.PreCreateLine();
sweepLine.StartCoordinate = new Point(0, 0, 0);
sweepLine.EndCoordinate = new Point(1, 1, 1);
sweepLine.Commit();
var sweep = app.MemoryGeometryBuilder.SolidBuilder.PreCreateSweep();
sweep.Profiles = new IXRegion[] { app.MemoryGeometryBuilder.CreatePlanarSheet(
app.MemoryGeometryBuilder.CreateRegionFromSegments(sweepArc)).Bodies.First() };
sweep.Path = sweepLine;
sweep.Commit();
var body = (sweep.Bodies.First() as ISwBody).Body;
(app.Documents.Active as ISwPart).Part.CreateFeatureFromBody3(body, false, 0);
var cone = app.MemoryGeometryBuilder.CreateSolidCone(
new Point(0, 0, 0),
new Vector(1, 1, 1),
0.1, 0.05, 0.2);
body = (cone.Bodies.First() as ISwBody).Body;
(app.Documents.Active as ISwPart).Part.CreateFeatureFromBody3(body, false, 0);
var arc = app.MemoryGeometryBuilder.WireBuilder.PreCreateCircle();
arc.Center = new Point(-0.1, 0, 0);
arc.Axis = new Vector(0, 0, 1);
arc.Diameter = 0.01;
arc.Commit();
var axis = app.MemoryGeometryBuilder.WireBuilder.PreCreateLine();
axis.StartCoordinate = new Point(0, 0, 0);
axis.EndCoordinate = new Point(0, 1, 0);
axis.Commit();
var rev = app.MemoryGeometryBuilder.SolidBuilder.PreCreateRevolve();
rev.Angle = Math.PI * 2;
rev.Axis = axis;
rev.Profiles = new IXRegion[] { app.MemoryGeometryBuilder.CreatePlanarSheet(
app.MemoryGeometryBuilder.CreateRegionFromSegments(arc)).Bodies.First() };
rev.Commit();
body = (rev.Bodies.First() as ISwBody).Body;
(app.Documents.Active as ISwPart).Part.CreateFeatureFromBody3(body, false, 0);
var box = app.MemoryGeometryBuilder.CreateSolidBox(
new Point(0, 0, 0),
new Vector(1, 1, 1),
new Vector(1, 1, 1).CreateAnyPerpendicular(),
0.1, 0.2, 0.3);
body = (box.Bodies.First() as ISwBody).Body;
(app.Documents.Active as ISwPart).Part.CreateFeatureFromBody3(body, false, 0);
var polyline = app.MemoryGeometryBuilder.WireBuilder.PreCreatePolyline();
polyline.Points = new Point[]
{
new Point(0, 0, 0),
new Point(0.1, 0.1, 0),
new Point(0.2, 0, 0),
new Point(0, 0, 0)
};
polyline.Commit();
var extr = app.MemoryGeometryBuilder.SolidBuilder.PreCreateExtrusion();
extr.Depth = 0.5;
extr.Direction = new Vector(1, 1, 1);
extr.Profiles = new IXRegion[] { app.MemoryGeometryBuilder.CreatePlanarSheet(
app.MemoryGeometryBuilder.CreateRegionFromSegments(polyline)).Bodies.First() };
extr.Commit();
body = (extr.Bodies.First() as ISwBody).Body;
(app.Documents.Active as ISwPart).Part.CreateFeatureFromBody3(body, false, 0);
var cyl = app.MemoryGeometryBuilder.CreateSolidCylinder(
new Point(0, 0, 0), new Vector(1, 0, 0), 0.1, 0.2);
body = (cyl.Bodies.First() as ISwBody).Body;
(app.Documents.Active as ISwPart).Part.CreateFeatureFromBody3(body, false, 0);
}
}
}
|
import * as React from "react";
import { Trans } from "@lingui/macro";
import { Tooltip } from "reactjs-components";
import FormGroupHeading from "#SRC/js/components/form/FormGroupHeading";
import FormGroupHeadingContent from "#SRC/js/components/form/FormGroupHeadingContent";
import InfoTooltipIcon from "#SRC/js/components/form/InfoTooltipIcon";
import PlacementPartial from "#PLUGINS/jobs/src/js/components/form/PlacementPartial";
import {
PlacementConstraint,
FormError,
FormOutput,
} from "#PLUGINS/jobs/src/js/components/form/helpers/JobFormData";
import RegionSelection from "./RegionSelection";
interface JobPlacementSectionProps {
formData: FormOutput;
errors: FormError[];
showErrors: boolean;
onRemoveItem: (path: string, index: number) => void;
onAddItem: (path: string) => void;
}
class JobPlacementSection extends React.Component<
JobPlacementSectionProps,
{}
> {
public getHeader(): React.ReactNode {
return (
<h1 className="flush-top short-bottom">
<FormGroupHeading>
<FormGroupHeadingContent primary={true} title="Placement">
<Trans>Placement</Trans>
</FormGroupHeadingContent>
<FormGroupHeadingContent>
<Tooltip
content={
<Trans>
You can configure the placement of agent nodes in regions and
zones for high availability or to expand capacity to new
regions when necessary.
</Trans>
}
interactive={true}
maxWidth={300}
wrapText={true}
>
<InfoTooltipIcon />
</Tooltip>
</FormGroupHeadingContent>
</FormGroupHeading>
</h1>
);
}
public getRegionSelection(regionIndex: number): React.ReactNode {
const { placementConstraints = [] } = this.props.formData;
const selectProps = {
name: `${regionIndex}.regionConstraint`,
type: "text",
value: placementConstraints[regionIndex]?.value || "",
};
return <RegionSelection selectProps={selectProps} />;
}
public getRegionIndex(): number {
const { placementConstraints = [] } = this.props.formData;
const index = placementConstraints.findIndex((c) => c.type === "region");
return index !== -1 ? index : placementConstraints.length;
}
public isGenericConstraintFactory(index: number) {
return (constraint: PlacementConstraint) => {
const { placementConstraints = [] } = this.props.formData;
return constraint !== placementConstraints[index];
};
}
public render() {
const regionIndex = this.getRegionIndex();
return (
<div>
{this.getHeader()}
{this.getRegionSelection(regionIndex)}
<h2 className="short-bottom">
<Trans>Advanced Constraints</Trans>{" "}
<Tooltip
content={
<Trans>
Constraints have three parts: a field name, an operator, and an
optional parameter. The field can be the hostname of the agent
node or any attribute of the agent node.
</Trans>
}
interactive={true}
maxWidth={300}
wrapText={true}
>
<InfoTooltipIcon />
</Tooltip>
</h2>
<Trans render="p">
Control where your job runs with advanced rules and constraint
attributes
</Trans>
<PlacementPartial
addButtonText={<Trans>Add Constraint</Trans>}
getIsGenericConstraint={this.isGenericConstraintFactory(regionIndex)}
{...this.props}
/>
</div>
);
}
}
export default JobPlacementSection;
|
- [X] consider the merits of writing the entire file to the log when
it changes;
- considered - I can't think of a good reason to do this;
- [X] adjust the log format so that it's more in-line with what gets
thrown in syslog;
- [X] ~~log to syslog instead of to a file;~~ postponed indefinitely
- this can be done to a remote syslog server, but not to a local
one that's only configured to listen on a Unix domain socket
(instead of localhost), because Java;
- [ ] handle the exceptions that can get thrown by "spit";
- [ ] add an option (-N) for specifying a Nagios command pipe to
write to...;
- ...but only if we can establish a coherent use case for this;
- describe how the use-case would work;
- [ ] find-out how to get environment variable settings, for the
purposes of getting CWD and printing absolute path to output
file (which should be in "./tmp");
- [ ] write an init script;
- I cheated - I disabled exception throwing for clj-http/get;
- [ ] document possible outputs for the benefit of using an external
tool (e.g., Splunk);
- [ ] work-out what it takes to use the `index.txt` file as a
persistent configuration state across runs, while still
honoring the command-line flags and their defaults in an
intelligible way;
- [ ] revamp the logging framework to use something more flexible;
- [ ] find-out why :request-time is so much smaller than the measured
time (is the overhead really that high?);
- [ ] parameterize the request timeout values, instead of having them
set to the current hard-coded values;
- [ ] find-out if log4j is already thread-safe (probably) or if I
need to wrap it in agent abstractions as a precaution;
- [X] The arg processing and merging is really broken. Need to fix.
I didn't finally realize this until I started adding support
for checking multiple URLs concurrently.
- [ ] update the README to reflect the latest feature additions;
- [ ] see if log4j will let us change the name logged to be something
besides the thread-pool ID (say, using a prefix);
- [X] revert the -l option back to -s for compatibility, and to not
break semver;
- [ ] produce some ancillary documentation explaining the rationale
behind the options merging;
- [ ] produce some ancillary documentation explaining use-cases for
Splunk and Nagios;
- [ ] decide on a consistent convention for using "str" vs. "format";
- [ ] "load-index" and "save-index" should probably be more
appropriately named "init-index" and "update-index";
- [ ] give this a new name with multiple segments in it - single-segment names
are bad mojo (see page 330 of the Clojure Programming book from O'Reilly)
|
package blinky
import ammonite.ops.Path
import blinky.run.Instruction._
package object run {
implicit class ConsoleSyntax[+A](self: Instruction[A]) {
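    // Gives Instruction programs map/flatMap so they can be sequenced with for-comprehensions;
    // flatMap pushes the continuation `f` into every instruction's `next` callback.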
def map[B](f: A => B): Instruction[B] =
flatMap(a => succeed(f(a)))
def flatMap[B](
f: A => Instruction[B]
): Instruction[B] =
self match {
case Return(value) =>
f(value())
case Empty(next) =>
next.flatMap(f)
case PrintLine(line, next) =>
PrintLine(line, next.flatMap(f))
case PrintErrorLine(line, next) =>
PrintErrorLine(line, next.flatMap(f))
case RunSync(op, args, envArgs, path, next) =>
RunSync(op, args, envArgs, path, next.flatMap(f))
case RunAsync(op, args, envArgs, path, next) =>
RunAsync(op, args, envArgs, path, next(_: Either[String, String]).flatMap(f))
case RunAsyncSuccess(op, args, envArgs, path, next) =>
RunAsyncSuccess(op, args, envArgs, path, next(_: Boolean).flatMap(f))
case RunAsyncEither(op, args, envArgs, path, next) =>
RunAsyncEither(op, args, envArgs, path, next(_: Either[String, String]).flatMap(f))
case MakeTemporaryDirectory(next) =>
MakeTemporaryDirectory(next(_: Path).flatMap(f))
case MakeDirectory(path, next) =>
MakeDirectory(path, next.flatMap(f))
case CopyInto(from, to, next) =>
CopyInto(from, to, next.flatMap(f))
case CopyResource(resource, destinationPath, next) =>
CopyResource(resource, destinationPath, next.flatMap(f))
case WriteFile(path, content, next) =>
WriteFile(path, content, next.flatMap(f))
case ReadFile(path, next) =>
ReadFile(path, next(_: Either[Throwable, String]).flatMap(f))
case IsFile(path, next) =>
IsFile(path, next(_: Boolean).flatMap(f))
case CopyRelativeFiles(filesToCopy, fromPath, toPath, next) =>
CopyRelativeFiles(
filesToCopy,
fromPath,
toPath,
next(_: Either[Throwable, Unit]).flatMap(f)
)
case Timeout(runFunction, millis, next) =>
Timeout(runFunction, millis, next(_: Option[Boolean]).flatMap(f))
case LsFiles(basePath, next) =>
LsFiles(basePath, next(_: Seq[String]).flatMap(f))
}
}
}
|
# -*- encoding : utf-8 -*-
require 'spec_helper'
describe 'rbenv::definition', :type => :define do
let(:user) { 'tester' }
let(:ruby_version) { '1.9.3-p125' }
let(:title) { "rbenv::definition::#{user}::#{ruby_version}" }
let(:dot_rbenv) { "/home/#{user}/.rbenv" }
let(:target_path) { "#{dot_rbenv}/plugins/ruby-build/share/ruby-build/#{ruby_version}" }
let(:params) { {:user => user, :ruby => ruby_version, :source => definition} }
context 'puppet' do
let(:definition) { 'puppet:///custom-definition' }
it 'copies the file to the right path' do
should contain_file("rbenv::definition-file #{user} #{ruby_version}").with(
:path => target_path,
:source => definition
)
end
end
context 'http' do
let(:definition) { 'http://gist.com/ree' }
it 'downloads file to the right path' do
should contain_exec("rbenv::definition-file #{user} #{ruby_version}").with(
:command => "wget #{definition} -O #{target_path}",
:creates => target_path
)
end
end
context 'https' do
let(:definition) { 'https://gist.com/ree' }
it 'downloads file to the right path' do
should contain_exec("rbenv::definition-file #{user} #{ruby_version}").with(
:command => "wget #{definition} -O #{target_path}",
:creates => target_path
)
end
end
end
|
import logging
from datetime import timedelta
from homeassistant.components.light import LightEntity
from homeassistant.helpers.entity import Entity
from custom_components.miele import DATA_CLIENT, DATA_DEVICES
from custom_components.miele import DOMAIN as MIELE_DOMAIN
PLATFORMS = ["miele"]
_LOGGER = logging.getLogger(__name__)
ALL_DEVICES = []
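# Raw device type codes that this platform treats as having a controllable light.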
SUPPORTED_TYPES = [17, 18, 32, 33, 34, 68]
# pylint: disable=W0612
def setup_platform(hass, config, add_devices, discovery_info=None):
global ALL_DEVICES
devices = hass.data[MIELE_DOMAIN][DATA_DEVICES]
for k, device in devices.items():
device_type = device["ident"]["type"]
light_devices = []
if device_type["value_raw"] in SUPPORTED_TYPES:
light_devices.append(MieleLight(hass, device))
add_devices(light_devices)
ALL_DEVICES = ALL_DEVICES + light_devices
def update_device_state():
for device in ALL_DEVICES:
try:
device.async_schedule_update_ha_state(True)
except (AssertionError, AttributeError):
            _LOGGER.debug(
                "Component %s is most likely disabled manually; if not, please report this to the developer",
                device.entity_id,
            )
class MieleLight(LightEntity):
def __init__(self, hass, device):
self._hass = hass
self._device = device
self._ha_key = "light"
@property
def device_id(self):
"""Return the unique ID for this light."""
return self._device["ident"]["deviceIdentLabel"]["fabNumber"]
@property
def unique_id(self):
"""Return the unique ID for this light."""
return self.device_id
@property
def name(self):
"""Return the name of the light."""
ident = self._device["ident"]
result = ident["deviceName"]
if len(result) == 0:
return ident["type"]["value_localized"]
else:
return result
@property
def is_on(self):
"""Return the state of the light."""
return self._device["state"]["light"] == 1
def turn_on(self, **kwargs):
service_parameters = {"device_id": self.device_id, "body": {"light": 1}}
self._hass.services.call(MIELE_DOMAIN, "action", service_parameters)
def turn_off(self, **kwargs):
service_parameters = {"device_id": self.device_id, "body": {"light": 2}}
self._hass.services.call(MIELE_DOMAIN, "action", service_parameters)
async def async_update(self):
        if self.device_id not in self._hass.data[MIELE_DOMAIN][DATA_DEVICES]:
_LOGGER.debug("Miele device not found: {}".format(self.device_id))
else:
self._device = self._hass.data[MIELE_DOMAIN][DATA_DEVICES][self.device_id]
|
package com.guohao.guoke.kotlin_self
class FunParamsTest{
fun main(args : Array<String>){
var a = ArrayList<Int>()
var b = Array(5) {""}
}
} |
package com.cloudx.ktx.core
import androidx.lifecycle.Lifecycle
import androidx.lifecycle.LifecycleObserver
import androidx.lifecycle.OnLifecycleEvent
import androidx.lifecycle.ProcessLifecycleOwner
/**
* @Author petterp
* @Date 2020/7/14-3:55 PM
* @Email [email protected]
 * @Function Lifecycle utility
*/
class LifecycleChecker : LifecycleObserver {
companion object {
var isFrontDesk = true
fun init() {
ProcessLifecycleOwner.get().lifecycle.addObserver(LifecycleChecker())
}
}
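    // Usage (assumed): call LifecycleChecker.init() once, e.g. from Application.onCreate();
    // isFrontDesk then reflects whether the app is currently in the foreground.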
    /** The app entered the background */
@OnLifecycleEvent(Lifecycle.Event.ON_STOP)
fun onAppBackGround() {
isFrontDesk = false
}
    /** The app entered the foreground */
@OnLifecycleEvent(Lifecycle.Event.ON_START)
fun onAppForeGround() {
isFrontDesk = true
}
} |
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var writer_1 = require("../writer");
var reader_1 = require("../reader");
describe("Writing a PGN game", function () {
// Ensure that whole games are created correctly
// Moves are normally checked in a separate spec
var writer = new writer_1.PgnWriter();
function _gm(_pgn, _fen) {
if (_fen === void 0) { _fen = null; }
var pgn = new reader_1.PgnReader();
var game = pgn.generateMoves(_pgn);
var moves = game.getMoves();
pgn.validate(moves, _fen);
return game;
}
var notations = [
{ orig: "1. e4", target: "1. e4" },
{ orig: "1. e4 1. e5", target: "1. e4 e5" },
{ orig: "1. e4 1. e5 2. Nf3 Nc6", target: "1. e4 e5 2. Nf3 Nc6" },
{ orig: "1. e4 1... e5 2. Nf3 2. Nc6", target: "1. e4 e5 2. Nf3 Nc6" },
{ orig: "e4 e5 Nf3 Nc6", target: "1. e4 e5 2. Nf3 Nc6" },
{ orig: "e2-e4 e5 Ng1-f3 Nb8-c6", target: "1. e4 e5 2. Nf3 Nc6" },
];
notations.forEach(function (notation, i) {
it(i + " should understand notation: " + notation.orig, function () {
var target = writer.write(_gm(notation.orig));
expect(notation.target).toEqual(target);
});
});
});
//# sourceMappingURL=writer-spec.js.map |
mod spatial_hash_cell;
mod spatial_hash_table;
mod header;
mod coord;
pub use self::spatial_hash_cell::*;
pub use self::spatial_hash_table::*;
pub use self::header::*;
pub use self::coord::*;
|
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE BangPatterns #-}
module Internal.Control (
modifyMVar',
modifyMVar_',
updateMVar',
updateMVar_'
) where
import Control.Monad (void)
import Control.Concurrent.MVar
import Control.Exception
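-- Strict variants of the MVar helpers: the new value is forced with ($!) or a bang pattern
-- before it is put back, avoiding space leaks from thunks accumulating inside the MVar.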
modifyMVar' ∷ MVar α → (α → IO (α, β)) → IO β
modifyMVar' mv f = mask $ \restore → do
v ← takeMVar mv
(v', r) ← (restore $ f v) `onException` putMVar mv v
putMVar mv $! v'
return r
modifyMVar_' ∷ MVar α → (α → IO α) → IO ()
modifyMVar_' mv f = mask $ \restore → do
v ← takeMVar mv
v' ← (restore $ f v) `onException` putMVar mv v
putMVar mv $! v'
updateMVar' ∷ MVar α → (α → α) → IO α
updateMVar' mv f = mask_ $ do
v ← takeMVar mv
let !v' = f v
putMVar mv v'
return v'
updateMVar_' ∷ MVar α → (α → α) → IO ()
updateMVar_' mv f = void $ updateMVar' mv f
|
package com.steven.movieapp.widget.refreshLoad
import android.util.SparseArray
import android.view.View
import android.view.ViewGroup
import androidx.recyclerview.widget.GridLayoutManager
import androidx.recyclerview.widget.RecyclerView
import androidx.recyclerview.widget.StaggeredGridLayoutManager
/**
* Description:
 * Date:2019/2/20
* Actor:Steven
*/
class WrapRecyclerAdapter(private val adapter: RecyclerView.Adapter<RecyclerView.ViewHolder>) :
RecyclerView.Adapter<RecyclerView.ViewHolder>() {
private val mHeaderViews: SparseArray<View> = SparseArray()
private val mFooterViews: SparseArray<View> = SparseArray()
companion object {
private var BASE_ITEM_TYPE_HEADER: Int = 100
private var BASE_ITEM_TYPE_FOOTER: Int = 200
}
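    // Typical usage (assumed): wrap the real adapter, add header/footer views, set this wrapper as
    // RecyclerView.adapter, and call adjustSpanSize(recyclerView) when a GridLayoutManager is used.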
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): RecyclerView.ViewHolder {
if (isHeaderViewType(viewType)) {
val headerView = mHeaderViews.get(viewType)
return createHeaderFooterViewHolder(headerView)
}
if (isFooterViewType(viewType)) {
val footerView = mFooterViews.get(viewType)
return createHeaderFooterViewHolder(footerView)
}
return adapter.onCreateViewHolder(parent, viewType)
}
override fun getItemCount(): Int = adapter.itemCount + mHeaderViews.size() + mFooterViews.size()
override fun onBindViewHolder(holder: RecyclerView.ViewHolder, position: Int) {
if (isHeaderPosition(position) || isFooterPosition(position)) {
return
}
val adapterPosition = position - mHeaderViews.size()
adapter.onBindViewHolder(holder, adapterPosition)
}
override fun getItemViewType(position: Int): Int {
if (isHeaderPosition(position)) {
return mHeaderViews.keyAt(position)
}
if (isFooterPosition(position)) {
val index = position - mHeaderViews.size() - adapter.itemCount
return mFooterViews.keyAt(index)
}
val index = position - mHeaderViews.size()
return adapter.getItemViewType(index)
}
private fun createHeaderFooterViewHolder(view: View): RecyclerView.ViewHolder {
return object : RecyclerView.ViewHolder(view) {
}
}
private fun isHeaderViewType(viewType: Int): Boolean = mHeaderViews.indexOfKey(viewType) >= 0
private fun isFooterViewType(viewType: Int): Boolean = mFooterViews.indexOfKey(viewType) >= 0
private fun isHeaderPosition(position: Int): Boolean = position < mHeaderViews.size()
private fun isFooterPosition(position: Int): Boolean = position >= mHeaderViews.size() + adapter.itemCount
fun addHeaderView(view: View) {
val position = mHeaderViews.indexOfValue(view)
if (position < 0) {
mHeaderViews.put(BASE_ITEM_TYPE_HEADER++, view)
}
notifyDataSetChanged()
}
fun addFooterView(view: View) {
val position = mFooterViews.indexOfValue(view)
if (position < 0) {
mFooterViews.put(BASE_ITEM_TYPE_FOOTER++, view)
}
notifyDataSetChanged()
}
fun removeHeaderView(view: View) {
val index = mHeaderViews.indexOfValue(view)
if (index < 0) {
return
}
mHeaderViews.removeAt(index)
notifyDataSetChanged()
}
fun removeFooterView(view: View) {
val index = mFooterViews.indexOfValue(view)
if (index < 0) {
return
}
mFooterViews.removeAt(index)
notifyDataSetChanged()
}
/**
     * Make header and footer items span a full row when a GridLayoutManager is used
*/
fun adjustSpanSize(recyclerView: RecyclerView) {
if (recyclerView.layoutManager is GridLayoutManager) {
val layoutManager = recyclerView.layoutManager as GridLayoutManager
layoutManager.spanSizeLookup = object : GridLayoutManager.SpanSizeLookup() {
override fun getSpanSize(position: Int): Int {
val isHeaderOrFooter: Boolean = isHeaderPosition(position) || isFooterPosition(position)
return if (isHeaderOrFooter) layoutManager.spanCount else 1
}
}
}
}
/**
     * When the item view is attached to the window, check for StaggeredGridLayoutManager layout params and make header and footer items span the full row
*/
override fun onViewAttachedToWindow(holder: RecyclerView.ViewHolder) {
super.onViewAttachedToWindow(holder)
val layoutParams = holder.itemView.layoutParams
if (layoutParams is StaggeredGridLayoutManager.LayoutParams) {
if (isHeaderPosition(holder.layoutPosition) || isFooterPosition(holder.layoutPosition))
layoutParams.isFullSpan = true
}
}
} |
import { BASEURL, fetch, post, patch, put, postConfig } from './http.js'
export const BASICURL = BASEURL;
// export const uploadPic = data => post('', data);
// export const getALL = () => post(url);  // POST
// export const getALL = () => fetch(url); // GET
// POST "Home" => "Log in after getting the verification code"
export const signIn = data => post('admin/system/employee/sign/in', data);
//POST获取 "首页"=>"再次获取验证码后登录"
export const getCodeAgain = data => post('admin/code/sms-provider/login', data);
//post "用户管理" => "用户详情"
export const employeeDetail = data => post('admin/system/employee/detail', data);
//post "个人中心" => "修改密码"
export const fixPersonalPW = data => post('admin/system/employee/update-password', data);
// POST "系统管理" => "角色管理"
export const roleManage = (data) => post('admin/system/role/all', data);
// POST "系统管理" => "部门管理"=>"所有部门"
export const departmentManage = (data) => post('admin/system/department/all', data);
// POST "系统管理" => "部门管理"->"添加或修改部门信息"
export const addAuditDepart = data => post('admin/system/department/merge', data);
// POST "系统管理" => "部门管理"->"查看部门详细信息"
export const departDetail = data => post('admin/system/department/detail', data);
// POST "系统管理" => "部门管理"->"删除部门"
export const delDepart = data => post('admin/system/department/deletes', data);
//POST "系统管理" => "权限管理"
export const permissionManage = data => post('admin/system/permission/page-query', data);
//POST "系统管理" => "添加编辑权限"
export const addAuditPermission = data => post('admin/system/permission/merge', data);
//POST "系统管理" => "删除权限"
export const delPermission = data => post('admin/system/permission/deletes', data);
// POST "系统管理" => "角色管理"->"查询角色权限"
export const queryRolePermission = data => post('admin/system/role/permission', data);
//POST获取 "会员实名审核"
export const MemberRealNameList = data => post('admin/member/member-application/page-query', data);
//post "系统管理" => "系统日志"
export const accessLog = (url, data) => fetch(`admin/system/access-log/page-query/${url}`, data);
//post "用户管理" => "用户查询"
export const queryEmployee = data => post('admin/system/employee/page-query', data);
//post "用户管理" => "新增或者修改用户"
export const addAuditEmployee = data => post('/admin/system/employee/merge', data);
//post "用户管理" => "删除用户"
export const delEmployee = data => post('admin/system/employee/deletes', data);
//Post "系统管理" => "意见反馈查询"
export const getFeedData = data => postConfig('/admin/suggest/sugg/page-query', data);
// POST "获取全部权限"
export const getAllPermission = () => post('admin/system/role/permission/all');
// POST "系统管理" => "角色管理"->"添加或修改角色信息"
export const addAuditRole = data => post('admin/system/role/merge', data);
// POST "系统管理" => "角色管理"->"删除角色"
export const deleteRole = data => post('admin/system/role/deletes', data);
// Course list
// Get all course categories
export const getCoursetypeList = data => fetch('admin/course/findAllCategory', data);
// Get all user categories
export const getUserClass = data => fetch('admin/user/findAllCategory', data);
|
#!/usr/bin/env bash
set -e
echo "" > coverage.txt
go version
if [[ $TRAVIS_GO_VERSION == 1.4.3 ]]; then
go get golang.org/x/tools/cmd/cover
fi
go test -coverprofile=unit_tests.out -covermode=atomic -coverpkg=./messaging ./messaging/
# go test -v -coverprofile=errors_tests.out -covermode=atomic -coverpkg=./messaging \
# ./messaging/tests/ -test.run TestError*
go test -v -coverprofile=integration_tests.out -covermode=atomic -coverpkg=./messaging \
./messaging/tests/ -test.run '^(Test[^(?:Error)].*)'
gocovmerge unit_tests.out integration_tests.out > coverage.txt
rm unit_tests.out integration_tests.out
|
module MkExxTable (getApiExx, ApiExx, prApiEx, mkEx) where
import System.Cmd
import System.Environment
import qualified Data.Map as M
import Data.Char
main = do
xx <- getArgs
aexx <- getApiExx xx
return () -- putStrLn $ prApiExx aexx
getApiExx :: [FilePath] -> IO ApiExx
getApiExx xx = do
s <- readFile (head xx)
let aet = getApiExxTrees $ filter validOutput $ mergeOutput $ lines s
aeos <- mapM readApiExxOne xx
let aexx = mkApiExx $ ("API",aet) : aeos
-- putStrLn $ prApiExx aexx
return aexx
readApiExxOne file = do
s <- readFile file
let lang = reverse (take 3 (drop 4 (reverse file))) -- api-exx-*Eng*.txt
let api = getApiExxOne $ filter validOutput $ mergeOutput $ lines s
putStrLn $ unlines $ prApiEx api ---
return (lang,api)
-- map function -> language -> example
type ApiExx = M.Map String (M.Map String String)
-- map function -> example
type ApiExxOne = M.Map String String
-- get a map function -> example
getApiExxOne :: [String] -> ApiExxOne
getApiExxOne = M.fromList . pairs . map cleanUp
where
pairs ss = case ss of
f:_:e:rest -> (f,e) : pairs (drop 1 (dropWhile (notElem '*') rest))
_ -> []
-- get the map function -> tree
getApiExxTrees :: [String] -> ApiExxOne
getApiExxTrees = M.fromList . pairs . map cleanUp
where
pairs ss = case ss of
f:e:_:rest -> (f,e) : pairs (drop 1 (dropWhile (notElem '*') rest))
_ -> []
-- remove leading prompts and spaces
cleanUp = dropWhile (flip elem " >")
--- this makes txt2tags loop...
mergeOutput ls = ls
mergeOutputt ls = case ls of
l@('>':_):ll -> let (ll1,ll2) = span ((/=">") . take 1) ll in unwords (l : map (unwords . words) ll1) : mergeOutput ll2
_:ll -> mergeOutput ll
_ -> []
-- only accept lines starting with prompts (to eliminate multi-line gf uncomputed output)
validOutput = (==">") . take 1
mkApiExx :: [(String,ApiExxOne)] -> ApiExx
mkApiExx ltes =
M.fromList [(t,
M.fromList [(l,maybe "NONE" id (M.lookup t te)) | (l,te) <- ltes])
| t <- M.keys firstL]
where
firstL = snd (head ltes)
prApiExx :: ApiExx -> String
prApiExx aexx = unlines
[unlines (t:prApiEx lexx) | (t,lexx) <- M.toList aexx]
prApiEx :: M.Map String String -> [String]
prApiEx apexx = case M.toList apexx of
  (a,e):lexx -> (a ++ ": ``" ++ unwords (words e) ++ "``"):
    [l ++ ": //" ++ mkEx l e ++ "//" | (l,e) <- lexx]
  [] -> []
mkEx l = unws . bind . mkE . words where
unws = if elem l ["Chi","Jpn","Tha"] then concat else unwords -- remove spaces
mkE e = case e of
"atomic":"term":_ -> ["*"]
"[]":_ -> ["''"]
"(table":es -> ["..."]
"table":es -> ["..."]
('{':_):es -> ["..."]
"pre":p@('{':_):es -> init (init (drop 2 p)) : ["..."]
--- "pre":p@('{':_):es -> init (init (drop 2 p)) : reverse (takeWhile ((/='}') . head) (reverse es))
e0:es -> e0:mkE es
_ -> e
bind ws = case ws of
w : "&+" : u : ws2 -> bind ((w ++ u) : ws2)
w : "Predef.BIND" : u : ws2 -> bind ((w ++ u) : ws2)
w : "Predef.SOFT_BIND" : u : ws2 -> bind ((w ++ u) : ws2)
"&+":ws2 -> bind ws2
"Predef.BIND":ws2 -> bind ws2
"Predef.SOFT_BIND":ws2 -> bind ws2
    w : "++" : ws2 -> w : bind ws2
    w : ws2 -> w : bind ws2
_ -> ws
|
<?php
/**
* Copyright 2015 Bit API Hub
*
* Initial migration
*/
namespace Fuel\Migrations;
class Create_All
{
function up()
{
\DBUtil::create_table('accounts', array(
'id' => array('type' => 'int', 'constraint' => 11, 'auto_increment' => true),
'consumer_key' => array('type' => 'varchar', 'constraint' => 36),
'consumer_secret' => array('type' => 'varchar', 'constraint' => 122),
'access_level' => array('type' => 'int', 'constraint' => 1, 'default' => 1),
'max_calls' => array('type' => 'int', 'constraint' => 1, 'default' => 0),
'reset_usage' => array('type' => 'int', 'constraint' => 11),
'free_account_on' => array('type' => 'int', 'constraint' => 11),
'can_run_inactive' => array('type' => 'int', 'constraint' => 1, 'default' => 0),
'acl_type' => array('type' => 'int', 'constraint' => 1, 'default' => 1),
'link_back' => array('type' => 'int', 'constraint' => 1, 'default' => 0),
'js_calls_allowed' => array('type' => 'int', 'constraint' => 1, 'default' => 1),
'store_credentials' => array('type' => 'int', 'constraint' => 1, 'default' => 1),
'created_at' => array('type' => 'int', 'constraint' => 11, 'default' => 0),
'updated_at' => array('type' => 'int', 'constraint' => 11, 'default' => 0),
'deleted_at' => array('type' => 'varchar', 'constraint' => 11, 'default' => null, 'null' => true),
), array('id'));
\DBUtil::create_table('accounts_metadata', array(
'id' => array('type' => 'int', 'constraint' => 11, 'auto_increment' => true),
'account_id' => array('type' => 'int', 'constraint' => 11),
'key' => array('type' => 'varchar', 'constraint' => 20),
'value' => array('type' => 'text'),
'created_at' => array('type' => 'int', 'constraint' => 11, 'default' => 0),
'updated_at' => array('type' => 'int', 'constraint' => 11, 'default' => 0),
'deleted_at' => array('type' => 'varchar', 'constraint' => 11, 'default' => null, 'null' => true),
), array('id'));
\DBUtil::create_table('apis', array(
'id' => array('type' => 'int', 'constraint' => 11, 'auto_increment' => true),
'account_id' => array('type' => 'int', 'constraint' => 11),
'name' => array('type' => 'varchar', 'constraint' => 50),
'min_access_level' => array('type' => 'int', 'constraint' => 1, 'default' => 1),
'active_level' => array('type' => 'int', 'constraint' => 1),
'private' => array('type' => 'int', 'constraint' => 1),
'secret' => array('type' => 'varchar', 'constraint' => 122),
'force_validation' => array('type' => 'int', 'constraint' => 1),
'allow_custom_dynamic' => array('type' => 'int', 'constraint' => 1, 'default' => 1),
'created_at' => array('type' => 'int', 'constraint' => 11, 'default' => 0),
'updated_at' => array('type' => 'int', 'constraint' => 11, 'default' => 0),
'deleted_at' => array('type' => 'varchar', 'constraint' => 11, 'default' => null, 'null' => true),
), array('id'));
\DBUtil::create_table('apis_metadata', array(
'id' => array('type' => 'int', 'constraint' => 11, 'auto_increment' => true),
'apis_id' => array('type' => 'int', 'constraint' => 11),
'key' => array('type' => 'varchar', 'constraint' => 20),
'value' => array('type' => 'text'),
'created_at' => array('type' => 'int', 'constraint' => 11, 'default' => 0),
'updated_at' => array('type' => 'int', 'constraint' => 11, 'default' => 0),
'deleted_at' => array('type' => 'varchar', 'constraint' => 11, 'default' => null, 'null' => true),
), array('id'));
\DBUtil::create_table('api_stats', array(
'id' => array('type' => 'int', 'constraint' => 11, 'auto_increment' => true),
'apis_id' => array('type' => 'int', 'constraint' => 11),
'code' => array('type' => 'int', 'constraint' => 3),
'call' => array('type' => 'varchar', 'constraint' => 150),
'is_static' => array('type' => 'int', 'constraint' => 1),
'count' => array('type' => 'int', 'constraint' => 11),
'created_at' => array('type' => 'int', 'constraint' => 11, 'default' => 0),
'updated_at' => array('type' => 'int', 'constraint' => 11, 'default' => 0),
), array('id'));
\DBUtil::create_table('data_calls', array(
'id' => array('type' => 'int', 'constraint' => 11, 'auto_increment' => true),
'account_id' => array('type' => 'int', 'constraint' => 11),
'name' => array('type' => 'varchar', 'constraint' => 50),
'call_script' => array('type' => 'text'),
'active_level' => array('type' => 'int', 'constraint' => 1),
'min_access_level' => array('type' => 'int', 'constraint' => 1, 'default' => 1),
'created_at' => array('type' => 'int', 'constraint' => 11, 'default' => 0),
'updated_at' => array('type' => 'int', 'constraint' => 11, 'default' => 0),
'deleted_at' => array('type' => 'varchar', 'constraint' => 11, 'default' => null, 'null' => true),
), array('id'));
}
function down()
{
\DBUtil::drop_table('accounts');
\DBUtil::drop_table('accounts_metadata');
\DBUtil::drop_table('apis');
\DBUtil::drop_table('apis_metadata');
\DBUtil::drop_table('api_stats');
\DBUtil::drop_table('data_calls');
}
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace DbScriptDeploy.UI.Utils
{
public class ControlUtils
{
/// <summary>
/// Utility method to ensure "hidden" or reserved characters get displayed correctly.
/// </summary>
/// <param name="underscores"></param>
/// <returns></returns>
public static string EscapeContent(string content, bool showUnderscores = true)
{
string result = content;
if (showUnderscores)
{
result = (result ?? "").Replace("_", "__");
}
return result;
}
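        // Example (illustrative): EscapeContent("my_script") returns "my__script", so the
        // underscore is shown literally instead of being treated as an accelerator marker.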
}
}
|
# LOOPS
Write-Output('CLASSICAL LOOP') # CLASSICAL LOOP
for ($i = 0; $i -lt 10; $i++) {
Write-Output('i: ' + $i)
}
Write-Output('CLASSICAL LOOP WITH ARRAYS') # CLASSICAL LOOP WITH ARRAYS
$numbers = @(-3.2, 5.3, 3, 1)
for ($i = 0; $i -lt $numbers.Length; $i++) {
Write-Output($numbers[$i])
}
Write-Output('REVERSE LOOP') # REVERSE LOOP
for ($i = 5; $i -gt -1; $i--) {
Write-Output('i: ' + $i)
}
Write-Output('NESTED LOOP') # NESTED LOOP
for ($i = 0; $i -lt 3; $i++) {
Write-Output('i: ' + $i)
for ($j = 2; $j -gt 0; $j--) {
Write-Output('j: ' + $j)
for ($k = 0; $k -lt 2; $k++) {
Write-Output('k: ' + $k)
}
}
}
Write-Output('ITERATOR LOOP') # ITERATOR LOOP
$colors = @('red', 'green', 'blue', 'yellow')
foreach ($color in $colors) {
Write-Output($color)
}
Write-Output('WHILE LOOP') # WHILE LOOP
$i = 0
while ($i -le 3) {
$i++
Write-Output('Hello ' + $i)
}
Write-Output('EXAMPLE 1 (SUMMATION): sum up all the elements of an array') # EXAMPLE 1
$my_array = @(-4.3, 2, -0.7, 1.5, 3.5)
$total_sum = 0
for ($i = 0; $i -lt $my_array.Length; $i++) {
$total_sum = $total_sum + $my_array[$i]
}
Write-Output('The sum of all the elements of the array is ' + $total_sum)
Write-Output('EXAMPLE 2 (PRODUCT): multiply all the elements of an array') # EXAMPLE 2
$my_array= @(3, 2, -1, 1, 4)
$prod = 1
for ($i = 0; $i -lt $my_array.Length; $i++) {
$prod = $prod * $my_array[$i]
}
Write-Output('The multiplication of all the elements of the array is ' + $prod)
|
using Microsoft.AspNetCore.Routing;
using System;
using System.Collections.Generic;
using System.Linq;
namespace Cofoundry.Web
{
/// <summary>
/// Implement this interface to define routes that should be
/// injected into the MVC route collection during the startup
/// process. IRunAfterRouteRegistration or IRunBeforeRouteRegistration
/// can be used to affect the ordering of registrations, but otherwise
/// the ordering that instances of IRouteRegistration get run cannot
/// is not guaranteed.
/// </summary>
public interface IRouteRegistration
{
/// <summary>
/// Register routes with the mvc RouteCollection.
/// </summary>
void RegisterRoutes(IRouteBuilder routeBuilder);
}
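    // Illustrative implementation sketch (not part of Cofoundry itself):
    //
    // public class BlogRouteRegistration : IRouteRegistration
    // {
    //     public void RegisterRoutes(IRouteBuilder routeBuilder)
    //     {
    //         routeBuilder.MapRoute("blog", "blog/{action=Index}", new { controller = "Blog" });
    //     }
    // }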
}
|
<?php
namespace Oro\Bundle\IntegrationBundle\Provider;
interface IconAwareIntegrationInterface
{
/**
* Returns icon path for UI, should return value like 'bundles/acmedemo/img/logo.png'
* Relative path to assets helper
*
* @return string
*/
public function getIcon();
}
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "sugar-high"
s.version = "0.7.3"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Kristian Mandrup"]
s.date = "2012-10-18"
s.description = "More Ruby sugar - inspired by the 'zuker' project"
s.email = "[email protected]"
s.extra_rdoc_files = [
"LICENSE",
"README.textile"
]
s.files = [
".document",
".rspec",
".rvmrc",
".travis.yml",
"CHANGELOG.md",
"Gemfile",
"LICENSE",
"README.textile",
"Rakefile",
"lib/sugar-high.rb",
"lib/sugar-high/alias.rb",
"lib/sugar-high/arguments.rb",
"lib/sugar-high/array.rb",
"lib/sugar-high/blank.rb",
"lib/sugar-high/boolean.rb",
"lib/sugar-high/class_ext.rb",
"lib/sugar-high/delegate.rb",
"lib/sugar-high/dsl.rb",
"lib/sugar-high/enumerable.rb",
"lib/sugar-high/file.rb",
"lib/sugar-high/file_ext.rb",
"lib/sugar-high/hash.rb",
"lib/sugar-high/includes.rb",
"lib/sugar-high/kind_of.rb",
"lib/sugar-high/math.rb",
"lib/sugar-high/metaclass.rb",
"lib/sugar-high/methods.rb",
"lib/sugar-high/module.rb",
"lib/sugar-high/not.rb",
"lib/sugar-high/numeric.rb",
"lib/sugar-high/path.rb",
"lib/sugar-high/properties.rb",
"lib/sugar-high/rails/concerns.rb",
"lib/sugar-high/range.rb",
"lib/sugar-high/regexp.rb",
"lib/sugar-high/rspec/configure.rb",
"lib/sugar-high/rspec/matchers/have_aliases.rb",
"lib/sugar-high/string.rb",
"lib/sugar-high/version.rb",
"spec/fixtures/empty.txt",
"spec/fixtures/non-empty.txt",
"spec/fixtures/search_file.txt",
"spec/spec_helper.rb",
"spec/sugar-high/alias_spec.rb",
"spec/sugar-high/arguments_spec.rb",
"spec/sugar-high/array_spec.rb",
"spec/sugar-high/blank_spec.rb",
"spec/sugar-high/delegate_spec.rb",
"spec/sugar-high/dsl_spec.rb",
"spec/sugar-high/file_spec.rb",
"spec/sugar-high/hash_spec.rb",
"spec/sugar-high/includes_spec.rb",
"spec/sugar-high/kind_of_spec.rb",
"spec/sugar-high/methods_spec.rb",
"spec/sugar-high/module_spec.rb",
"spec/sugar-high/numeric_spec.rb",
"spec/sugar-high/path_spec.rb",
"spec/sugar-high/properties_spec.rb",
"spec/sugar-high/rails/concerns_spec.rb",
"spec/sugar-high/rails/fixture_user/scopes.rb",
"spec/sugar-high/rails/fixture_user/validations.rb",
"spec/sugar-high/rails/shared/associations.rb",
"spec/sugar-high/rails/shared/caching.rb",
"spec/sugar-high/range_spec.rb",
"spec/sugar-high/regexp_spec.rb",
"spec/sugar-high/string_spec.rb",
"sugar-high.gemspec"
]
s.homepage = "http://github.com/kristianmandrup/sugar-high"
s.require_paths = ["lib"]
s.rubygems_version = "1.8.24"
s.summary = "Ruby convenience sugar packs!"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rspec>, [">= 2.4.0"])
s.add_development_dependency(%q<rails>, [">= 3.0.1"])
s.add_development_dependency(%q<jeweler>, [">= 1.6.4"])
s.add_development_dependency(%q<bundler>, [">= 1.0.1"])
s.add_development_dependency(%q<rdoc>, [">= 0"])
else
s.add_dependency(%q<rspec>, [">= 2.4.0"])
s.add_dependency(%q<rails>, [">= 3.0.1"])
s.add_dependency(%q<jeweler>, [">= 1.6.4"])
s.add_dependency(%q<bundler>, [">= 1.0.1"])
s.add_dependency(%q<rdoc>, [">= 0"])
end
else
s.add_dependency(%q<rspec>, [">= 2.4.0"])
s.add_dependency(%q<rails>, [">= 3.0.1"])
s.add_dependency(%q<jeweler>, [">= 1.6.4"])
s.add_dependency(%q<bundler>, [">= 1.0.1"])
s.add_dependency(%q<rdoc>, [">= 0"])
end
end
|
##############################################################################
#
# Copyright (c) 2003 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Tests of the contenttypes extension mechanism.
"""
from __future__ import print_function
import unittest
class ContentTypesTestCase(unittest.TestCase):
def setUp(self):
import mimetypes
mimetypes.init()
self._old_state = mimetypes.__dict__.copy()
def tearDown(self):
import mimetypes
mimetypes.__dict__.clear()
mimetypes.__dict__.update(self._old_state)
def _check_types_count(self, delta):
import mimetypes
self.assertEqual(len(mimetypes.types_map),
len(self._old_state["types_map"]) + delta)
def _getFilename(self, name):
import os.path
here = os.path.dirname(os.path.abspath(__file__))
return os.path.join(here, name)
def test_main(self):
import zope.contenttype
result = []
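        # Temporarily route the module's print() calls into `result` so main()'s
        # output can be captured and asserted on; the override is removed below.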
zope.contenttype.print = result.append
zope.contenttype.main()
del zope.contenttype.print
self.assertTrue(result)
def test_guess_content_type(self):
from zope.contenttype import add_files
from zope.contenttype import guess_content_type
filename = self._getFilename('mime.types-1')
add_files([filename])
ctype, _encoding = guess_content_type(body=b'text file')
self.assertEqual(ctype, "text/plain")
ctype, _encoding = guess_content_type(body=b'\001binary')
self.assertEqual(ctype, "application/octet-stream")
ctype, _encoding = guess_content_type()
self.assertEqual(ctype, "text/x-unknown-content-type")
def test_add_one_file(self):
from zope.contenttype import add_files
from zope.contenttype import guess_content_type
filename = self._getFilename('mime.types-1')
add_files([filename])
ctype, encoding = guess_content_type("foo.ztmt-1")
self.assertTrue(encoding is None)
self.assertEqual(ctype, "text/x-vnd.zope.test-mime-type-1")
ctype, encoding = guess_content_type("foo.ztmt-1.gz")
self.assertEqual(encoding, "gzip")
self.assertEqual(ctype, "text/x-vnd.zope.test-mime-type-1")
self._check_types_count(1)
def test_add_two_files(self):
from zope.contenttype import add_files
from zope.contenttype import guess_content_type
filename1 = self._getFilename('mime.types-1')
filename2 = self._getFilename('mime.types-2')
add_files([filename1, filename2])
ctype, encoding = guess_content_type("foo.ztmt-1")
self.assertTrue(encoding is None)
self.assertEqual(ctype, "text/x-vnd.zope.test-mime-type-1")
ctype, encoding = guess_content_type("foo.ztmt-2")
self.assertTrue(encoding is None)
self.assertEqual(ctype, "text/x-vnd.zope.test-mime-type-2")
self._check_types_count(2)
def test_text_type(self):
HTML = b'<HtmL><body>hello world</body></html>'
from zope.contenttype import text_type
self.assertEqual(text_type(HTML),
'text/html')
self.assertEqual(text_type(b'<?xml version="1.0"><foo/>'),
'text/xml')
self.assertEqual(text_type(b'<?XML version="1.0"><foo/>'),
'text/xml')
self.assertEqual(text_type(b'foo bar'),
'text/plain')
self.assertEqual(text_type(b'<!DOCTYPE HTML PUBLIC '
b'"-//W3C//DTD HTML 4.01 Transitional//EN" '
b'"http://www.w3.org/TR/html4/loose.dtd">'),
'text/html')
self.assertEqual(text_type(b'\n\n<!DOCTYPE html>\n'), 'text/html')
# we can also parse text snippets
self.assertEqual(text_type(b'<p>Hello</p>'), 'text/html')
longtext = b'abc ' * 100
self.assertEqual(text_type(b'<p>' + longtext + b'</p>'), 'text/html')
# See https://bugs.launchpad.net/bugs/487998
self.assertEqual(text_type(b' ' * 14 + HTML),
'text/html')
self.assertEqual(text_type(b' ' * 14 + b'abc'),
'text/plain')
self.assertEqual(text_type(b' ' * 14),
'text/plain')
def test_suite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
#include "iqrf/request/DPANodeRemoveBondRequest.h"
using namespace BeeeOn;
static const uint8_t REMOVE_BOND_CMD = 0x01;
DPANodeRemoveBondRequest::DPANodeRemoveBondRequest(uint8_t node):
DPARequest(
node,
DPA_NODE_PNUM,
REMOVE_BOND_CMD
)
{
}
|
// Copyright 2012 Viewfinder. All rights reserved.
// Author: Spencer Kimball.
#import <re2/re2.h>
#import "ActivityTable.h"
#import "AppState.h"
#import "CommentTable.h"
#import "DayTable.h"
#import "FullTextIndex.h"
#import "LazyStaticPtr.h"
#import "StringUtils.h"
namespace {
const DBRegisterKeyIntrospect kCommentKeyIntrospect(
DBFormat::comment_key(), NULL, [](Slice value) {
return DBIntrospect::FormatProto<CommentMetadata>(value);
});
const DBRegisterKeyIntrospect kCommentServerKeyIntrospect(
DBFormat::comment_server_key(), NULL, [](Slice value) {
return value.ToString();
});
LazyStaticPtr<RE2, const char*> kDocIDRE = { "([0-9]+),([0-9]+)" };
const string kCommentIndexName = "com";
} // namespace
////
// Comment
CommentTable_Comment::CommentTable_Comment(
AppState* state, const DBHandle& db, int64_t id)
: state_(state),
db_(db) {
mutable_comment_id()->set_local_id(id);
}
void CommentTable_Comment::MergeFrom(const CommentMetadata& m) {
// Some assertions that immutable properties don't change.
if (viewpoint_id().has_server_id() && m.viewpoint_id().has_server_id()) {
DCHECK_EQ(viewpoint_id().server_id(), m.viewpoint_id().server_id());
}
if (has_user_id() && m.has_user_id()) {
DCHECK_EQ(user_id(), m.user_id());
}
if (has_timestamp() && m.has_timestamp()) {
DCHECK_LT(fabs(timestamp() - m.timestamp()), 0.000001);
}
CommentMetadata::MergeFrom(m);
}
void CommentTable_Comment::MergeFrom(const ::google::protobuf::Message&) {
DIE("MergeFrom(Message&) should not be used");
}
void CommentTable_Comment::SaveHook(const DBHandle& updates) {
// Invalidate the activity which posted this comment, so that any
// saved changes are updated in the relevant conversation.
if (comment_id().has_server_id()) {
ActivityHandle ah = state_->activity_table()->GetCommentActivity(
comment_id().server_id(), updates);
if (ah.get()) {
state_->day_table()->InvalidateActivity(ah, updates);
}
}
}
////
// CommentTable
CommentTable::CommentTable(AppState* state)
: ContentTable<Comment>(
state, DBFormat::comment_key(), DBFormat::comment_server_key()),
comment_index_(new FullTextIndex(state_, kCommentIndexName)) {
}
CommentTable::~CommentTable() {
}
CommentHandle CommentTable::LoadComment(const CommentId& id, const DBHandle& db) {
CommentHandle ch;
if (id.has_local_id()) {
ch = LoadComment(id.local_id(), db);
}
if (!ch.get() && id.has_server_id()) {
ch = LoadComment(id.server_id(), db);
}
return ch;
}
void CommentTable::SaveContentHook(Comment* comment, const DBHandle& updates) {
vector<FullTextIndexTerm> terms;
comment_index_->ParseIndexTerms(0, comment->message(), &terms);
// Inline the viewpoint id into our "docid" so we can use this index to find viewpoints
// without extra database lookups.
const string docid(Format("%d,%d", comment->viewpoint_id().local_id(), comment->comment_id().local_id()));
comment_index_->UpdateIndex(terms, docid, FullTextIndex::TimestampSortKey(comment->timestamp()),
comment->mutable_indexed_terms(), updates);
}
void CommentTable::DeleteContentHook(Comment* comment, const DBHandle& updates) {
comment_index_->RemoveTerms(comment->mutable_indexed_terms(), updates);
}
void CommentTable::Search(const Slice& query, CommentSearchResults* results) {
ScopedPtr<FullTextQuery> parsed_query(FullTextQuery::Parse(query));
for (ScopedPtr<FullTextResultIterator> iter(comment_index_->Search(state_->db(), *parsed_query));
iter->Valid();
iter->Next()) {
const Slice docid = iter->doc_id();
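    // The docid was stored as "<viewpoint_id>,<comment_id>" (see SaveContentHook),
    // so split it back apart with kDocIDRE to avoid extra database lookups.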
Slice viewpoint_id_slice, comment_id_slice;
CHECK(RE2::FullMatch(docid, *kDocIDRE, &viewpoint_id_slice, &comment_id_slice));
const int64_t viewpoint_id = FastParseInt64(viewpoint_id_slice);
const int64_t comment_id = FastParseInt64(comment_id_slice);
results->push_back(std::make_pair(viewpoint_id, comment_id));
}
}
// local variables:
// mode: c++
// end:
|
package com.github.demidko.tokenizer
/**
 * Splits this string into lexical tokens.
 * @param skipSpaces when true, whitespace-only tokens are dropped from the result
 * @param namesSeparators symbols allowed inside names, e.g. lower_snake_case, css-case-var
 * @param unaryTokens symbols that always form a one-character token, e.g. {{{ -> {, {, {
 * @param stringsQuotes quote characters that delimit string literals: "string" 'string' `string`
 */
fun String.tokenize(
skipSpaces: Boolean = true,
namesSeparators: String = "_",
unaryTokens: String = ".,{}=:<>/\\",
stringsQuotes: String = "\"'`"
) = mutableListOf<String>().apply {
var startTokenIdx = 0
for (finishTokenIdx in 1 until length) {
if (isDiff(startTokenIdx, finishTokenIdx, namesSeparators, unaryTokens, stringsQuotes)) {
add(substring(startTokenIdx until finishTokenIdx), skipSpaces)
startTokenIdx = finishTokenIdx
}
}
add(substring(startTokenIdx until length), skipSpaces)
}
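/**
 * Minimal usage sketch (added for illustration, not part of the original API).
 * The sample input is an assumption; with the default arguments the call below
 * yields the two names, the '=' and the quoted literal as separate tokens,
 * i.e. [val, user_name, =, "Ada"].
 */
fun tokenizeExample() {
    val source = "val user_name = \"Ada\""   // hypothetical input
    val tokens = source.tokenize()           // defaults: skip spaces, '_' allowed in names
    println(tokens)
}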
private fun MutableList<String>.add(token: String, skipSpaces: Boolean) {
    if (!skipSpaces || token.isNotBlank()) add(token)
}
/**
 * Redefines what it means for two characters to be "different"; the string is split
 * into lexemes wherever this difference occurs.
 * @param typeIdx index of the character whose token is currently being built
 * @param otherIdx index of the character being examined
 * @param namesSeparators symbols allowed inside names, e.g. lower_snake_case, css-case-var
 * @param unaryTokens symbols that always form a one-character token, e.g. {{{ -> {, {, {
 * @param stringsQuotes quote characters that delimit string literals: "string" 'string' `string`
 * @return true if the character at otherIdx starts a new token
 */
private fun String.isDiff(
typeIdx: Int,
otherIdx: Int,
namesSeparators: String,
unaryTokens: String,
stringsQuotes: String,
): Boolean {
    // fetch both characters (an out-of-range index would throw)
val type = get(typeIdx)
val other = get(otherIdx)
// handle entities names
if (type.isLetter()) {
        // Allow the configured separators (e.g. hyphens and underscores) inside a name;
        // any other non-alphanumeric character ends the name.
return (other !in namesSeparators) && !other.isLetterOrDigit()
}
// handle characters that break anyway
if (type in unaryTokens) {
return true
}
// handle string literals
for (quote in stringsQuotes) {
if (type == quote) {
return getOrNull(otherIdx - 1) == quote
&& getOrNull(otherIdx - 2) != '\\'
&& otherIdx - 1 != typeIdx
}
}
// use the standard character type check
return type.category != other.category
}
|
extern crate rjson;
use std::vec::Vec;
use std::collections::BTreeMap;
use std::convert::From;
use rjson::Value;
use rjson::Array;
use rjson::Object;
use rjson::Null;
use rjson::parse;
enum JsonValue {
Null,
Number(f64),
Bool(bool),
String(String),
Array(Vec<JsonValue>),
Object(BTreeMap<String, JsonValue>)
}
struct JsonArray(Vec<JsonValue>);
struct JsonObject(BTreeMap<String, JsonValue>);
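// Wire the local JsonValue/JsonArray/JsonObject types into rjson's Null/Array/Object/Value
// traits (plus the From conversions below) so parse() can build this representation.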
impl Array<JsonValue, JsonObject, JsonValue> for JsonArray {
fn new() -> Self {
JsonArray(Vec::new())
}
fn push(&mut self, v: JsonValue) {
self.0.push(v)
}
}
impl Object<JsonValue, JsonArray, JsonValue> for JsonObject {
fn new<'b>() -> Self {
JsonObject(BTreeMap::new())
}
fn insert(&mut self, k: String, v: JsonValue) {
self.0.insert(k, v);
}
}
impl Null<JsonValue, JsonArray, JsonObject> for JsonValue {
fn new() -> Self {
JsonValue::Null
}
}
impl Value<JsonArray, JsonObject, JsonValue> for JsonValue {}
impl From<f64> for JsonValue {
fn from(v: f64) -> Self {
JsonValue::Number(v)
}
}
impl From<bool> for JsonValue {
fn from(v: bool) -> Self {
JsonValue::Bool(v)
}
}
impl From<String> for JsonValue {
fn from(v: String) -> Self{
JsonValue::String(v)
}
}
impl From<JsonArray> for JsonValue {
fn from(v: JsonArray) -> Self {
JsonValue::Array(v.0)
}
}
impl From<JsonObject> for JsonValue {
fn from(v: JsonObject) -> Self {
JsonValue::Object(v.0)
}
}
impl std::fmt::Debug for JsonValue {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match *self {
JsonValue::Null => f.write_str("null"),
JsonValue::String(ref v) => f.write_fmt(format_args!("\"{}\"", v)),
JsonValue::Number(ref v) => f.write_fmt(format_args!("{}", v)),
JsonValue::Bool(ref v) => f.write_fmt(format_args!("{}", v)),
JsonValue::Array(ref v) => f.write_fmt(format_args!("{:?}", v)),
JsonValue::Object(ref v) => f.write_fmt(format_args!("{:#?}", v))
}
}
}
impl std::fmt::Display for JsonValue {
fn fmt(&self, f:&mut std::fmt::Formatter) -> std::fmt::Result {
f.write_fmt(format_args!("{:?}", *self))
}
}
#[test]
fn test() {
let data = include_str!("./test.json");
let data_array: Vec<char> = data.chars().collect();
let mut index = 0;
let interpreted = parse::<JsonValue, JsonArray, JsonObject, JsonValue>(&*data_array, &mut index);
assert_eq!(index, data_array.len() - 1);
assert!(interpreted.is_some());
    // Reaching this point means the parser consumed the input and the data is there.
    // We should also verify that the decoded data is correct, but that check is omitted here.
println!("{}", interpreted.unwrap()); // run with --nocapture to check result.
}
|
#!/bin/sh
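# Uses pigpio's pipe interface (assumes the pigpio daemon is running):
# 'm 21 r' sets GPIO 21 to input mode, 'pud 21 u' enables its internal pull-up.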
echo "Set GPIO 21 to read/pullup"
echo 'm 21 r' > /dev/pigpio
echo 'pud 21 u' > /dev/pigpio
|
package one.mir.http
import akka.http.scaladsl.testkit.RouteTest
import org.scalatest.matchers.{MatchResult, Matcher}
import play.api.libs.json._
import one.mir.http.ApiMarshallers._
import one.mir.api.http.ApiError
trait ApiErrorMatchers { this: RouteTest =>
class ProduceError(error: ApiError) extends Matcher[RouteTestResult] {
override def apply(left: RouteTestResult): MatchResult = left ~> check {
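      // Check the HTTP status code first; only when it matches the expected error do we
      // compare the response body against the error's JSON representation.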
if (response.status != error.code) {
MatchResult(false,
"got {0} while expecting {1}, response was {2}",
"got expected status code {0}",
IndexedSeq(response.status, error.code, response.entity))
} else {
val responseJson = responseAs[JsObject]
MatchResult(responseJson == error.json,
"expected {0}, but instead got {1}",
"expected not to get {0}, but instead did get it",
IndexedSeq(error.json, responseJson))
}
}
}
def produce(error: ApiError) = new ProduceError(error)
}
|
# <span style="color: darkgreen">**How-to guides**</span>
|
import { Command } from "../Command";
import { Subforum } from "../../Store/firebase/forum/@Subforum";
export declare class UpdateSubforumDetails extends Command<{
subforumID: number;
subforumUpdates: Partial<Subforum>;
}> {
Validate_Early(): void;
oldData: Subforum;
newData: Subforum;
Prepare(): Promise<void>;
Validate(): Promise<void>;
GetDBUpdates(): {};
}
|
using LightBDD.Core.Formatting.ExceptionFormatting;
using LightBDD.Fixie2.Implementation;
using LightBDD.Framework.Configuration;
namespace LightBDD.Fixie2.Configuration
{
/// <summary>
/// Extensions allowing to apply test framework default configuration.
/// </summary>
public static class TestFrameworkConfigurationExtensions
{
/// <summary>
        /// Adds Fixie-specific stack trace member exclusions.
/// </summary>
/// <param name="formatter">Formatter.</param>
/// <returns>Formatter.</returns>
public static DefaultExceptionFormatter WithTestFrameworkDefaults(this DefaultExceptionFormatter formatter)
{
return formatter;
}
/// <summary>
/// Appends LightBDD.Fixie2 default scenario progress notifiers.
/// </summary>
public static ScenarioProgressNotifierConfiguration AppendFrameworkDefaultProgressNotifiers(this ScenarioProgressNotifierConfiguration configuration)
{
return configuration
.AppendNotifierProviders(FixieProgressNotifier.CreateImmediateScenarioProgressNotifier);
}
/// <summary>
/// Appends LightBDD.Fixie2 default feature progress notifiers.
/// </summary>
public static FeatureProgressNotifierConfiguration AppendFrameworkDefaultProgressNotifiers(this FeatureProgressNotifierConfiguration configuration)
{
return configuration.AppendNotifiers(FixieProgressNotifier.CreateFeatureProgressNotifier());
}
}
}
|
using System.Reflection;
using DataDynamics.PageFX.Common.Extensions;
using DataDynamics.PageFX.Flash.Abc;
using DataDynamics.PageFX.Flash.Core;
using DataDynamics.PageFX.Flash.IL;
namespace DataDynamics.PageFX.Flash.Avm
{
internal sealed class BuiltinTypes
{
private readonly AbcFile _abc;
private static readonly QName[] Names;
static BuiltinTypes()
{
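            // Build the QName table by reflecting over AvmTypeCode: each member maps to its
            // QNameAttribute value, or to a global-namespace QName named after the member.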
var fields = typeof(AvmTypeCode).GetFields(BindingFlags.Public | BindingFlags.Static);
int n = fields.Length;
Names = new QName[n];
for (int i = 0; i < n; ++i)
{
var field = fields[i];
var value = field.GetValue(null);
var index = (int)value;
var attr = field.GetAttribute<QNameAttribute>(false);
Names[index] = attr != null ? attr.Value : new QName(field.Name, KnownNamespace.Global);
}
}
private readonly AbcMultiname[] _types;
public static string GetFullName(AvmTypeCode typeCode)
{
int i = (int)typeCode;
return Names[i].FullName;
}
public BuiltinTypes(AbcFile abc)
{
_abc = abc;
_types = new AbcMultiname[Names.Length];
}
public AbcMultiname this[AvmTypeCode code]
{
get
{
int i = (int)code;
return _types[i] ?? (_types[i] = _abc.DefineName(Names[i]));
}
}
public AbcMultiname Void
{
get { return this[AvmTypeCode.Void]; }
}
public AbcMultiname Boolean
{
get { return this[AvmTypeCode.Boolean]; }
}
public AbcMultiname RealBoolean
{
get
{
if (AvmConfig.BooleanAsInt)
return Int32;
return Boolean;
}
}
public AbcMultiname Int8
{
get { return this[AvmTypeCode.Int8]; }
}
public AbcMultiname UInt8
{
get { return this[AvmTypeCode.UInt8]; }
}
public AbcMultiname Int16
{
get { return this[AvmTypeCode.Int16]; }
}
public AbcMultiname UInt16
{
get { return this[AvmTypeCode.UInt16]; }
}
public AbcMultiname Int32
{
get { return this[AvmTypeCode.Int32]; }
}
public AbcMultiname UInt32
{
get { return this[AvmTypeCode.UInt32]; }
}
public AbcMultiname Int64
{
get { return this[AvmTypeCode.Int64]; }
}
public AbcMultiname UInt64
{
get { return this[AvmTypeCode.UInt64]; }
}
public AbcMultiname Number
{
get { return this[AvmTypeCode.Number]; }
}
public AbcMultiname Float
{
get { return this[AvmTypeCode.Float]; }
}
public AbcMultiname Double
{
get { return this[AvmTypeCode.Double]; }
}
public AbcMultiname Decimal
{
get { return this[AvmTypeCode.Decimal]; }
}
public AbcMultiname String
{
get { return this[AvmTypeCode.String]; }
}
public AbcMultiname Object
{
get { return this[AvmTypeCode.Object]; }
}
public AbcMultiname Error
{
get { return this[AvmTypeCode.Error]; }
}
public AbcMultiname TypeError
{
get { return this[AvmTypeCode.TypeError]; }
}
public AbcMultiname Array
{
get { return this[AvmTypeCode.Array]; }
}
public AbcMultiname Function
{
get { return this[AvmTypeCode.Function]; }
}
public AbcMultiname Class
{
get { return this[AvmTypeCode.Class]; }
}
public AbcMultiname Namespace
{
get { return this[AvmTypeCode.Namespace]; }
}
public AbcMultiname QName
{
get { return this[AvmTypeCode.QName]; }
}
public AbcMultiname XML
{
get { return this[AvmTypeCode.XML]; }
}
public AbcMultiname XMLList
{
get { return this[AvmTypeCode.XMLList]; }
}
public AbcMultiname Vector
{
get { return this[AvmTypeCode.Verctor]; }
}
public InstructionCode GetCoercionInstructionCode(AbcMultiname type)
{
if (ReferenceEquals(type, Int32))
return InstructionCode.Coerce_i;
if (ReferenceEquals(type, UInt32))
return InstructionCode.Coerce_u;
if (ReferenceEquals(type, String))
return InstructionCode.Coerce_s;
if (ReferenceEquals(type, Boolean))
return InstructionCode.Coerce_b;
if (ReferenceEquals(type, Object))
return InstructionCode.Coerce_o;
return InstructionCode.Coerce;
}
}
}
|
<?php
namespace Tests\Feature\Domains\Composer;
use Composer\Package\Version\VersionSelector;
use Domains\Composer\InlineComposerDependency;
use Domains\Composer\NoInstallationCandidateFoundException;
use Domains\Composer\PackageVersionToInstallResolver;
use Domains\Composer\VersionSelectorFactory;
use Domains\CreateProjectForm\Sections\Metadata\PhpVersion;
use Domains\Laravel\RelatedPackages\Infrastructure\AwsSdk;
use Mockery\MockInterface;
use Tests\TestCase;
/**
* @covers Domains\Composer\NoInstallationCandidateFoundException
* @covers Domains\Composer\PackageVersionToInstallResolver
* @covers Domains\Composer\VersionSelectorFactory
*/
class VersionResolvingTest extends TestCase
{
/** @test */
public function it_throws_an_exception_for_unknown_packages(): void
{
$this->expectException(NoInstallationCandidateFoundException::class);
$versionSelector = $this->mock(
VersionSelector::class,
function (MockInterface $mock) {
$mock->shouldReceive('findBestCandidate')->andReturn(false);
},
);
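        // A VersionSelector whose findBestCandidate() returns false simulates a package with
        // no installable version, which should make the resolver throw.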
$resolver = new PackageVersionToInstallResolver($versionSelector);
$resolver->resolve(collect([
new InlineComposerDependency(
id: 'foo',
name: 'bar',
description: 'test',
href: 'https://google.com',
),
]));
}
/** @test */
public function it_can_resolve_versions_of_known_packages(): void
{
$factory = new VersionSelectorFactory();
$versionSelector = $factory->build(PhpVersion::v8_0);
$resolver = new PackageVersionToInstallResolver($versionSelector);
$versions = $resolver->resolve(collect([new AwsSdk()]));
$this->assertNotEmpty($versions);
}
}
|