```sh var = is_set handle ``` Returns true if the provided value is a set handle. ### Parameters The set handle. ### Return Value True if the provided value is a set handle. ### Examples ```sh handle = set_new 1 2 3 value = is_set ${handle} assert ${value} released = release ${handle} assert ${released} ```
import tensorflow as tf
import pandas as pd
import numpy as np
import numba
from sklearn.preprocessing import MinMaxScaler


def read_data():
    training_data = pd.read_csv('./consumption_train.csv', index_col=0, parse_dates=['timestamp'])
    test_data = pd.read_csv('./cold_start_test.csv', index_col=0, parse_dates=['timestamp'])
    meta_data = pd.read_csv('./meta.csv', index_col=0, parse_dates=['timestamp'])
    submission_format = pd.read_csv('./submission_format.csv', index_col='pred_id', parse_dates=['timestamp'])

    pred_windows = submission_format[['series_id', 'prediction_window']].drop_duplicates()
    test_data = test_data.merge(pred_windows, on='series_id')

    return training_data, test_data, submission_format, pred_windows


def _count_cold_start_days(subdf):
    """Get the number of times a certain cold-start period appears in the data."""
    return (subdf.series_id
                 .value_counts()
                 .divide(24)
                 .value_counts())


def create_lagged_features(df, lag=1, dropna=True):
    if not isinstance(df, pd.DataFrame):
        df = pd.DataFrame(df, columns=['consumption'])

    def _rename_lag(ser, j):
        ser.name = ser.name + f'_{j}'
        return ser

    # add a column lagged by 'i' steps
    for i in range(1, lag + 1):
        df = df.join(df.consumption.shift(i).pipe(_rename_lag, i))

    if dropna:
        df.dropna(inplace=True)

    return df


def prepare_training_data(consumption_series, lag):
    """Converts a series of consumption data into a lagged, scaled sample."""
    # Scale the training data; test (0, 1) and (-1, 1)
    scaler = MinMaxScaler(feature_range=(-1, 1))
    consumption_vals = scaler.fit_transform(consumption_series.values.reshape(-1, 1))

    # Convert consumption series to lagged features
    consumption_lagged = create_lagged_features(consumption_vals, lag=lag)

    # X, y format taking the first column (original time series)
    X = consumption_lagged.drop('consumption', axis=1).values
    y = consumption_lagged.consumption.values

    # TensorFlow expects 3-dimensional X
    X = X.reshape(X.shape[0], 1, X.shape[1])

    return X, y, scaler


def generate_hourly_forecast(num_pred_hours, consumption, model, scaler, lag):
    """
    Uses the last hour's prediction to generate the next one for num_pred_hours,
    initialized by the most recent cold-start data. Inverts the scaling of the
    predictions before returning them.
    """
    # Allocate the prediction frame
    preds_scaled = np.zeros(num_pred_hours)

    # Initial X is the last `lag` values from the cold start
    X = scaler.transform(consumption.values.reshape(-1, 1))[-lag:]

    # Forecast
    for i in range(num_pred_hours):
        # Predict the scaled value for the next time step (as a scalar)
        yhat = float(model.predict(X.reshape(1, 1, lag), batch_size=1))
        preds_scaled[i] = yhat

        # Update X to be the latest data plus the prediction
        X = pd.Series(X.ravel()).shift(-1).fillna(yhat).values

    # Revert the scale back to the original range
    hourly_preds = scaler.inverse_transform(preds_scaled.reshape(-1, 1)).ravel()
    return hourly_preds


@numba.jit(nopython=True)
def single_autocorrelation(series, lag):
    """Autocorrelation for a single data series."""
    s1 = series[lag:]
    s2 = series[:-lag]
    ms1 = np.mean(s1)
    ms2 = np.mean(s2)
    ds1 = s1 - ms1
    ds2 = s2 - ms2
    divider = np.sqrt(np.sum(ds1 * ds1)) * np.sqrt(np.sum(ds2 * ds2))
    return np.sum(ds1 * ds2) / divider if divider != 0 else 0


@numba.jit(nopython=True)
def batch_autocorrelation(data, lag, starts, ends, threshold, backoffset=0):
    """
    Calculate autocorrelation for a batch (many time series at once).

    Args:
        data: Time series, shape [a, b]
        lag: Autocorrelation lag
        starts: Start index for each series
        ends: End index for each series
        threshold: Minimum support (ratio of time series length to lag) to
            calculate a meaningful autocorrelation
        backoffset: Offset from the series end, days

    Returns:
        autocorrelation, shape [n_series]. If a series is too short
        (support less than threshold), its autocorrelation value is NaN.
    """
    n_series = data.shape[0]
    n_days = data.shape[1]
    max_end = n_days - backoffset
    corr = np.empty(n_series, dtype=np.float64)
    support = np.empty(n_series, dtype=np.float64)
    for i in range(n_series):
        series = data[i]
        end = min(ends[i], max_end)
        real_len = end - starts[i]
        support[i] = real_len / lag
        if support[i] > threshold:
            series = series[starts[i]:end]
            c_365 = single_autocorrelation(series, lag)
            c_364 = single_autocorrelation(series, lag - 1)
            c_366 = single_autocorrelation(series, lag + 1)
            # Average of the exact lag and its two nearest neighbors, for smoothness
            corr[i] = 0.5 * c_365 + 0.25 * c_364 + 0.25 * c_366
        else:
            corr[i] = np.NaN
    return corr


def return_log1p(series):
    return np.log1p(series)


def extract_dow(data):
    # NOTE: pd.date_range takes a frequency string via `freq`, not `periods`;
    # the original left this function unfinished, so returning the day-of-week
    # index is an assumption about its intent.
    features_days = pd.date_range(start=data.timestamp[0][0], end=data.timestamp[0][-1], freq='H')
    return features_days.dayofweek


# huber loss
def huber(true, pred, delta):
    loss = np.where(np.abs(true - pred) < delta,
                    0.5 * ((true - pred) ** 2),
                    delta * np.abs(true - pred) - 0.5 * (delta ** 2))
    return np.sum(loss)


# log cosh loss
def logcosh(true, pred):
    loss = np.log(np.cosh(pred - true))
    return np.sum(loss)
### 1.0.1

* bugfix: choked when parsing Less files with import statements; fixed by upgrading to doiuse 4.0.0

### 1.0.0

* update to doiuse 3.0.0

### 0.1.1

* update to postcss 6.0.1

### 0.1.0

* initial release
; Test case: evaluation of "$" inside macros and eager variables org #2000 db 8192 db 8193 db $ db 1 db 2 dw #ffff org #4000 loop: jr loop
fun main() {
    val numbers = listOf(1, 2, 5, 7, 8, 8, 8, 9, 6)
    val setOfNumbers = numbers.toSet()
    println("Set: ${setOfNumbers}")

    val set1 = setOf(1, 2, 3)
    val set2 = mutableSetOf(3, 2, 1)
    println("$set1 == $set2: ${set1 == set2}") // true (same set of items)

    println("Contains 10: ${setOfNumbers.contains(10)}")
}

/*
Set: [1, 2, 5, 7, 8, 9, 6]
[1, 2, 3] == [3, 2, 1]: true
Contains 10: false
*/
import React from 'react' import {Input, Box} from "@chakra-ui/react" import {filterNotes} from "../../redux/noteSlice" import { useDispatch } from 'react-redux' const Search = () => { const dispatch = useDispatch(); const handleChange = (e) => { dispatch(filterNotes(e.target.value)); } return ( <Box> <Input maxW={320} type="text" placeholder="Search" onChange={handleChange} bg="white" /> </Box> ) } export default Search;
import 'package:flutter/material.dart'; import 'package:my_shop_app/pages/cart_page.dart'; import 'package:my_shop_app/providers/products_provider.dart'; import 'package:my_shop_app/widgets/app_drawer.dart'; import 'package:my_shop_app/widgets/products_grid.dart'; import 'package:my_shop_app/widgets/badge.dart'; import 'package:my_shop_app/providers/cart.dart'; import 'package:provider/provider.dart'; enum Show { favorites, all, } class ProductsOverViewPage extends StatefulWidget { @override _ProductsOverViewPageState createState() => _ProductsOverViewPageState(); } class _ProductsOverViewPageState extends State<ProductsOverViewPage> { bool _showFavorites = false; var isInit = true; var isLoading = false; @override void didChangeDependencies() { if (isInit) { setState(() { isLoading = true; }); Provider.of<Products>(context).fetchData().then((_) { setState(() { isLoading = false; }); }); } isInit = false; super.didChangeDependencies(); } @override Widget build(BuildContext context) { return Scaffold( appBar: AppBar( title: Text('MyShop'), actions: [ PopupMenuButton( icon: Icon(Icons.more_vert), onSelected: (Show selectedValue) { switch (selectedValue) { case Show.favorites: setState(() { _showFavorites = true; }); break; case Show.all: setState(() { _showFavorites = false; }); break; } }, itemBuilder: (_) => [ PopupMenuItem( child: Text('only favorites'), value: Show.favorites, ), PopupMenuItem( child: Text('show all'), value: Show.all, ) ], ), Consumer<Cart>( builder: (_, cart, ch) => Badge( child: ch as Widget, value: cart.itemCount.toString(), ), child: IconButton( icon: Icon(Icons.shopping_cart), onPressed: () { Navigator.pushNamed(context, CartPage.route); }, ), ), ], ), drawer: AppDrawer(), body: isLoading == true ? Center( child: CircularProgressIndicator( color: Theme.of(context).accentColor, ), ) : ProductsGrid(_showFavorites), ); } }
export * from './InboundTransporter' export * from './OutboundTransporter' export * from './HttpOutboundTransporter'
#!/bin/sh

install_qt() {
    git clone --branch "5.15.2" "https://github.com/qt/qt5"
    cd qt5 || exit
    perl init-repository -f --module-subset="qtbase,qttools,qtsvg,qttranslations"
    ./configure -opensource -confirm-license -c++std c++17 \
        -nomake examples -nomake tests \
        -no-opengl -no-dbus -no-widgets -no-gui \
        -static -openssl-linked
    # Qt 5 configure generates Makefiles, so build with make (cmake is the Qt 6 workflow)
    make -j"$(nproc)"
    make install
    cd ..
}

install_qt
## "Тихий" режим У Svelte Easyroute есть третий режим - "silent". Вы можете использовать его, если не хотите менять URL в строке браузера. ```javascript export var router = new Router({ mode: "silent", routes: [ ... ] }) ``` У этого режима есть своя история маршрутов. Используйте эти два метода: ```javascript export let router router.back() // перемещает на один маршрут назад router.go(1) // перемещает на N маршрутов назад или вперёд ``` **Почему этот режим не использует History API?** Потому что History API не поддерживается в некоторых старых версиях браузеров. Однако, вы можете манипулировать историей в навигационных хуках :)
const { User } = require('../models'); const userData = [ { user_name:'Olivia', contact_number: '8047649572', user_id: 1, }, { user_name:'Hilary', contact_number: '8593793769', user_id: 2, }, { user_name:'Patrick', contact_number: '8701734965', user_id: 3, }, { user_name:'Victor', contact_number: '7348569289', user_id: 4, }, { user_name:'Sarah', contact_number: '9087369284', user_id: 5, }, { user_name:'John', contact_number: '4803778942', user_id: 6, }, { user_name:'Alex', contact_number: '3208904364', user_id: 7, }, { user_name:'Tommy', contact_number: '9409840759', user_id: 8, }, ]; const seedUser = () => User.bulkCreate(userData); module.exports = seedUser;
module TableMappings.BancosBaseDb ( getOne, save, update, delete, selOneSql, savSql, updSql, delSql, selOneCmd, savCmd, updCmd, delCmd ) where import Database.HDBC import DataAccess.Commands import DataAccess.Entities import TableMappings.Types.Banco import DataAccess.PageResult import Data.UUID instance ToType Banco where toType row = Banco { idCuenta = fromSql (row!!0)::Int, guidCuenta = read (fromSql (row!!1)::String), banco = fromSql (row!!2)::String, clabe = fromSql (row!!3)::Maybe String, nocuenta = fromSql (row!!4)::Maybe String, beneficiario = fromSql (row!!5)::String, emailNotificacion = fromSql (row!!6)::Maybe String, activo = fromSql (row!!9)::Bool } instance FromType Banco where fromType p = [toSql $ banco p, toSql $ clabe p, toSql $ nocuenta p, toSql $ beneficiario p, toSql $ emailNotificacion p, toSql $ activo p, toSql $ toString (guidCuenta p), toSql $ idCuenta p] selOneSql :: SqlString selOneSql = "SELECT * FROM cuentas where id = ?" selOneCmd :: Int -> Command selOneCmd key = Command selOneSql [toSql key] savSql :: SqlString savSql = "INSERT INTO cuentas \ \ (banco, clabe, nocuenta, beneficiario, emailnotificacion, nombre, efectivo, activo, guid) \ \ values (?,?,?,?,?,'',false,?,?)" savCmd :: Banco -> Command savCmd b = Command savSql (init $ fromType b) updSql :: SqlString updSql = "UPDATE cuentas SET \ \ banco=?, clabe=?, nocuenta=?, beneficiario=?, emailnotificacion=?, activo=? \ \ where guid=? and id=?" updCmd :: Banco -> Command updCmd b = Command updSql (fromType b) delSql :: SqlString delSql = "UPDATE cuentas SET activo=? where id=?" delCmd :: Int -> Command delCmd key = Command delSql [toSql False, toSql key] getOne :: Int -> IO (Maybe Banco) getOne = selectOne . selOneCmd save :: (Maybe Banco) -> IO Integer save = persist savCmd update :: (Maybe Banco) -> IO Integer update = persist updCmd delete :: Int -> IO Integer delete = execNonSelQuery . delCmd
using System.Collections;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
using Photon.Realtime;
using Photon.Pun;
using ExitGames.Client.Photon;

public class SceneLoader : MonoBehaviour
{
    public GameObject LoadingScreen;

    public void NowLoading()
    {
        LoadingScreen.SetActive(true);
    }

    public void LoadingOver()
    {
        LoadingScreen.SetActive(false);
    }

    public void LoadScene(string sceneName)
    {
        StartCoroutine(loadAsyncWithoutSfx(sceneName));
    }

    public void LoadScene(string sceneName, float sfxLength)
    {
        StartCoroutine(loadAsyncWithSfx(sceneName, sfxLength));
    }

    IEnumerator loadAsyncWithoutSfx(string sceneName)
    {
        NowLoading();
        PhotonNetwork.LoadLevel(sceneName);
        // Poll each frame until Photon reports the level as loaded (0.9 marks
        // completion) instead of checking the progress only once.
        while (PhotonNetwork.LevelLoadingProgress < 0.9f)
        {
            yield return null;
        }
        LoadingOver();
    }

    IEnumerator loadAsyncWithSfx(string sceneName, float sfxLength)
    {
        yield return new WaitForSeconds(sfxLength);
        NowLoading();
        PhotonNetwork.LoadLevel(sceneName);
        while (PhotonNetwork.LevelLoadingProgress < 0.9f)
        {
            yield return null;
        }
        LoadingOver();
    }
}
package com.aqrlei.helper.log.engine

import com.aqrlei.helper.log.config.ILogConfig
import com.aqrlei.helper.log.control.ILogControl
import com.aqrlei.helper.log.printer.ILogPrinter

/**
 * created by AqrLei on 2020/5/19
 */
interface ILogEngine {

    fun v(tag: String, msg: String, tr: Throwable? = null)
    fun v(msg: String, tr: Throwable? = null)

    fun d(tag: String, msg: String, tr: Throwable? = null)
    fun d(msg: String, tr: Throwable? = null)

    fun i(tag: String, msg: String, tr: Throwable? = null)
    fun i(msg: String, tr: Throwable? = null)

    fun w(tag: String, msg: String, tr: Throwable? = null)
    fun w(msg: String, tr: Throwable? = null)

    fun e(tag: String, msg: String, tr: Throwable? = null)
    fun e(msg: String, tr: Throwable? = null)

    fun config(logConfig: ILogConfig): ILogEngine

    fun logConfig(): ILogConfig

    fun logControl(): ILogControl

    fun logPrinters(): Set<ILogPrinter>

    fun setLogPrinters(vararg logPrinter: ILogPrinter): ILogEngine
}
package de.htwg.zeta.common.models.project.gdsl.style case class Line( color: Color, style: LineStyle, width: Int ) object Line { val defaultColor: Color = Color.defaultColor val defaultStyle: LineStyle = Solid() val defaultWidth: Int = 1 val defaultLine: Line = Line( defaultColor, defaultStyle, defaultWidth ) } sealed trait LineStyle case class Dotted() extends LineStyle case class Solid() extends LineStyle case class Dashed() extends LineStyle
using System.Linq;
using System.Collections.Generic;
using System.Threading.Tasks;
using FluentValidation;
using Insolvency.CalculationsEngine.Redundancy.BL.DTOs.APPA;
using Insolvency.CalculationsEngine.Redundancy.Common.Extensions;
using System.Collections;
using Insolvency.CalculationsEngine.Redundancy.BL.DTOs.Common;

namespace Insolvency.CalculationsEngine.Redundancy.API.Infrastructure.Middlewares.Validators
{
    public class APPACalculationRequestValidator : AbstractValidator<APPACalculationRequestModel>
    {
        public APPACalculationRequestValidator()
        {
            RuleForEach(req => req.Ap)
                .SetValidator(new ArrearsOfPayCalculationRequestValidator());
            RuleFor(req => req.Pa)
                .SetValidator(new ProtectiveAwardCalculationRequestValidator())
                .When(req => req.Pa != null);

            RuleFor(req => req.Ap)
                .NotNull()
                .WithMessage("Neither Arrears Of Pay nor Protective Award data has been provided")
                .NotEmpty()
                .WithMessage("Neither Arrears Of Pay nor Protective Award data has been provided")
                .When(req => req.Pa == null);

            RuleFor(req => req.Ap)
                .Must(NoOverlappingPeriodsForRp1OrRp14a)
                .WithMessage("The same day appears in more than one Arrears Of Pay period")
                .When(req => req.Ap != null);

            RuleFor(req => req)
                .Must(RP1DataPresent)
                .WithMessage("Arrears Of Pay RP1 data has not been provided")
                .When(req => req.Ap != null);

            RuleFor(req => req)
                .Must(RP14aDataPresent)
                .WithMessage("Arrears Of Pay RP14a data has not been provided")
                .When(req => req.Ap != null);
        }

        private bool NoOverlappingPeriodsForRp1OrRp14a(List<ArrearsOfPayCalculationRequestModel> apList)
        {
            return NoOverlappingPeriods(apList, InputSource.Rp1) &&
                   NoOverlappingPeriods(apList, InputSource.Rp14a);
        }

        private bool NoOverlappingPeriods(List<ArrearsOfPayCalculationRequestModel> fullList, string inputSource)
        {
            // test Rp1/Rp14a separately
            var apList = fullList.Where(r => r.InputSource == inputSource).ToArray();

            for (int i = 0; i < apList.Length; i++)
            {
                for (int j = 0; j < apList.Length; j++)
                {
                    if (i != j && apList[i].UnpaidPeriodFrom.Date.DoRangesIntersect(
                        apList[i].UnpaidPeriodTo.Date,
                        apList[j].UnpaidPeriodFrom.Date,
                        apList[j].UnpaidPeriodTo.Date).Result)
                        return false;
                }
            }
            return true;
        }

        private bool RP1DataPresent(APPACalculationRequestModel appa)
        {
            return appa.Ap.Count(x => x.InputSource == InputSource.Rp14a) == 0 ||
                   appa.Ap.Count(x => x.InputSource == InputSource.Rp1) > 0 ||
                   appa.Rp1NotRequired;
        }

        private bool RP14aDataPresent(APPACalculationRequestModel appa)
        {
            return appa.Ap.Count(x => x.InputSource == InputSource.Rp1) == 0 ||
                   appa.Ap.Count(x => x.InputSource == InputSource.Rp14a) > 0 ||
                   appa.Rp14aNotRequired;
        }
    }
}
=begin
1. Ask for the weight
2. Ask for the person's height
3. Calculate the BMI
4. Determine the weight status
   a. if BMI < 18.5, underweight
   b. if BMI is between 18.5 and 24.99, normal
   c. if BMI >= 25, overweight
5. Print the BMI
=end

print "Enter your weight (kg): "
weight = gets.chomp.to_f

print "Enter your height (m): "
height = gets.chomp.to_f

bmi = weight / height**2

puts
print "#{bmi.round(2)}"
if bmi < 18.5
  puts " (Underweight)"
elsif bmi < 25
  puts " (Normal)"
else
  puts " (Overweight)"
end
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Team8Project.Common.Enums;
using Team8Project.Contracts;
using Team8Project.Core;
using Team8Project.Models.Characters;

namespace Team8Project.Tests.Core.FactoryTests
{
    [TestClass]
    public class Factory_Should
    {
        [TestMethod]
        public void ReturnHero_WhenCreateWarriorIsCalled()
        {
            // Arrange
            var factory = new Factory();

            // Act
            var hero = factory.CreateWarrior("Hero", HeroClass.Warrior, 200, 10, 15);

            // Assert
            Assert.IsInstanceOfType(hero, typeof(IHero));
        }

        [TestMethod]
        public void ReturnHero_WhenCreateAssasinIsCalled()
        {
            // Arrange
            var factory = new Factory();

            // Act
            var hero = factory.CreateAssasin("Hero", HeroClass.Assasin, 200, 10, 15);

            // Assert
            Assert.IsInstanceOfType(hero, typeof(IHero));
        }

        [TestMethod]
        public void ReturnHero_WhenCreateMageIsCalled()
        {
            // Arrange
            var factory = new Factory();

            // Act
            var hero = factory.CreateMage("Hero", HeroClass.Mage, 200, 10, 15);

            // Assert
            Assert.IsInstanceOfType(hero, typeof(IHero));
        }

        [TestMethod]
        public void ReturnHero_WhenCreateClericIsCalled()
        {
            // Arrange
            var factory = new Factory();

            // Act
            var hero = factory.CreateCleric("Hero", HeroClass.Cleric, 200, 10, 15);

            // Assert
            Assert.IsInstanceOfType(hero, typeof(IHero));
        }
    }
}
Program Daynames; {$apptype console} uses ArrayP in '..\source\DAYNAMES\ArrayP.pas'; begin execute; end.
/* eslint-disable max-lines, @typescript-eslint/no-use-before-define */ import type { Styles } from 'nightingale-types'; export interface FormatObjectOptions { padding?: string; maxDepth?: number; } export type StyleFn = (styles: Styles, value: string) => string; export type ObjectStyles<Keys extends string = string> = Record<Keys, Styles>; const noStyleFn: StyleFn = (styles: Styles, value: string): string => value; interface InternalFormatParams { padding: string; depth: number; maxDepth: number; objects: Set<unknown>; } interface FormattedKey { stringKey: string; formattedKey: string; } type FormatKey<Key> = ( key: Key, styleFn: StyleFn, internalFormatParams: InternalFormatParams, ) => FormattedKey; interface Value<Key> { key: Key; value: unknown; } interface FormattedValue { stringValue: string; formattedValue: string; } type Values<Key> = Value<Key>[]; interface InternalFormatIteratorParams<Key> { prefix: string; suffix: string; formatKey: FormatKey<Key>; prefixSuffixSpace?: string; } function tryStringify(arg: unknown): string { try { return JSON.stringify(arg).replace(/\\n/g, '\n'); } catch { return '[Circular]'; } } const sameRawFormattedValue = (value: string): FormattedValue => ({ stringValue: value, formattedValue: value, }); function internalFormatValue( value: unknown, styleFn: StyleFn, styles: Styles, { padding, depth, maxDepth, objects }: InternalFormatParams, ): FormattedValue { const typeofValue = typeof value; if (!styles) { if (value == null) { styles = ['cyan']; } else { switch (typeofValue) { case 'undefined': styles = ['cyan']; break; case 'boolean': styles = ['green']; break; case 'number': styles = ['yellow']; break; case 'bigint': styles = ['red']; break; case 'string': styles = ['orange']; break; case 'symbol': styles = ['magenta']; break; case 'object': case 'function': break; } } } let stringValue: string; if (value === null) { stringValue = 'null'; } else if (value === undefined) { stringValue = 'undefined'; } else if (typeofValue === 'boolean') { // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-call stringValue = (value as any).toString() as string; } else if ((value as () => unknown).constructor === Object) { if (depth >= maxDepth) { stringValue = '{Object...}'; } else { return internalFormatObject( value as Record<string, unknown>, styleFn, undefined, { padding, depth: depth + 1, maxDepth, objects, }, ); } } else if (Array.isArray(value)) { if (depth >= maxDepth) { stringValue = '[Array...]'; } else { return internalFormatArray(value, styleFn, { padding, depth: depth + 1, maxDepth, objects, }); } } else if (value instanceof Error) { const stack = value.stack; stringValue = stack?.startsWith(value.message) ? 
stack : `${value.message}\n${stack || ''}`; } else if (value instanceof Map) { const name = value.constructor.name; if (depth >= maxDepth) { stringValue = `{${name}...}`; } else { return internalFormatMap(name, value, styleFn, { padding, depth: depth + 1, maxDepth, objects, }); } } else if (typeofValue === 'bigint') { stringValue = (value as bigint).toString(); } else if (typeofValue === 'symbol') { stringValue = (value as symbol).toString(); } else if (value instanceof Set) { const name = value.constructor.name; if (depth >= maxDepth) { stringValue = `{${name}...}`; } else { return internalFormatSet(name, value, styleFn, { padding, depth: depth + 1, maxDepth, objects, }); } } else if (value instanceof WeakMap) { stringValue = '{WeakMap...}'; } else if (value instanceof WeakSet) { stringValue = '{WeakSet...}'; } else { stringValue = tryStringify(value); } const formattedValue = styleFn(styles, stringValue); return { stringValue, formattedValue, }; } const separator = ','; const internalFormatKey: FormatKey<string> = ( key: string, styleFn: StyleFn, internalFormatParams: InternalFormatParams, ): FormattedKey => { return { stringKey: `${key}: `, formattedKey: `${styleFn(['gray-light', 'bold'], `${key}:`)} `, }; }; const internalNoKey: FormatKey<undefined> = ( key: string | undefined, styleFn: StyleFn, internalFormatParams: InternalFormatParams, ): FormattedKey => { return { stringKey: '', formattedKey: '' }; }; const internalFormatMapKey: FormatKey<unknown> = ( key: unknown, styleFn: StyleFn, internalFormatParams: InternalFormatParams, ): FormattedKey => { const { stringValue, formattedValue } = internalFormatValue( key, noStyleFn, undefined, internalFormatParams, ); return { stringKey: `${stringValue} => `, formattedKey: `${styleFn(['gray-light', 'bold'], `${formattedValue}:`)} `, }; }; const internalFormatIterator = <Key>( values: Values<Key>, styleFn: StyleFn, objectStyles: ObjectStyles | undefined, { padding, depth, maxDepth, objects }: InternalFormatParams, { prefix, suffix, prefixSuffixSpace = ' ', formatKey, }: InternalFormatIteratorParams<Key>, ): FormattedValue => { let breakLine = false; const formattedSeparator = (): string => styleFn(['gray'], separator); const valuesMaxIndex = values.length - 1; const formattedValues: FormattedValue[] = values.map( ({ key, value }, index: number) => { const nextDepth = depth + 1; const internalFormatParams = { padding, depth: nextDepth, maxDepth, objects, }; // key must be formatted before value (browser-formatter needs order) const { stringKey, formattedKey } = formatKey( key, styleFn, internalFormatParams, ); let { stringValue, formattedValue } = internalFormatValue( value, styleFn, key && objectStyles ? objectStyles[key as unknown as string] : undefined, internalFormatParams, ); if ( stringValue && (stringValue.length > 80 || stringValue.includes('\n')) ) { breakLine = true; stringValue = stringValue.replace(/\n/g, `\n${padding}`); formattedValue = formattedValue.replace(/\n/g, `\n${padding}`); } return { stringValue: stringKey + stringValue + (index === valuesMaxIndex ? '' : separator), formattedValue: formattedKey + formattedValue + (index === valuesMaxIndex ? '' : formattedSeparator()), // note: we need to format the separator for each values for browser-formatter }; }, ); return { stringValue: prefix + formattedValues .map( breakLine ? (v) => `\n${padding}${v.stringValue}` : (fv) => fv.stringValue, ) .join(breakLine ? '\n' : ' ') + suffix, formattedValue: `${prefix}${ breakLine ? '' : prefixSuffixSpace }${formattedValues .map( breakLine ? 
(v) => `\n${padding}${v.formattedValue}` : (v) => v.formattedValue, ) .join(breakLine ? '' : ' ')}${ breakLine ? ',\n' : prefixSuffixSpace }${suffix}`, }; }; function internalFormatObject( object: Record<string, unknown>, styleFn: StyleFn, objectStyles: ObjectStyles | undefined, { padding, depth, maxDepth, objects }: InternalFormatParams, ): FormattedValue { if (objects.has(object)) { return sameRawFormattedValue('{Circular Object}'); } const keys: string[] = Object.keys(object); if (keys.length === 0) { return sameRawFormattedValue('{}'); } objects.add(object); const result = internalFormatIterator( keys.map((key) => ({ key, value: object[key] })), styleFn, objectStyles, { padding, depth, maxDepth, objects }, { prefix: '{', suffix: '}', formatKey: internalFormatKey }, ); objects.delete(object); return result; } function internalFormatMap( name: string, map: Map<unknown, unknown>, styleFn: StyleFn, { padding, depth, maxDepth, objects }: InternalFormatParams, ): FormattedValue { if (objects.has(map)) { return sameRawFormattedValue(`{Circular ${name}}`); } const keys = [...map.keys()]; if (keys.length === 0) { return sameRawFormattedValue(`${name} {}`); } objects.add(map); const result = internalFormatIterator( keys.map((key) => ({ key, value: map.get(key) })), styleFn, undefined, { padding, depth, maxDepth, objects }, { prefix: `${name} {`, suffix: '}', formatKey: internalFormatMapKey }, ); objects.delete(map); return result; } function internalFormatArray( array: unknown[], styleFn: StyleFn, { padding, depth, maxDepth, objects }: InternalFormatParams, ): FormattedValue { if (objects.has(array)) { return sameRawFormattedValue('{Circular Array}'); } if (array.length === 0) { return sameRawFormattedValue('[]'); } objects.add(array); const result = internalFormatIterator( array.map((value) => ({ key: undefined, value })), styleFn, undefined, { padding, depth, maxDepth, objects }, { prefix: '[', suffix: ']', prefixSuffixSpace: '', formatKey: internalNoKey, }, ); objects.delete(array); return result; } function internalFormatSet( name: string, set: Set<unknown>, styleFn: StyleFn, { padding, depth, maxDepth, objects }: InternalFormatParams, ): FormattedValue { if (objects.has(set)) { return sameRawFormattedValue(`{Circular ${name}}`); } const values = [...set.values()]; if (values.length === 0) { return sameRawFormattedValue(`${name} []`); } objects.add(set); const result = internalFormatIterator( values.map((value) => ({ key: undefined, value })), styleFn, undefined, { padding, depth, maxDepth, objects }, { prefix: `${name} [`, suffix: ']', formatKey: internalNoKey }, ); objects.delete(set); return result; } export function formatObject( object: Record<string, unknown>, styleFn: StyleFn = noStyleFn, objectStyles?: ObjectStyles, { padding = ' ', maxDepth = 10 }: FormatObjectOptions = {}, ): string { const { formattedValue: result } = internalFormatObject( object, styleFn, objectStyles, { padding, maxDepth, depth: 0, objects: new Set(), }, ); if (result === '{}') { return ''; } return result; }
import express from 'express'
import userRoutes from './user.route'
import listRoutes from './list.route'
import authRoutes from './auth.route'
import adminRoutes from './admin.route'

const router = express.Router() // eslint-disable-line new-cap

router.get('/test', (req, res) => res.send('OK'))

// mount user routes at /user
router.use('/user', userRoutes)
router.use('/list', listRoutes)

// mount auth routes at /auth
router.use('/auth', authRoutes)
router.use('/admin', adminRoutes)

export default router
import tensorflow as tf class siamese: # Create model def __init__(self): self.x1 = tf.placeholder(tf.float32, [None, 784]) self.x2 = tf.placeholder(tf.float32, [None, 784]) with tf.variable_scope("siamese") as scope: self.o1 = self.network(self.x1) scope.reuse_variables() self.o2 = self.network(self.x2) # Create loss self.y_ = tf.placeholder(tf.float32, [None]) self.loss = self.loss_with_spring() def network(self, x): weights = [] fc1 = self.fc_layer(x, 1024, "fc1") ac1 = tf.nn.relu(fc1) fc2 = self.fc_layer(ac1, 1024, "fc2") ac2 = tf.nn.relu(fc2) fc3 = self.fc_layer(ac2, 2, "fc3") return fc3 def fc_layer(self, bottom, n_weight, name): assert len(bottom.get_shape()) == 2 n_prev_weight = bottom.get_shape()[1] initer = tf.truncated_normal_initializer(stddev=0.01) W = tf.get_variable(name+'W', dtype=tf.float32, shape=[n_prev_weight, n_weight], initializer=initer) b = tf.get_variable(name+'b', dtype=tf.float32, initializer=tf.constant(0.01, shape=[n_weight], dtype=tf.float32)) fc = tf.nn.bias_add(tf.matmul(bottom, W), b) return fc def loss_with_spring(self): margin = 5.0 labels_t = self.y_ labels_f = tf.subtract(1.0, self.y_, name="1-yi") # labels_ = !labels; eucd2 = tf.pow(tf.subtract(self.o1, self.o2), 2) eucd2 = tf.reduce_sum(eucd2, 1) eucd = tf.sqrt(eucd2+1e-6, name="eucd") C = tf.constant(margin, name="C") # yi*||CNN(p1i)-CNN(p2i)||^2 + (1-yi)*max(0, C-||CNN(p1i)-CNN(p2i)||^2) pos = tf.multiply(labels_t, eucd2, name="yi_x_eucd2") # neg = tf.mul(labels_f, tf.sub(0.0,eucd2), name="yi_x_eucd2") # neg = tf.mul(labels_f, tf.maximum(0.0, tf.sub(C,eucd2)), name="Nyi_x_C-eucd_xx_2") neg = tf.multiply(labels_f, tf.pow(tf.maximum(tf.subtract(C, eucd), 0), 2), name="Nyi_x_C-eucd_xx_2") losses = tf.add(pos, neg, name="losses") loss = tf.reduce_mean(losses, name="loss") return loss def loss_with_step(self): margin = 5.0 labels_t = self.y_ labels_f = tf.subtract(1.0, self.y_, name="1-yi") # labels_ = !labels; eucd2 = tf.pow(tf.subtract(self.o1, self.o2), 2) eucd2 = tf.reduce_sum(eucd2, 1) eucd = tf.sqrt(eucd2+1e-6, name="eucd") C = tf.constant(margin, name="C") pos = tf.multiply(labels_t, eucd, name="y_x_eucd") neg = tf.multiply(labels_f, tf.maximum(0.0, tf.subtract(C, eucd)), name="Ny_C-eucd") losses = tf.add(pos, neg, name="losses") loss = tf.reduce_mean(losses, name="loss") return loss
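Since the class only builds the graph, it may help to see the training plumbing around it. A minimal sketch, assuming TensorFlow 1.x semantics (placeholders and sessions, as the class above uses) and the `siamese` class in scope; the batch of random pairs is made up:

```python
# Minimal sketch: one training step of the siamese network on random data.
import numpy as np
import tensorflow as tf

net = siamese()
train_step = tf.train.GradientDescentOptimizer(0.01).minimize(net.loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    x1 = np.random.rand(32, 784).astype(np.float32)  # first images of the pairs
    x2 = np.random.rand(32, 784).astype(np.float32)  # second images of the pairs
    y = np.random.randint(0, 2, size=32).astype(np.float32)  # 1 = same class
    _, loss_val = sess.run([train_step, net.loss],
                           feed_dict={net.x1: x1, net.x2: x2, net.y_: y})
    print(loss_val)
```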
<?php namespace Jiromm\Battle\Exceptions; class ArmyBuilderException extends BattleException { }
import React, { useState, useEffect } from 'react'; import Link from 'next/link'; import { connect } from 'react-redux'; import { sendErrorMessage, getStats } from '../../redux/actions'; import { IContext } from '../../@types'; import { redirectIfNotAuthenticated } from '../../utils/session'; import { err } from '../../utils'; import { Role } from '../../../shared/user.enums'; type Props = { flashError: (msg: string, ctx?: IContext) => void }; export const Dashboard = ({ flashError }: Props) => { const [state, setState] = useState({ total: 0, pending: 0, accepted: 0, rejected: 0, waitlist: 0, loading: true }); useEffect(() => { getStats() .then(stats => setState({ ...stats, loading: false })) .catch(error => { setState({ ...state, loading: false }); flashError(err(error)); }); }, []); if (state.loading) return <span>Loading...</span>; return ( <div> <h3>Dashboard</h3> <br /> <br /> Application Stats: <br /> Total: {state.total} <br /> Pending: {state.pending} <br /> Accepted: {state.accepted} <br /> Rejected: {state.rejected} <br /> Waitlist: {state.waitlist} <br /> <br /> Admin Actions: <br /> <Link href="/applications" prefetch> <a>Applications</a> </Link> <br /> <Link href="/announcement"> <a>Post Announcement</a> </Link> <br /> <Link href="/announcements"> <a>View Announcements</a> </Link> <br /> <Link href="/checkin"> <a>Check In</a> </Link> </div> ); }; Dashboard.getInitialProps = async (ctx: IContext) => { if (redirectIfNotAuthenticated('/', ctx, { roles: [Role.EXEC] })) return {}; }; export const DashboardPage = connect( null, { flashError: sendErrorMessage } )(Dashboard);
---
title: Chapter1 - Introduction(1)
categories:
- OS
last_modified_at: 2022-05-07
tags:
- OS
toc: true
toc_sticky: true
---

### Chapter 1 - Introduction(1)

What is an OS? Simply put, a program that sits between the computer hardware and the user.

![image](https://user-images.githubusercontent.com/87630540/167258678-b49d3472-710c-450e-a81f-2cd6f944975d.png)

WHY? To make the computer system easier to use (convenience), and to use the computer hardware effectively (efficiency).

#### OS Definition (depending on the point of view)

1. OS is a resource allocator - decides how to allocate resources effectively and fairly among conflicting service requests
2. OS is a control program - controls the order in which the many user programs waiting for execution are run, e.g. the CPU scheduler

#### Computer System Organization

![image](https://user-images.githubusercontent.com/87630540/167258940-861e7d9c-c8f1-45f2-89af-621919ae6db1.png)

One or more CPUs and device controllers are connected through a common bus that provides access to shared memory.

- While the CPU runs a program, the disk controller can read/write the disk - that is, they can run concurrently
- Each device controller has its own local buffer; the CPU moves data from main memory to the local buffers and from the local buffers to main memory
- The CPU's commands are received through the controller's registers
- The device controller fetches the data the CPU requested from the device and stores it in its local buffer
- The device controller then tells the CPU "the command you gave me is done" by raising an interrupt

**Event-driven system vs. controller-driven system?**

- Event-driven system: does not work on its own initiative (it does nothing without external stimulus); it provides a service only when a service request arrives from outside
- Controller-driven system: runs according to the order of the algorithm the user wrote

Interrupt (the most important means of notifying the CPU that an event occurred)

Software interrupt - for example, systemcall()

- Trap: an interrupt raised to call an OS function; the execution mode must first switch from user to kernel
- Exception: an interrupt raised to handle an exceptional condition

![image](https://user-images.githubusercontent.com/87630540/167259006-6def21db-85f8-4518-a2f6-699fe3d070a3.png)

ISR (interrupt service routine): control is transferred to the interrupt service routine through the interrupt vector

Interrupt vector: a table that keeps, per interrupt, the address of the handler for the work to be done when that interrupt occurs

Running snapshot: all the information about the state that was executing at the moment the interrupt was raised (the red circle); after the ISR runs and returns, the original snapshot is restored

**Interrupt Handling**

How does the CPU determine which interrupt occurred?

polling: the CPU is only told that an interrupt was raised, not who raised it

![image](https://user-images.githubusercontent.com/87630540/167259074-e95d25b2-24b2-43c5-9947-71c5fca2fd2d.png)

vectored interrupt system: when a device raises an interrupt, the CPU acknowledges it and then asks the device to "show me your interrupt vector"

**Interrupt Timeline**

![image](https://user-images.githubusercontent.com/87630540/167259095-61b598db-8c59-42cf-b743-3a25e42cdedc.png)

Easy enough, right? While the ISR is running, the user program is temporarily suspended.

#### Storage-Device Hierarchy

![image](https://user-images.githubusercontent.com/87630540/167259117-e35ec274-997b-4152-b0b4-b7f91c833585.png)

- registers: the fastest memory, inside the CPU
- cache: when the CPU reads data from main memory, it is not discarded after one use but kept in the cache, so the next time the same location is accessed the cache is checked first; if the data is there, it is taken from the cache without going to main memory. A cache can sit between any two layers, and in this role it compensates for the speed gap
- solid-state disk (SSD): faster than the hard disk used as secondary storage, and its contents survive power-off (nonvolatile)
- hard disk: the most widely used secondary storage, a space usable as an extension of main memory

#### Caching (copying information into faster storage)

- can exist between any two layers
- the better the caching performance, the higher the cost
- data copied from a slower device is not used once and thrown away; it is kept in storage faster than that device, checked first, and used directly when present
- cache size and the replacement policy strongly affect cache performance (replacement evicts unused data to make room when the cache is full)

**To start an interrupt-driven I/O operation**

1. The device driver inside the OS, running on the CPU, issues a command
2. The command is stored in a register inside the device controller
3. The device controller examines the register contents and interprets the command
4. The device controller transfers data between its local buffer and the external device
5. When the transfer is complete, the device controller raises an interrupt to the device driver to report that the command is finished
6. The device driver passes the returned result (returned data or a pointer to the data, status info) to the OS

**DMA (Direct Memory Access Structure)**

A way for the device controller to read/write directly between main memory and the external device, without CPU intervention

![image](https://user-images.githubusercontent.com/87630540/167259302-c8de14ef-5e08-4ac8-848f-7cdcfd09a7e9.png)

How a program basically runs in the von Neumann architecture:

- Instructions and data must first be loaded into main memory
- Memory must be addressable by location (the data laid out in main memory must support random access)
- Execution proceeds sequentially

![image](https://user-images.githubusercontent.com/87630540/167259318-e1dc4c8b-6348-48df-b6b1-a70f8d166836.png)

- Memory stores the data and the programs
- The Control Unit inside the CPU interprets the instruction read from main memory and orders the ALU to compute; the ALU, the arithmetic logic unit, does the computation
- Input-output is the realm of the I/O device controllers, which exchange information with external devices

**Instruction Cycle**

![image](https://user-images.githubusercontent.com/87630540/167259336-e986cd0e-dc35-4369-b8ac-8cdd08119b6a.png)

Fetch: the control unit fetches the next instruction to execute from main memory
Execute: the instruction is executed

Fetch - Execute is one cycle

- Within one cycle, i.e. while an instruction is being fetched and executed, an interrupt is never serviced
- Only after a cycle completes, just before returning to the fetch stage of the next cycle, does the CPU check whether an external interrupt is pending

![image](https://user-images.githubusercontent.com/87630540/167259370-976dfb6a-6e4b-4e42-9268-c7e60b6b4dcd.png)

#### Multiprocessor systems (two or more general-purpose processors)

- parallel systems (multi-core)
- Tightly-coupled system: several CPUs in a small space exchange data through shared memory

Advantages?

1. Just as 30 exam questions are solved faster by several people than by one -> increased throughput
2. One 3-core CPU is cheaper than three single processors -> economy of scale
3. If one CPU goes down, another can cover for it -> increased reliability

Symmetric Multiprocessing (SMP): every CPU runs as an equal peer

- Load balancing is important: speedup can only be expected when the work given to all processors is even (for example, if one person does 10,000 tasks while the other four do 100 each, everyone waits until the 10,000 are done)
- The most widely used multiprocessing scheme

![image](https://user-images.githubusercontent.com/87630540/167259410-9702aef2-a8db-4db6-96da-6cc8d5421ed9.png)

Asymmetric multiprocessing: each CPU has an assigned role. The master processor assigns the work and the slave processors carry out what they are given (one person tells three others: you solve questions 1-10, you 11-20, you 21-30)

- Used in very large systems

Multiprocessing increases computing power or the amount of memory addressable by adding CPUs

- Contention on the shared bus grows
- Hence the NUMA memory access model

![image](https://user-images.githubusercontent.com/87630540/167259463-654e8b18-e611-4bb5-bc8c-15f03db6970c.png)

So what is NUMA? Data that is rarely shared is stored locally at each CPU, while heavily shared data is stored in main memory -> memory access performance goes up (since there is little contention) -> e.g. in a gym, if the machine zone, the pink-dumbbell zone, and the free-weight zone are separate, the serious lifters and the beginners never get in each other's way

One more multiprocessor system!! -> clustered systems

- SMP and asymmetric multiprocessing are tightly coupled, but a clustered system is a loosely-coupled system
- The nodes share a storage area network (SAN), and each node is independent: one node can be a single processor while another is multi-core

![image](https://user-images.githubusercontent.com/87630540/167259515-00a7db9d-8205-4c24-8398-c30d69c715fe.png)

- Provides high-availability (HA) service

Asymmetric clustering: e.g. two systems, one active and one hot-standby; when the active one fails, the hot-standby system becomes active
Symmetric clustering: e.g. the systems monitor each other, and when some node fails, another system takes over the work the failed node was doing

- parallelization

Distributed Systems (the lecturer said these are not very important)

- The clustered system above is one example of a distributed system
- Likewise a loosely coupled system
- Computation is distributed
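The point from the Instruction Cycle section - that interrupts are checked only *between* fetch-execute cycles - can be made concrete with a toy loop. A minimal sketch in Python; the instruction list, vector table, and pending flag are all illustrative, not how a real CPU is implemented:

```python
# Toy fetch-execute loop: an interrupt is serviced only between cycles,
# never in the middle of one. All names here are illustrative.
pending_interrupt = None  # set by "devices"; holds an IRQ number

interrupt_vector = {      # maps IRQ number -> handler (the "interrupt vector")
    1: lambda: print("ISR: disk transfer complete"),
}

program = ["LOAD", "ADD", "STORE", "JMP"]

pc = 0
while pc < len(program):
    instruction = program[pc]          # fetch
    print(f"execute {instruction}")    # execute (runs to completion, uninterruptible)
    pc += 1

    if pc == 2:                        # pretend a device raises IRQ 1 here
        pending_interrupt = 1

    # Only now, before the next fetch, is the interrupt line checked.
    if pending_interrupt is not None:
        saved_pc = pc                             # save the running snapshot
        interrupt_vector[pending_interrupt]()     # dispatch through the interrupt vector
        pc = saved_pc                             # restore the snapshot and resume
        pending_interrupt = None
```

A real CPU saves far more state in the snapshot (program counter, registers, flags), but the check-only-between-cycles structure is the same.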
rs.initiate({ _id: "odmantic-replica-set", version: 1, members: [ { _id: 0, host: "172.16.17.11:27017" }, { _id: 1, host: "172.16.17.12:27017" }, { _id: 2, host: "172.16.17.13:27017" }, ], });
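Clients then connect by listing the members (or a seed subset of them) and passing the replica-set name, which must match the `_id` given to `rs.initiate`. A minimal sketch with PyMongo, assuming the hosts above are reachable; the database and collection names are made up:

```python
# Minimal sketch: connect to the replica set initiated above.
# Host addresses and the set name come from the rs.initiate() call;
# example_db/example_collection are placeholders.
from pymongo import MongoClient

client = MongoClient(
    "mongodb://172.16.17.11:27017,172.16.17.12:27017,172.16.17.13:27017"
    "/?replicaSet=odmantic-replica-set"
)

# The driver discovers the primary from the seed list; writes go to it.
client.example_db.example_collection.insert_one({"ping": True})
```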
# Test-Application

This folder reflects a normal Symfony application structure. Please keep this in mind if you add new test files.
// Copyright © 2020 The With-Go Authors. All rights reserved.
// Licensed under the BSD 3-Clause License.
// You may not use this file except in compliance with the license
// that can be found in the LICENSE.md file.

package array

import (
	"reflect"
	"testing"
)

func TestPresenter_AsFloat64Slice(t *testing.T) {
	floatSlice := []float64{0, 1, 0.55, 3.14}
	array := New(0, 1, 0.55, 3.14)
	values, err := array.Present().AsFloat64Slice()
	if err != nil {
		t.Error(err)
	}
	if !reflect.DeepEqual(values, floatSlice) {
		t.Error("array.Present().AsFloat64Slice() does not have expected values")
		t.Errorf("Expecting %v, got %v", floatSlice, values)
	}
}

func TestPresenter_AsInt64Slice(t *testing.T) {
	intSlice := []int64{-1, 0, 1, 2}
	array := New(-1, 0, 1, 2)
	values, err := array.Present().AsInt64Slice()
	if err != nil {
		t.Error(err)
	}
	if !reflect.DeepEqual(values, intSlice) {
		t.Error("array.Present().AsInt64Slice() does not have expected values")
		t.Errorf("Expecting %v, got %v", intSlice, values)
	}
}

func TestPresenter_AsStringSlice(t *testing.T) {
	stringSlice := []string{"a", "b", "c"}
	array := New("a", "b", "c")
	values, err := array.Present().AsStringSlice()
	if err != nil {
		t.Error(err)
	}
	if !reflect.DeepEqual(values, stringSlice) {
		t.Error("array.Present().AsStringSlice() does not have expected values")
		t.Errorf("Expecting %v, got %v", stringSlice, values)
	}
}

func TestPresenter_AsUint64Slice(t *testing.T) {
	uintSlice := []uint64{0, 1, 2, 4, 5}
	array := New(0, 1, 2, 4, 5)
	values, err := array.Present().AsUint64Slice()
	if err != nil {
		t.Error(err)
	}
	if !reflect.DeepEqual(values, uintSlice) {
		t.Error("array.Present().AsUint64Slice() does not have expected values")
		t.Errorf("Expecting %v, got %v", uintSlice, values)
	}
}
/* * * Copyright 2017 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.genie.web.spring.autoconfigure.agent.apis.rpc.v4.endpoints; import com.netflix.genie.common.internal.dtos.v4.converters.JobDirectoryManifestProtoConverter; import com.netflix.genie.common.internal.dtos.v4.converters.JobServiceProtoConverter; import com.netflix.genie.common.internal.util.GenieHostInfo; import com.netflix.genie.proto.FileStreamServiceGrpc; import com.netflix.genie.proto.HeartBeatServiceGrpc; import com.netflix.genie.proto.JobKillServiceGrpc; import com.netflix.genie.proto.JobServiceGrpc; import com.netflix.genie.proto.PingServiceGrpc; import com.netflix.genie.web.agent.apis.rpc.v4.endpoints.GRpcAgentFileStreamServiceImpl; import com.netflix.genie.web.agent.apis.rpc.v4.endpoints.GRpcHeartBeatServiceImpl; import com.netflix.genie.web.agent.apis.rpc.v4.endpoints.GRpcJobKillServiceImpl; import com.netflix.genie.web.agent.apis.rpc.v4.endpoints.GRpcJobServiceImpl; import com.netflix.genie.web.agent.apis.rpc.v4.endpoints.GRpcPingServiceImpl; import com.netflix.genie.web.agent.apis.rpc.v4.endpoints.JobServiceProtoErrorComposer; import com.netflix.genie.web.agent.services.AgentConnectionTrackingService; import com.netflix.genie.web.agent.services.AgentJobService; import com.netflix.genie.web.data.services.DataServices; import com.netflix.genie.web.properties.AgentFileStreamProperties; import com.netflix.genie.web.properties.HeartBeatProperties; import io.micrometer.core.instrument.MeterRegistry; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.scheduling.TaskScheduler; import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler; /** * Configures various gRPC services and related beans if gRPC functionality is enabled. * * @author tgianos * @since 4.0.0 */ @Configuration @Slf4j @EnableConfigurationProperties( { AgentFileStreamProperties.class, HeartBeatProperties.class, } ) public class AgentRpcEndpointsAutoConfiguration { private static final int SINGLE_THREAD = 1; /** * Get the task scheduler used by the HeartBeat Service. * * @return The task scheduler */ @Bean @ConditionalOnMissingBean(name = "heartBeatServiceTaskScheduler") public TaskScheduler heartBeatServiceTaskScheduler() { final ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler(); scheduler.setPoolSize(SINGLE_THREAD); return scheduler; } /** * Bean for converting errors in the job service to gRPC messages. 
* * @return An instance of {@link JobServiceProtoErrorComposer} */ @Bean @ConditionalOnMissingBean(JobServiceProtoErrorComposer.class) public JobServiceProtoErrorComposer jobServiceProtoErrorComposer() { return new JobServiceProtoErrorComposer(); } /** * Provide an implementation of {@link com.netflix.genie.proto.FileStreamServiceGrpc.FileStreamServiceImplBase} * if no other is provided. * * @param converter The {@link JobDirectoryManifestProtoConverter} instance to use * @param taskScheduler The {@link TaskScheduler} to use to schedule tasks * @param properties The service properties * @param registry The meter registry * @return An instance of {@link GRpcAgentFileStreamServiceImpl} */ @Bean @ConditionalOnMissingBean(FileStreamServiceGrpc.FileStreamServiceImplBase.class) public GRpcAgentFileStreamServiceImpl gRpcAgentFileStreamService( final JobDirectoryManifestProtoConverter converter, @Qualifier("genieTaskScheduler") final TaskScheduler taskScheduler, final AgentFileStreamProperties properties, final MeterRegistry registry ) { return new GRpcAgentFileStreamServiceImpl(converter, taskScheduler, properties, registry); } /** * Provide an implementation of {@link com.netflix.genie.proto.HeartBeatServiceGrpc.HeartBeatServiceImplBase} * if no other is provided. * * @param agentConnectionTrackingService The {@link AgentConnectionTrackingService} implementation to use * @param properties The service properties * @param taskScheduler The {@link TaskScheduler} instance to use * @param registry The meter registry * @return A {@link GRpcHeartBeatServiceImpl} instance */ @Bean @ConditionalOnMissingBean(HeartBeatServiceGrpc.HeartBeatServiceImplBase.class) public GRpcHeartBeatServiceImpl gRpcHeartBeatService( final AgentConnectionTrackingService agentConnectionTrackingService, final HeartBeatProperties properties, @Qualifier("heartBeatServiceTaskScheduler") final TaskScheduler taskScheduler, final MeterRegistry registry ) { return new GRpcHeartBeatServiceImpl(agentConnectionTrackingService, properties, taskScheduler, registry); } /** * Provide an implementation of {@link com.netflix.genie.proto.JobKillServiceGrpc.JobKillServiceImplBase} * if no other is provided. * * @param dataServices The {@link DataServices} instance to use * @return A {@link GRpcJobKillServiceImpl} instance */ @Bean @ConditionalOnMissingBean(JobKillServiceGrpc.JobKillServiceImplBase.class) public GRpcJobKillServiceImpl gRpcJobKillService(final DataServices dataServices) { return new GRpcJobKillServiceImpl(dataServices); } /** * Provide an implementation of {@link com.netflix.genie.proto.JobServiceGrpc.JobServiceImplBase} if no other is * provided. * * @param agentJobService The {@link AgentJobService} instance to use * @param jobServiceProtoConverter The {@link JobServiceProtoConverter} instance to use * @param protoErrorComposer The {@link JobServiceProtoErrorComposer} instance to use * @return A {@link GRpcJobServiceImpl} instance */ @Bean @ConditionalOnMissingBean(JobServiceGrpc.JobServiceImplBase.class) public GRpcJobServiceImpl gRpcJobService( final AgentJobService agentJobService, final JobServiceProtoConverter jobServiceProtoConverter, final JobServiceProtoErrorComposer protoErrorComposer ) { return new GRpcJobServiceImpl(agentJobService, jobServiceProtoConverter, protoErrorComposer); } /** * Provide an implementation of {@link com.netflix.genie.proto.PingServiceGrpc.PingServiceImplBase} if no * other is provided. 
* * @param genieHostInfo The information about the Genie host * @return Instance of {@link GRpcPingServiceImpl} */ @Bean @ConditionalOnMissingBean(PingServiceGrpc.PingServiceImplBase.class) public GRpcPingServiceImpl gRpcPingService(final GenieHostInfo genieHostInfo) { return new GRpcPingServiceImpl(genieHostInfo); } }
#!/bin/bash
# git archive --format=tar --prefix=stack/ HEAD | gzip >/pi/archive/sagas-stack-0.1.tar.gz
rm -f /pi/archive/sagas/sagas-stack*
git archive --prefix=stack/ -o /pi/archive/sagas/sagas-stack-0.1.tar.gz HEAD
echo 'done.'
ls -alh /pi/archive/sagas/sagas-stack*
class Task < ActiveRecord::Base has_many :critiques, foreign_key: 'create_in_task_id' has_many :artifacts, foreign_key: 'submitted_in_task_id' end
package com.bitatron.adconsent.data import com.bitatron.adconsent.presentation.Status class GoogleConsentStatusToConsentStatusMapper { fun map(consentStatus: com.google.ads.consent.ConsentStatus): ConsentStatus = when (consentStatus) { com.google.ads.consent.ConsentStatus.UNKNOWN -> ConsentStatus(Status.UNKNOWN) com.google.ads.consent.ConsentStatus.NON_PERSONALIZED -> ConsentStatus(Status.NON_PERSONALIZED) com.google.ads.consent.ConsentStatus.PERSONALIZED -> ConsentStatus(Status.PERSONALIZED) else -> { throw Exception("Unknown Google Consent Status") } } }
<?php declare(strict_types=1); namespace spaceonfire\Bridge\Cycle\Collection; /** * @template T of object * @template P */ final class Change { public const ADD = 'ADD'; public const REMOVE = 'REMOVE'; /** * @phpstan-var self::ADD|self::REMOVE */ private string $type; /** * @var T */ private object $element; /** * @var P|null */ private $pivot; /** * @phpstan-param self::ADD|self::REMOVE $type * @param T $element * @param P|null $pivot */ private function __construct(string $type, object $element, $pivot = null) { $this->type = $type; $this->element = $element; $this->pivot = $pivot; } /** * @phpstan-return self::ADD|self::REMOVE */ public function getType(): string { return $this->type; } /** * @return T */ public function getElement(): object { return $this->element; } /** * @return P|null */ public function getPivot() { return $this->pivot; } /** * @param P|null $pivot */ public function setPivot($pivot): void { $this->pivot = $pivot; } /** * @param T $element * @param P|null $pivot * @return self<T,P> */ public static function add(object $element, $pivot = null): self { return new self(self::ADD, $element, $pivot); } /** * @param T $element * @param P|null $pivot * @return self<T,P> */ public static function remove(object $element, $pivot = null): self { return new self(self::REMOVE, $element, $pivot); } /** * @param T $element * @param T ...$elements * @return \Generator<self<T,mixed>> */ public static function addElements(object $element, object ...$elements): \Generator { foreach ([$element, ...$elements] as $item) { yield self::add($item); } } /** * @param T $element * @param T ...$elements * @return \Generator<self<T,mixed>> */ public static function removeElements(object $element, object ...$elements): \Generator { foreach ([$element, ...$elements] as $item) { yield self::remove($item); } } }
class OrderCall extends eVENT { constructor({occTime, delay}={}) { super({occTime, delay}); } } OrderCall.successorNode = "TakeOrder"; OrderCall.eventRate = 0.5;
package com.electrit.protokol import kotlin.random.Random import kotlin.test.Test import kotlin.test.assertEquals import kotlin.test.assertFailsWith class ObjectsTest { @Suppress("EqualsOrHashCode") class Data( var bytes: ByteArray = ByteArray(0), var string: String = "", ) { override fun equals(other: Any?): Boolean { if (this === other) return true if (other == null || this::class != other::class) return false other as Data if (!bytes.contentEquals(other.bytes)) return false if (string != other.string) return false return true } } object DataProtokolObject : ProtokolObject<Data> { override val protokol: Protokol.(Data) -> Unit = { with(it) { BYTEARRAY(::bytes) STRING(::string) } } override fun create() = Data() } @Suppress("EqualsOrHashCode") class ComplexData( var name: String = "", var data: Data? = null, var bytes: ByteArray = ByteArray(0), ) { override fun equals(other: Any?): Boolean { if (this === other) return true if (other == null || this::class != other::class) return false other as ComplexData if (name != other.name) return false if (data != other.data) return false if (!bytes.contentEquals(other.bytes)) return false return true } } object ComplexDataProtokolObject : ProtokolObject<ComplexData> { override val protokol: Protokol.(ComplexData) -> Unit = { with(it) { STRING(::name) OBJECT(::data, DataProtokolObject) { if (this == null || string == "covid-19") throw IllegalArgumentException("null and virus are not allowed") } BYTEARRAY(::bytes) } } override fun create() = ComplexData() } data class ObjectsData(var list: List<ComplexData> = emptyList()) object ObjectsDataProtokolObject : ProtokolObject<ObjectsData> { override val protokol: Protokol.(ObjectsData) -> Unit = { with(it) { OBJECTS(::list, ComplexDataProtokolObject) } } override fun create() = ObjectsData() } private val sizeChecker: (Int) -> Unit = { size -> if (size == 0) throw IllegalArgumentException("size can't be 0") } private val validator: ComplexData.() -> Unit = { if (name == "test") throw IllegalArgumentException("List elements can't have name='test'") } @Test fun test() { fun assert(list: List<ComplexData>) { val bytes = ByteArrayProtokolCodec.encodeList(list, ComplexDataProtokolObject) val decodedList = ByteArrayProtokolCodec.decodeList(bytes, ComplexDataProtokolObject) assertEquals(list, decodedList) } fun strictAssert(list: List<ComplexData>) { val bytes = ByteArrayProtokolCodec.encodeList(list, ComplexDataProtokolObject, sizeChecker, validator) val decodedList = ByteArrayProtokolCodec.decodeList(bytes, ComplexDataProtokolObject) assertEquals(list, decodedList) } assert(emptyList()) assertFailsWith<IllegalArgumentException> { strictAssert(emptyList()) } val list = List(128) { val data = Data() data.bytes = Random.nextBytes(Random.nextInt(150)) data.string = Random.nextBytes(Random.nextInt(150)).decodeToString() val complexData = ComplexData() complexData.name = if (it == 9) "test" else Random.nextBytes(Random.nextInt(150)).decodeToString() complexData.data = data complexData.bytes = Random.nextBytes(Random.nextInt(150)) complexData } val data = ObjectsData(list) val bytes = ByteArrayProtokolCodec.encode(data, ObjectsDataProtokolObject) val decodedData = ByteArrayProtokolCodec.decode(bytes, ObjectsDataProtokolObject) assertEquals(data, decodedData) assert(list) assertFailsWith<IllegalArgumentException> { strictAssert(list) } } @Test fun testParseError() { assertFailsWith<IllegalArgumentException> { val bytes = ByteArrayProtokolCodec.encodeList(emptyList(), ComplexDataProtokolObject) 
ByteArrayProtokolCodec.decodeList(bytes, ComplexDataProtokolObject, sizeChecker, validator) } assertFailsWith<IllegalArgumentException> { val bytes = ByteArrayProtokolCodec.encodeList(List(1) { val data = Data() data.bytes = Random.nextBytes(Random.nextInt(150)) data.string = Random.nextBytes(Random.nextInt(150)).decodeToString() val complexData = ComplexData() complexData.name = "test" complexData.data = data complexData.bytes = Random.nextBytes(Random.nextInt(150)) complexData }, ComplexDataProtokolObject) ByteArrayProtokolCodec.decodeList(bytes, ComplexDataProtokolObject, sizeChecker, validator) } } }
package com.cognifide.slung.component.filter.configuration; public interface ComponentFilterConfiguration { boolean isEnabled(); boolean canFilter(String resourceType); }
using AbstractTrees
using Primes
using Random
using Tables
using Test
using Wordlegames

const primes5 = primes(10000, 99999)  # vector of 5-digit prime numbers
const primel = GamePool(primes5)
const primelxpc = GamePool(primes5; guesstype=MinimizeExpected)

@testset "GamePool" begin
    @test typeof(primel) == GamePool{5,UInt8,MaximizeEntropy}
    @test isa(propertynames(primel), Tuple)
    @test length(primel.active) == 8363
    @test eltype(primel.allscores) == UInt8
    @test eltype(primel.guesspool) == NTuple{5,Char}
    @test length(first(primel.guesspool)) == 5
    @test primel.targetpool == primel.guesspool
    @test sum(primel.activetargets) == length(primel.active)
    @test length(names(primel.summary)) == 7
    @test all(reset!(primel).active)
    (; poolsz, guess, index, expected, entropy, score, sc) = only(primel.guesses)
    @test guess == "12953"
    @test index == 313
    @test expected ≈ 124.3844314241301
    @test entropy ≈ 6.632274058429609
    @test primel.hardmode
    @test ismissing(score)
    @test ismissing(sc)
    playgame!(primel, index)  # Got it in one!
    (; poolsz, guess, index, expected, entropy, score, sc) = only(primel.guesses)
    @test sc == 0xf2
    @test score == "🟩🟩🟩🟩🟩"
    Random.seed!(1234321)
    (; poolsz, guess, index, expected, entropy, score, sc) = showgame!(primel)
    @test poolsz == [8363, 201, 10]
    @test index == [313, 1141, 3556]
    @test guess == ["12953", "21067", "46271"]
    @test expected ≈ [124.3844314241301, 5.925373134328358, 1.2]
    @test entropy ≈ [6.632274058429609, 5.479367512099353, 3.121928094887362]
    @test sc == [108, 112, 242]
    (; poolsz, guess, index, expected, entropy, score, sc) = showgame!(primel, "43867")
    @test index == [313, 2387, 3273, 3337]
    # size mismatch
    @test_throws ArgumentError playgame!(primel, "4321")
    # errors in constructor arguments
    @test_throws ArgumentError GamePool(["foo", "bar"], trues(4))
    # gp = GamePool(["foo", "bar", "boz"], BitVector([true, true, false]))
    @test_broken isa(gp, GamePool{3,UInt8})
    @test_broken isa(gp.allscores, Matrix{UInt8})
    @test_broken size(gp.allscores) == (2, 3)
    @test_throws ArgumentError GamePool(["foo", "bar", "foobar"])
    @test_throws ArgumentError GamePool(["foo", "bar"]; guesstype=Int)
    @test Tables.isrowtable(playgame!(primel).guesses)  # this also covers the playgame! method for testing
end

@testset "scorecolumn!" begin
    targets = NTuple{5,Char}.(["raise", "super", "adapt", "algae", "abbey"])
    scores = similar(targets, UInt8)
    @test first(scorecolumn!(scores, targets[1], targets)) == 242
    @test_throws DimensionMismatch scorecolumn!(zeros(UInt8, 4), targets[1], targets)
    @test scorecolumn!(scores, targets[3], targets)[3] == 242
    targets = NTuple{5,Char}.(["12953", "34513", "51133", "51383"])
    scores = scorecolumn!(similar(targets, UInt8), targets[4], targets)
    @test first(scores) == 0x6e
    @test last(scores) == 0xf2
    scorecolumn!(scores, targets[2], targets)
    @test last(scores) == 0x5f
end

@testset "scoretype" begin
    @test Wordlegames.scoretype(5) == UInt8
    @test_throws ArgumentError Wordlegames.scoretype(0)
    @test Wordlegames.scoretype(6) == UInt16
    @test Wordlegames.scoretype(11) == UInt32
    @test Wordlegames.scoretype(21) == UInt64
    @test Wordlegames.scoretype(80) == UInt128
    @test_throws ArgumentError Wordlegames.scoretype(81)
end

@testset "scoreupdate!" begin
    @test last(scoreupdate!(reset!(primel), [1, 0, 0, 1, 1]).guesses).poolsz == 120
    @test_throws ArgumentError scoreupdate!(primel, [3, 0, 0, 3, 3])
end

@testset "tree" begin
    io = IOBuffer()
    primetree = tree(primel)
    print_tree(io, primetree; maxdepth=8)
    @test length(take!(io)) > 480_000
    rootscore = primetree.score
    @test isa(rootscore, GuessScore)
    @test rootscore.guess == "12953"
    @test ismissing(rootscore.score)
    @test ismissing(rootscore.sc)
    randtree = tree(primel, Random.seed!(1234321), 15)
    @test randtree.score.guess == rootscore.guess
    tree55541 = tree(primel, ["55541"])
    leafnode = only(collect(Leaves(tree55541)))
    @test isempty(leafnode.children)
    @test leafnode.score.guess == "55541"
end
(ns general-expenses-accountant.config
  (:require [clojure.java.io :as io]
            [mount.core :as mount :refer [defstate]]
            [omniconf.core :as cfg]
            [taoensso.timbre :as log]))

(def ^:private dev-env "DEV")

(cfg/define
  {:env-type {:description "Environment type (to be set in CMD args or ENV)"
              :one-of #{dev-env "PROD"}
              :required true
              :type :string}
   :database-url {:description "Heroku's standard 'DATABASE_URL' var"
                  :type :string}
   :db-user {:description "The database user name for the bot"
             :type :string}
   :db-password {:description "The database user password"
                 :type :string}
   :max-db-conn {:description "Max # of simultaneous DB connections"
                 :default 10
                 :type :number}
   :bot-api-token {:description "Telegram Bot API token"
                   :verifier #(= (count %2) 46)
                   :required true
                   :type :string}
   :bot-url {:description "The bot URL (for a webhook)"
             :type :string}
   :heroku-app-name {:description "Heroku app name"
                     :type :string}
   :port {:description "HTTP web server port to use"
          :type :number
          :default 8080}})

(defn get-prop
  [key]
  (cfg/get key)) ;; internally memoized

(defn in-dev? []
  (= (get-prop :env-type) dev-env))

(defn load-and-validate!
  ([]
   (load-and-validate! []))
  ([args]
   (load-and-validate! args "dev/config.edn"))
  ([args file]
   (cfg/populate-from-cmd args)
   (cfg/populate-from-env) ;; at this point, the ':env-type' has to be determined already
   (when (and (some? file) (in-dev?))
     (if (.exists (io/as-file file))
       (cfg/populate-from-file file)
       (log/warn "Can't find local dev configuration file" file)))
   (cfg/verify :quit-on-error true
               :silent (not (in-dev?)))))

(defstate ^:private loader
  :start (load-and-validate! (mount/args)))
<?php
/*
 *  $Id: PropelDateTime.php 1262 2009-10-26 20:54:39Z francois $
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * This software consists of voluntary contributions made by many individuals
 * and is licensed under the LGPL. For more information please see
 * <http://propel.phpdb.org>.
 */

/**
 * DateTime subclass which supports serialization.
 *
 * Currently Propel is not using this for storing date/time objects
 * within model objects; however, we are keeping it in the repository
 * because it is useful if you want to store a DateTime object in a session.
 *
 * @author Alan Pinstein
 * @author Soenke Ruempler
 * @author Hans Lellelid
 * @package propel.util
 */
class PropelDateTime extends DateTime
{
	/**
	 * A string representation of the date, for serialization.
	 * @var string
	 */
	private $dateString;

	/**
	 * A string representation of the time zone, for serialization.
	 * @var string
	 */
	private $tzString;

	/**
	 * Convenience method to enable a more fluent API.
	 * @param string $date Date/time value.
	 * @param DateTimeZone $tz (optional) timezone
	 */
	public static function newInstance($date, DateTimeZone $tz = null)
	{
		if ($tz) {
			return new DateTime($date, $tz);
		} else {
			return new DateTime($date);
		}
	}

	/**
	 * PHP "magic" function called when object is serialized.
	 * Sets an internal property with the date string and returns properties
	 * of class that should be serialized.
	 * @return array string[]
	 */
	function __sleep()
	{
		// We need to use a string without a time zone, due to
		// PHP bug: http://bugs.php.net/bug.php?id=40743
		$this->dateString = $this->format('Y-m-d H:i:s');
		$this->tzString = $this->getTimeZone()->getName();
		return array('dateString', 'tzString');
	}

	/**
	 * PHP "magic" function called when object is restored from serialized state.
	 * Calls DateTime constructor with previously stored string value of date.
	 */
	function __wakeup()
	{
		parent::__construct($this->dateString, new DateTimeZone($this->tzString));
	}
}
--- company-name: "Albert Technologies (Formerly Adgorithms)" domain: albert.ai home: https://albert.ai/ privacy-policy: https://albert.ai/terms-and-conditions/ email: [email protected] member-of: "Interactive Advertising Bureau (IAB)" ---
# Contributing to XPM

[A full guide on contributing to all repositories in this organization will be available here soon.][contributing]

[contributing]: https://github.com/xanite/xanite/blob/master/CONTRIBUTING.md
module EventCalendar
  class ApplicationController < ActionController::Base
    include Concerns::ApplicationController

    layout 'layouts/application'
    helper ::ApplicationHelper

    before_action :login_required

    private

    #=== Determines whether the engine is enabled.
    # Redirects if it is disabled.
    def enable_engine_required
      unless EngineMaster.enable?(EventCalendar::Engine.engine_name)
        flash[:alert] = t("shared.engines.disable")
        return redirect_to(main_app.susanoo_dashboards_path)
      end
    end
  end
end
package ar.com.pablitar.funspaces

trait Positioned[T] {
  def space: AbstractSpace[T]
  var position: Point[T] = space.origin
}

trait Speedy[T] extends Positioned[T] {
  def speed: Point[T]
  def applySpeed(delta: T) = position = position + speed * delta
}

class Car(var speed: Point[Double])(implicit val space: Space) extends Speedy[Double]
package io.qbeast.spark.keeper

import io.qbeast.core.keeper.{Keeper, LocalKeeper}

class ProtocolMockTest extends ProtocolMockTestSpec {

  "the qbeast-spark client" should "throw an exception when an inconsistent state is found" in withContext(LocalKeeper) { context =>
    implicit val keeper: Keeper = LocalKeeper
    val initProcess = new InitProcess(context)
    val announcer = new AnnouncerProcess(context, Seq("", "A", "AA", "AAA"))
    val writer = new WritingProcess(context)
    val badOptimizer = new OptimizingProcessBad(context, Seq("gA", "g"))

    initProcess.startTransactionAndWait()
    initProcess.finishTransaction()
    announcer.start()
    announcer.join()

    writer.startTransactionAndWait()
    badOptimizer.startTransactionAndWait()
    badOptimizer.finishTransaction()
    writer.finishTransaction()

    writer.succeeded shouldBe Some(false)
  }

  "A faulty keeper" should "not cause inconsistency with conflicts" in withContext(RandomKeeper) { context =>
    implicit val keeper: Keeper = RandomKeeper
    val initProcess = new InitProcess(context)
    val announcer = new AnnouncerProcess(context, Seq("", "A", "AA", "AAA"))
    val writer = new WritingProcess(context)
    val optim = new OptimizingProcessGood(context)

    initProcess.startTransactionAndWait()
    initProcess.finishTransaction()
    announcer.start()
    announcer.join()

    writer.startTransactionAndWait()
    optim.startTransactionAndWait()
    optim.finishTransaction()
    writer.finishTransaction()

    writer.succeeded shouldBe Some(false)
  }

  it should "not cause inconsistency when there are no conflicts" in withContext(RandomKeeper) { context =>
    implicit val keeper: Keeper = RandomKeeper
    val initProcess = new InitProcess(context)
    val announcer = new AnnouncerProcess(context, Seq("", "A", "AA", "AAA"))
    val writer = new WritingProcess(context)
    val optim = new OptimizingProcessGood(context)

    initProcess.startTransactionAndWait()
    initProcess.finishTransaction()
    announcer.start()
    announcer.join()

    writer.startTransactionAndWait()
    writer.finishTransaction()
    optim.startTransactionAndWait()
    optim.finishTransaction()

    writer.succeeded shouldBe Some(true)
  }

  "A crash with timeouts" should "not cause inconsistency in a normal scenario" in withContext(
    LocalKeeper) { context =>
    implicit val keeper: Keeper = LocalKeeper
    val initProcess = new InitProcess(context)
    val announcer = new AnnouncerProcess(context, Seq("", "A", "AA"))
    val writer = new WritingProcess(context)
    val optim = new OptimizingProcessGood(context)

    initProcess.startTransactionAndWait()
    initProcess.finishTransaction()
    announcer.start()
    announcer.join()

    writer.startTransactionAndWait()
    writer.finishTransaction()
    optim.startTransactionAndWait()
    optim.finishTransaction()

    writer.succeeded shouldBe Some(true)
  }

  "A write timeout" should "not cause inconsistency when a timeout may interfere with an optimization" in withContext(
    LocalKeeper) { context =>
    implicit val keeper = LocalKeeper
    val initProcess = new InitProcess(context)
    val announcer = new AnnouncerProcess(context, Seq("", "A", "AA"))
    val writer = new WritingProcess(context)
    val optim = new OptimizingProcessGood(context)

    initProcess.startTransactionAndWait()
    initProcess.finishTransaction()

    writer.startTransactionAndWait()
    Thread.sleep(1000) // We make sure the keeper forgot about this write operation
    announcer.start() // so that when we announce, we are not aware of a running write operation
    announcer.join() // which should lead the optim to optimize something it should not touch.
    optim.startTransactionAndWait()
    optim.finishTransaction()
    // But the write should detect it and fail
    writer.finishTransaction()

    writer.succeeded shouldBe Some(false)
  }

  "A crashed optimization" should "not cause problems" in withContext(LocalKeeper) { context =>
    implicit val keeper = LocalKeeper
    val initProcess = new InitProcess(context)
    val announcer = new AnnouncerProcess(context, Seq("", "A", "AA"))
    val writer = new WritingProcess(context)
    val optim1 = new OptimizingProcessGood(context)

    initProcess.startTransactionAndWait()
    initProcess.finishTransaction()

    writer.startTransactionAndWait()
    announcer.start() // so that when we announce, we are not aware of a running write operation
    announcer.join() // which should lead the optim to optimize something it should not touch.
    optim1.startTransactionAndWait()
    optim1.killMe()
    Thread.sleep(1000) // this should ensure the client cleans the pending optimization
    writer.finishTransaction()

    writer.succeeded shouldBe Some(true)
  }
}
#!/bin/bash cd "$(dirname "${BASH_SOURCE[0]}")" \ && . "../../utils.sh" \ && . "./utils.sh" # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - print_in_purple "\n System Utils\n\n" brew_install "Iterm2 Terminal" "iterm2" "--cask" "" "async-install" #brew_install "Magicprefs" "magicprefs" "--cask" "" "async-install" brew_install "Hyperdock" "hyperdock" "--cask" "" "async-install" #brew_install "Ansible" "ansible" # Need to use older versions which are provided by Pip brew_install "AWScli" "awscli" brew_install "Bat" "bat" brew_install "Htop" "htop" brew_install "Httpie" "httpie" brew_install "JQ" "jq" brew_install "Magic Wormhole" "magic-wormhole" brew_install "MTR" "mtr" brew_install "Ncdu" "ncdu" brew_install "Nmap" "nmap" brew_install "Pretty ping" "prettyping" brew_install "GNU utilities for networking" "inetutils" brew_install "Terraform" "terraform" brew_install "Testssl" "testssl" brew_install "Tree" "tree" brew_install "zlib" "zlib"
package com.sksamuel.centurion.formats import com.sksamuel.centurion.Logging import com.sksamuel.centurion.Struct import com.sksamuel.centurion.StructType import com.sksamuel.centurion.parquet.ToParquetSchema import com.sksamuel.centurion.parquet.parquetReader import com.sksamuel.centurion.parquet.parquetWriter import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path object ParquetFormat : Format, Logging { override fun serde() = Serde( "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", mapOf("org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe" to "1") ) override fun writer(path: Path, schema: StructType, conf: Configuration): StructWriter = object : StructWriter, Logging { // setting overwrite to false, as it should be considered a bug if a hive writer // tries to overwrite an existing file val writer by lazy { logger.debug("Creating parquet writer at $path") parquetWriter( path, conf, schema = ToParquetSchema.toMessageType(schema), overwrite = false ) } override fun write(struct: Struct): Unit = writer.write(struct) override fun close(): Unit = writer.close() } override fun reader(path: Path, schema: StructType, conf: Configuration): StructReader = object : StructReader { val reader = parquetReader(path, conf) override fun read() = reader.read() override fun close(): Unit = reader.close() } }
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE LambdaCase #-} -- | module Lang.Syntax where import Data.Semigroup ((<>)) import Data.Text (Text) import qualified Data.Text as Text import Lang.Annot data Term name ann = App ann (Term name ann) (Term name ann) | Abs ann name (Term name ann) | Let ann name (Term name ann) (Term name ann) | Ident ann name | Hole ann name | Number ann (Either Integer Double) deriving (Show, Eq, Functor) instance Annot (Term name ann) where type Ann (Term name ann) = ann getAnn = \case App ann _ _ -> ann Abs ann _ _ -> ann Let ann _ _ _ -> ann Ident ann _ -> ann Hole ann _ -> ann Number ann _ -> ann class PrettyName name where prettyName :: name -> Text prettyTerm :: (PrettyName name) => Term name ann -> Text prettyTerm = \case App _ t1 t2 -> prettyTerm t1 <> " " <> case t2 of App{} -> "( " <> prettyTerm t2 <> " )" _ -> prettyTerm t2 Abs _ n b -> "\\ " <> prettyName n <> " -> " <> prettyTerm b Let _ n e b -> "let " <> prettyName n <> " = " <> prettyTerm e <> " in " <> prettyTerm b Ident _ n -> prettyName n Hole _ n -> "?" <> prettyName n Number _ n -> Text.pack $ either show show n
// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. // test w/ `pub run test -N diagnostic_describe_all_properties` class DiagnosticPropertiesBuilder { void add(DiagnosticsProperty property) {} } class DiagnosticsProperty<T> {} class StringProperty extends DiagnosticsProperty<String> { StringProperty( String name, String value, { String description, String tooltip, bool showName = true, Object defaultValue, bool quoted, String ifEmpty, //DiagnosticLevel level = DiagnosticLevel.info, }); } abstract class Diagnosticable { void debugFillProperties(DiagnosticPropertiesBuilder properties); List<DiagnosticsNode> debugDescribeChildren(); } class DiagnosticsNode {} class Widget {} class MyWidget extends Diagnosticable { Widget p0; //Skipped List<Widget> p00; //Skipped Widget get p000 => null; //Skipped String p1; //OK String p2; //LINT String get p3 => ''; //LINT String _p3; //OK String debugFoo; //OK String foo; //OK (covered by debugFoo) String debugBar; //OK (covered by bar) String bar; //OK static String p4; //OK String p5; //OK (in debugDescribeChildren) @override void debugFillProperties(DiagnosticPropertiesBuilder properties) { properties .add(StringProperty('property', p1, defaultValue: null, quoted: false)); properties.add(StringProperty('debugFoo', debugFoo, defaultValue: null, quoted: false)); properties .add(StringProperty('bar', bar, defaultValue: null, quoted: false)); } @override List<DiagnosticsNode> debugDescribeChildren() { // In real source this should be used to create a diagnostics node, // but for us a reference suffices. print(p5); return null; } }
# Keyword `CURRENT_TIMESTAMP`

Returns the current date and time.

Syntax:

```sql
CURRENT_TIMESTAMP
```
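### Examples

A minimal usage sketch (assuming a SQL dialect that allows `CURRENT_TIMESTAMP` directly in a `SELECT` list):

```sql
SELECT CURRENT_TIMESTAMP;
```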
%%%------------------------------------------------------------------- %%% @author Aaron Lelevier %%% @doc Rider server where a single Process maps to a single Rider %%% @end %%%------------------------------------------------------------------- -module(rl_rider). -behaviour(gen_server). -include("ride_log.hrl"). %% API -export([create_rider/1, start_link/1, join_ride/2, is_riding/1, ride_info/1]). %% gen_server -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]). %% Macros -define(SERVER, ?MODULE). -define(INIT_STATE, #{ride => undefined}). %%%=================================================================== %%% API %%%=================================================================== %% TODO: might want to support 'Name' arg as a 'string' and cast to 'binary' %% TODO: might want to rename func to 'create' to be more polymorphic -spec create_rider(binary()) -> {ok, rl_db_rider:item()}. create_rider(Name) -> {ok, RiderId} = rl_db:create_rider(Name), {ok, _Pid} = start_link(RiderId), Rider = #{id => RiderId, name => Name}, {ok, Rider}. %% TODO: should not be able to join the same ride more than once %% TODO: should not be able to join more than one ride at a time %% @doc Rider begins a Ride %% this should POST the Rider's first "start point" entry for the Ride -spec join_ride(pid(), pid()) -> ok. join_ride(RiderId, RideId) -> lager:debug("join_ride RiderId:~p", [RiderId]), gen_server:call(RiderId, {join_ride, RideId}). is_riding(RiderId) -> gen_server:call(RiderId, is_riding). ride_info(RiderId) -> gen_server:call(RiderId, ride_info). %%%=================================================================== %%% Spawning and gen_server implementation %%%=================================================================== -spec start_link(id()) -> {ok, pid()}. start_link(RiderId) -> gen_server:start_link(?MODULE, [RiderId], []). -spec init(id()) -> {ok, map()}. init(RiderId) -> {ok, #{id => RiderId, ride => undefined}}. handle_call({join_ride, RideId}, _From, State0) -> lager:debug("handle_call join_ride RiderId:~p", [self()]), ok = rl_ride:add_rider(RideId, self()), State = State0#{ride := RideId}, {reply, ok, State}; handle_call(is_riding, _From, State) -> Reply = maps:get(ride, State) =/= undefined, {reply, Reply, State}; handle_call(ride_info, _From, State) -> Reply = maps:get(ride, State), {reply, Reply, State}. handle_cast(_Request, State) -> {noreply, State}. handle_info(_Info, State) -> {noreply, State}. terminate(_Reason, _State) -> ok. code_change(_OldVsn, State, _Extra) -> {ok, State}. %%%=================================================================== %%% Internal functions %%%===================================================================
using System;
using System.Reflection.Emit;

namespace Illuminator.Extensions
{
    public static class Functions
    {
        /// <summary>
        ///     Emit if statement.
        ///     <example>
        ///         <code>
        ///             if (condition) {
        ///                 then();
        ///             } else {
        ///                 otherwise();
        ///             }
        ///         </code>
        ///     </example>
        /// </summary>
        /// <param name="condition">Function that emits the condition check.</param>
        /// <param name="then">Function emitted for the branch taken when the condition is true.</param>
        /// <param name="otherwise">Function emitted for the branch taken when the condition is false.</param>
        /// <returns>ILEmitterFunc</returns>
        public static ILEmitterFunc If(
            ILEmitterFunc condition,
            ILEmitterFunc then,
            ILEmitterFunc otherwise) =>
            (in ILEmitter il) => il
                .Brfalse(condition, out var label)
                .Emit(then)
                .Br(out var end)
                .MarkLabel(label)
                .Emit(otherwise)
                .MarkLabel(end);

        public static ILEmitter Stloc<T>(this ILEmitter self, ILEmitterFunc value, out LocalBuilder local)
        {
            var il = self.DeclareLocal<T>(out var output);
            local = output;

            return il.Stloc(value, local);
        }

        public static ILEmitterFunc Ret<T>(T value) =>
            (in ILEmitter il) => value switch {
                int val => il.Ldc_I4(val).Ret(),
                long val => il.Ldc_I8(val).Ret(),
                float val => il.Ldc_R4(val).Ret(),
                double val => il.Ldc_R8(val).Ret(),
                byte val => il.Ldc_I4_S(val).Ret(),
                sbyte val => il.Ldc_I4_S(val).Ret(),
                string val => il.Ldstr(val).Ret(),
                _ => throw new NotSupportedException("Only strings and numeric values are supported.")
            };
    }
}
require 'spec_helper' describe "PasswordResets" do it "emails user when requesting password reset" do user = FactoryGirl.build(:user) visit "/login" click_link "password" fill_in "Email", :with => user.email click_button "Reset Password" page.should have_content("Email sent") last_email.to.should include(user.email) end end
import {
  Body,
  Controller,
  Delete,
  Get,
  HttpException,
  HttpStatus,
  Param,
  Post,
  UsePipes,
  ValidationPipe,
} from '@nestjs/common';
import { createHash } from 'crypto';
import { CreateUserDto } from './dto/create-user.dto';
import { UserEntity } from './user.entity';
import { UserService } from './user.service';

@Controller('users')
export class UserController {
  constructor(private readonly userService: UserService) {}

  @Get()
  async findAll(): Promise<UserEntity[]> {
    return await this.userService.findAll();
  }

  @UsePipes(new ValidationPipe())
  @Post()
  async create(@Body() userData: CreateUserDto) {
    const userExists = await this.userService.findByName(userData.name);
    if (userExists) {
      throw new HttpException(
        {
          message: 'User already exists',
        },
        HttpStatus.CONFLICT,
      );
    }

    return this.userService.create({
      name: userData.name,
      password: userData.password,
    });
  }

  @Delete('/:name')
  async delete(@Param() params) {
    return await this.userService.delete(params.name);
  }

  @UsePipes(new ValidationPipe())
  @Post('/login')
  async login(@Body() userData: CreateUserDto) {
    const user = await this.userService.findByName(userData.name);
    if (!user) {
      throw new HttpException(
        {
          message: `User doesn't exist`,
        },
        HttpStatus.UNAUTHORIZED,
      );
    }

    const hashedPassword = createHash('sha256')
      .update(userData.password, 'utf-8')
      .digest('hex');

    if (user.password !== hashedPassword) {
      throw new HttpException(
        {
          message: `Wrong password`,
        },
        HttpStatus.UNAUTHORIZED,
      );
    }

    const token = await this.userService.generateToken(user);
    return { token };
  }
}
package scalaomg.server.room

import akka.actor.ActorRef

/**
 * Minimal interface for a client communication channel. It must have an id and a send method.
 */
trait Client {

  /**
   * Client identifier.
   */
  val id: String

  /**
   * Send a message to this client.
   * @param msg the message to send
   * @tparam T the type of the message to send
   */
  def send[T](msg: T)

  // Clients compare equal by id
  override def equals(obj: Any): Boolean =
    obj != null && obj.isInstanceOf[Client] && obj.asInstanceOf[Client].id == this.id

  // Hash by id so that hashCode stays consistent with equals
  override def hashCode(): Int = id.hashCode
}

private[server] object Client {

  /**
   * Creates a client that echoes messages to a specific actor.
   * @param actor the actor that will receive the messages
   * @param id the id of the client
   * @return the client instance
   */
  def asActor(actor: ActorRef)(id: String): Client = new ClientImpl(id, actor)

  /**
   * Creates a mocked client that may have an id and that can't send any message.
   * @param id the id of the client; if not provided it will have an empty one
   * @return the client instance
   */
  def mock(id: String = ""): Client = MockClient(id)
}

private class ClientImpl(override val id: String, private val clientActor: ActorRef) extends Client {
  override def send[T](msg: T): Unit = clientActor ! msg
}

private case class MockClient(override val id: String) extends Client {
  override def send[T](msg: T): Unit = {}
}
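// Usage sketch (illustrative only, not part of the library): a mocked client can stand in
// for a real communication channel in tests, and clients compare equal by id.
//   val client = Client.mock("client-1")
//   client.send("ping")                  // no-op for a mocked client
//   client == Client.mock("client-1")    // true: equality is id-based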
# -*- coding: utf-8 -*-
"""IPL Score Prediction.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1pUPWZBWfVqxbIXMyo7d9vJz4OduQsd_u
"""

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import warnings
warnings.filterwarnings("ignore")

data = pd.read_excel("IPL_Data.xlsx")

data.head(5)

data.info()

data.describe()

print("We have IPL data from {} to {}".format(data['date'].dt.year.min(), data['date'].dt.year.max()))

data.isnull().sum()

data['bat_team'].unique()

data['bowl_team'].unique()

"""Dropping teams that no longer play."""

data['bowl_team'].value_counts()

teams = ['Kolkata Knight Riders', 'Chennai Super Kings', 'Rajasthan Royals', 'Mumbai Indians',
         'Kings XI Punjab', 'Royal Challengers Bangalore', 'Delhi Daredevils', 'Sunrisers Hyderabad']

data = data[(data['bat_team'].isin(teams)) & (data['bowl_team'].isin(teams))]

"""For final score prediction we don't need the batsman, bowler, striker, non-striker and mid columns, so drop them."""

data.drop(['mid', 'batsman', 'bowler', 'striker', 'non-striker'], axis=1, inplace=True)

data.head(5)

corr = data.corr()
sns.heatmap(corr, annot=True)

# Converting categorical features using the OneHotEncoding method
df = pd.get_dummies(data=data, columns=['bat_team', 'bowl_team', 'venue'])

df.head(5)

# Splitting the data into train and test sets
X_train = df.drop(labels='total', axis=1)[df['date'].dt.year <= 2016]
X_test = df.drop(labels='total', axis=1)[df['date'].dt.year >= 2017]

y_train = df[df['date'].dt.year <= 2016]['total'].values
y_test = df[df['date'].dt.year >= 2017]['total'].values

# Removing the 'date' column
X_train.drop(['date'], axis=1, inplace=True)
X_test.drop(['date'], axis=1, inplace=True)

"""# Model Building"""

# Linear Regression Model
from sklearn.linear_model import LinearRegression
regressor = LinearRegression()
regressor.fit(X_train, y_train)

y_pred = regressor.predict(X_test)

from sklearn.metrics import r2_score, mean_absolute_error
mean_absolute_error(y_test, y_pred)

r2_score(y_test, y_pred)

from sklearn.ensemble import RandomForestRegressor
rf = RandomForestRegressor()
rf.fit(X_train, y_train)

rf_pred = rf.predict(X_test)

r2_score(y_test, rf_pred)

## Ridge Regression
from sklearn.linear_model import Ridge
from sklearn.model_selection import GridSearchCV

ridge = Ridge()
parameters = {'alpha': [1e-15, 1e-10, 1e-8, 1e-3, 1e-2, 1, 5, 10, 20, 30, 35, 40]}
ridge_regressor = GridSearchCV(ridge, parameters, scoring='neg_mean_squared_error', cv=5)
ridge_regressor.fit(X_train, y_train)

rg_pred = ridge_regressor.predict(X_test)

r2_score(y_test, rg_pred)

from xgboost import XGBRegressor
xgb = XGBRegressor()
xgb.fit(X_train, y_train)

xgb_pred = xgb.predict(X_test)

r2_score(y_test, xgb_pred)

# Saving the ridge regression model (the chosen model) to a pickle file
import pickle
filename = 'model.pkl'
pickle.dump(ridge_regressor, open(filename, 'wb'))
<?php /** * Copyright © 2013-2017 Magento, Inc. All rights reserved. * See COPYING.txt for license details. */ namespace Magento\Checkout\Block\Cart\Item; /** * Cart Item Configure block * Updates templates and blocks to show 'Update Cart' button and set right form submit url * * @module Checkout */ class Configure extends \Magento\Framework\View\Element\Template { /** * Configure product view blocks * * @return $this */ protected function _prepareLayout() { // Set custom submit url route for form - to submit updated options to cart $block = $this->getLayout()->getBlock('product.info'); if ($block) { $block->setSubmitRouteData( [ 'route' => 'checkout/cart/updateItemOptions', 'params' => ['id' => $this->getRequest()->getParam('id')], ] ); } return parent::_prepareLayout(); } }
using SearchCustomNavigationPage.Controls; using SearchCustomNavigationPage.ViewModels; using Xamarin.Forms.Xaml; namespace SearchCustomNavigationPage.Views { [XamlCompilation(XamlCompilationOptions.Compile)] public partial class Page1 : SearchPage { public Page1() { InitializeComponent(); //NavigationPage.SetTitleIcon(this,"icon.png"); BindingContext = new SearchPageViewModel(); } } }
# Concise Implementation of Multilayer Perceptrons
:label:`sec_mlp_concise`

As you might expect, we can (**implement multilayer perceptrons even more concisely via the high-level APIs**).

```python
import paddle
from paddle import nn
from d2l import torch as d2l
import pd2l
```

## Model

Compared with the concise implementation of softmax regression (:numref:`sec_softmax_concise`), the only difference is that we add two fully-connected layers (previously we added only one). The first is the [**hidden layer**], which (**contains 256 hidden units and applies the ReLU activation function**). The second is the output layer.

```python
net = nn.Sequential(nn.Flatten(),
                    nn.Linear(784, 256),
                    nn.ReLU(),
                    nn.Linear(256, 10))

for layer in net:
    if type(layer) == nn.Linear:
        weight_attr = paddle.framework.ParamAttr(initializer=paddle.nn.initializer.Normal(mean=0.0, std=0.01))
        layer.weight_attr = weight_attr
```

The implementation of the [**training loop**] is exactly the same as when we implemented softmax regression. This modular design lets us separate matters concerning the model architecture from the rest.

```python
batch_size, lr, num_epochs = 256, 0.1, 10
loss = nn.CrossEntropyLoss()
trainer = paddle.optimizer.SGD(parameters=net.parameters(), learning_rate=lr)
```

```python
train_iter, test_iter = pd2l.load_data_fashion_mnist(batch_size)
# d2l.train_ch3(net, train_iter, test_iter, loss, num_epochs, trainer)
```

## Summary

* We can implement multilayer perceptrons more concisely using high-level APIs.
* For the same classification problem, the implementation of a multilayer perceptron is the same as that of softmax regression, except that the multilayer perceptron adds hidden layers with activation functions.

## Exercises

1. Try adding different numbers of hidden layers (you may also modify the learning rate). Which setting works best?
1. Try out different activation functions. Which one works best?
1. Try out different schemes for initializing the weights. Which method works best?

[Discussions](https://discuss.d2l.ai/t/1802)
using System;
using System.Buffers.Binary;
using System.Diagnostics;
using Microsoft.Extensions.Logging;
using NaCl.Core;

namespace Lyn.Protocol.Bolt8
{
    public class ChaCha20Poly1305CipherFunction : ICipherFunction
    {
        private readonly ILogger<ChaCha20Poly1305CipherFunction> _logger;
        readonly byte[] _key = new byte[32];
        ulong _nonce;

        public ChaCha20Poly1305CipherFunction(ILogger<ChaCha20Poly1305CipherFunction> logger)
        {
            _logger = logger;
        }

        public void SetKey(ReadOnlySpan<byte> key)
        {
            key.CopyTo(_key);
            _nonce = 0;
        }

        public ReadOnlySpan<byte> GetKey() => _key;

        public ulong GetNonce() => _nonce;

        public int EncryptWithAd(ReadOnlySpan<byte> ad, ReadOnlySpan<byte> plaintext, Span<byte> ciphertext)
        {
            Debug.Assert(_key.Length == Aead.KEY_SIZE);
            Debug.Assert(ciphertext.Length >= plaintext.Length + Aead.TAG_SIZE);

            Span<byte> nonce = stackalloc byte[Aead.NONCE_SIZE];
            BinaryPrimitives.WriteUInt64LittleEndian(nonce.Slice(4), _nonce);

            var cipher = new ChaCha20Poly1305(_key);

            var cipherTextOutput = ciphertext.Slice(0, plaintext.Length);
            var tag = ciphertext.Slice(plaintext.Length, Aead.TAG_SIZE);

            _logger.LogDebug($"Encrypting plain text with length of {plaintext.Length} with nonce {_nonce}");

            cipher.Encrypt(nonce, plaintext.ToArray(), cipherTextOutput, tag, ad.ToArray());

            _nonce++;

            return cipherTextOutput.Length + tag.Length;
        }

        public int DecryptWithAd(ReadOnlySpan<byte> ad, ReadOnlySpan<byte> ciphertext, Span<byte> plaintext)
        {
            Debug.Assert(_key.Length == Aead.KEY_SIZE);
            Debug.Assert(ciphertext.Length >= Aead.TAG_SIZE);
            Debug.Assert(plaintext.Length >= ciphertext.Length - Aead.TAG_SIZE);

            Span<byte> nonce = stackalloc byte[Aead.NONCE_SIZE];
            BinaryPrimitives.WriteUInt64LittleEndian(nonce.Slice(4), _nonce);

            var cipher = new ChaCha20Poly1305(_key);

            var cipherTextWithoutTag = ciphertext.Slice(0, ciphertext.Length - Aead.TAG_SIZE);
            var tag = ciphertext.Slice(ciphertext.Length - Aead.TAG_SIZE);

            _logger.LogDebug($"Decrypting ciphertext with length of {ciphertext.Length} with nonce {_nonce}");

            cipher.Decrypt(nonce, cipherTextWithoutTag, tag, plaintext, ad);

            _nonce++;

            return cipherTextWithoutTag.Length;
        }
    }
}
import 'package:flutter/widgets.dart'; abstract class MultipassBuildWidget extends StatefulWidget { const MultipassBuildWidget({ Key? key }) : super(key: key); @override MultipassBuildElement createElement() => MultipassBuildElement(this); @override MultipassBuildState createState(); } class MultipassBuildElement extends StatefulElement { MultipassBuildElement(MultipassBuildWidget widget) : super(widget); late bool Function() afterPass; @override void performRebuild() { do { super.performRebuild(); } while (afterPass.call()); } } abstract class MultipassBuildState<T extends StatefulWidget> extends State<T> { MultipassBuildElement get _element => context as MultipassBuildElement; @override void initState() { super.initState(); _element.afterPass = afterPass; } bool afterPass(); }
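// Usage sketch (illustrative only; MyWidget and _TwoPassState are hypothetical names):
// `afterPass` is consulted after every build pass; returning true triggers another
// synchronous rebuild, returning false ends the loop.
//
//   class _TwoPassState extends MultipassBuildState<MyWidget> {
//     int _passes = 0;
//
//     @override
//     Widget build(BuildContext context) => const SizedBox();
//
//     @override
//     bool afterPass() => ++_passes % 2 == 1; // an odd pass count requests one more pass
//   }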
package dns import ( "fmt" "github.com/alibaba/kt-connect/pkg/common" opt "github.com/alibaba/kt-connect/pkg/kt/options" "github.com/alibaba/kt-connect/pkg/kt/service/cluster" "github.com/alibaba/kt-connect/pkg/kt/util" "github.com/rs/zerolog/log" "io/ioutil" "os" "os/signal" "strconv" "strings" "syscall" ) const ( resolverDir = "/etc/resolver" ktResolverPrefix = "kt." resolverComment = "# Generated by KtConnect" ) // SetNameServer set dns server records func (s *Cli) SetNameServer(dnsServer string) error { dnsSignal := make(chan error) if err := util.CreateDirIfNotExist(resolverDir); err != nil { log.Error().Err(err).Msgf("Failed to create resolver dir") return err } go func() { namespaces, err := cluster.Ins().GetAllNamespaces() if err != nil { dnsSignal <-err return } preferredDnsInfo := strings.Split(dnsServer, ":") dnsIp := preferredDnsInfo[0] dnsPort := strconv.Itoa(common.StandardDnsPort) if len(preferredDnsInfo) > 1 { dnsPort = preferredDnsInfo[1] } createResolverFile("local", opt.Get().ConnectOptions.ClusterDomain, dnsIp, dnsPort) for _, ns := range namespaces.Items { createResolverFile(fmt.Sprintf("%s.local", ns.Name), ns.Name, dnsIp, dnsPort) } dnsSignal <- nil defer s.RestoreNameServer() sigCh := make(chan os.Signal, 1) signal.Notify(sigCh, os.Interrupt, syscall.SIGTERM) <-sigCh }() return <-dnsSignal } func createResolverFile(postfix, domain, dnsIp, dnsPort string) { resolverFile := fmt.Sprintf("%s/%s%s", resolverDir, ktResolverPrefix, postfix) if _, err := os.Stat(resolverFile); err == nil { _ = os.Remove(resolverFile) } resolverContent := fmt.Sprintf("%s\ndomain %s\nnameserver %s\nport %s\n", resolverComment, domain, dnsIp, dnsPort) if err := ioutil.WriteFile(resolverFile, []byte(resolverContent), 0644); err != nil { log.Warn().Err(err).Msgf("Failed to create resolver file of %s", domain) } } // RestoreNameServer remove the nameservers added by ktctl func (s *Cli) RestoreNameServer() { rd, _ := ioutil.ReadDir(resolverDir) for _, f := range rd { if !f.IsDir() && strings.HasPrefix(f.Name(), ktResolverPrefix) { if err := os.Remove(fmt.Sprintf("%s/%s", resolverDir, f.Name())); err != nil { log.Warn().Err(err).Msgf("Failed to remove resolver file %s", f.Name()) } } } }
''' Wrapper/entry point for WSGI servers like Gunicorn. Can launch multiple modules at once, but requires environment variables to be set to do so. 2019-20 Benjamin Kellenberger ''' ''' import resources and initialize app ''' import os from bottle import Bottle from setup.migrate_aide import migrate_aide from util.configDef import Config from modules import REGISTERED_MODULES, Database from constants.version import AIDE_VERSION def _verify_unique(instances, moduleClass): ''' Compares the newly requested module, address and port against already launched modules on this instance. Raises an Exception if another module from the same type has already been launched on this instance ''' for key in instances.keys(): instance = instances[key] if moduleClass.__class__.__name__ == instance.__class__.__name__: raise Exception('Module {} already launched on this server.'.format(moduleClass.__class__.__name__)) # load configuration config = Config() # check if config file points to unmigrated v1 project dbConnector = Database(config) hasAdminTable = dbConnector.execute(''' SELECT EXISTS ( SELECT FROM information_schema.tables WHERE table_schema = 'aide_admin' AND table_name = 'project' ); ''', None, 1) if not hasAdminTable[0]['exists']: # not (yet) migrated, raise Exception with instructions to ensure compatibility print(f''' The current installation of AIDE: database host: {config.getProperty('Database', 'host')} database name: {config.getProperty('Database', 'name')} schema: {config.getProperty('Database', 'schema', str, '(not specified)')} points to an installation of the legacy AIDE v1. If you wish to continue using AIDE v2, you have to upgrade the project accordingly. For instructions to do so, see here: https://github.com/microsoft/aerial_wildlife_detection/blob/multiProject/doc/upgrade_from_v1.md ''') import sys sys.exit(1) # check if project has been migrated dbSchema = config.getProperty('Database', 'schema', str, None) if dbSchema is not None: isMigrated = dbConnector.execute(''' SELECT COUNT(*) AS cnt FROM aide_admin.project WHERE shortname = %s; ''', (dbSchema,), 1) if isMigrated is not None and len(isMigrated) and isMigrated[0]['cnt'] == 0: print(f''' WARNING: the selected configuration .ini file ("{os.environ['AIDE_CONFIG_PATH']}") points to a project that has not yet been migrated to AIDE v2. Details: database host: {config.getProperty('Database', 'host')} database name: {config.getProperty('Database', 'name')} schema: {dbSchema} If you wish to continue using AIDE v2 for this project, you have to upgrade it to v2 accordingly. 
For instructions to do so, see here: https://github.com/microsoft/aerial_wildlife_detection/blob/multiProject/doc/upgrade_from_v1.md ''') # bring AIDE up-to-date warnings, errors = migrate_aide() if len(warnings) or len(errors): print(f'Warnings and/or errors occurred while updating AIDE to the latest version ({AIDE_VERSION}):') print('\nWarnings:') for w in warnings: print(f'\t"{w}"') print('\nErrors:') for e in errors: print(f'\t"{e}"') # prepare bottle app = Bottle() # parse requested instances instance_args = os.environ['AIDE_MODULES'].split(',') instances = {} # create user handler userHandler = REGISTERED_MODULES['UserHandler'](config, app) for i in instance_args: moduleName = i.strip() if moduleName == 'UserHandler': continue moduleClass = REGISTERED_MODULES[moduleName] # verify _verify_unique(instances, moduleClass) # create instance instance = moduleClass(config, app) instances[moduleName] = instance # add authentication functionality if hasattr(instance, 'addLoginCheckFun'): instance.addLoginCheckFun(userHandler.checkAuthenticated) # launch project meta modules if moduleName == 'LabelUI': aideAdmin = REGISTERED_MODULES['AIDEAdmin'](config, app) aideAdmin.addLoginCheckFun(userHandler.checkAuthenticated) reception = REGISTERED_MODULES['Reception'](config, app) reception.addLoginCheckFun(userHandler.checkAuthenticated) configurator = REGISTERED_MODULES['ProjectConfigurator'](config, app) configurator.addLoginCheckFun(userHandler.checkAuthenticated) statistics = REGISTERED_MODULES['ProjectStatistics'](config, app) statistics.addLoginCheckFun(userHandler.checkAuthenticated) elif moduleName == 'FileServer': from modules.DataAdministration.backend import celery_interface as daa_int elif moduleName == 'AIController': from modules.AIController.backend import celery_interface as aic_int # launch model marketplace with AIController modelMarketplace = REGISTERED_MODULES['ModelMarketplace'](config, app) modelMarketplace.addLoginCheckFun(userHandler.checkAuthenticated) elif moduleName == 'AIWorker': from modules.AIWorker.backend import celery_interface as aiw_int # launch globally required modules dataAdmin = REGISTERED_MODULES['DataAdministrator'](config, app) dataAdmin.addLoginCheckFun(userHandler.checkAuthenticated) staticFiles = REGISTERED_MODULES['StaticFileServer'](config, app) staticFiles.addLoginCheckFun(userHandler.checkAuthenticated) if __name__ == '__main__': # run using server selected by Bottle host = config.getProperty('Server', 'host') port = config.getProperty('Server', 'port') app.run(host=host, port=port)
package com.dyh.permutationandcombination;

/**
 * The "lonely A" exercise: A (which is also his number) is a loner. In a queue of n people
 * (numbered 1 to n), he is in conflict with the people numbered b and c, so he will not stand
 * next to either of them. How many queue orderings satisfy A's requirement?
 * Given the number of people n and the three labels a, b and c, return the answer;
 * n is guaranteed to be at least 3 and at most 11.
 * Example: 6,1,2,3 returns: 288
 * Approach: use permutations and combinations (inclusion-exclusion).
 * @author dyh
 */
public class LonelyA {

	public static int getWays(int n, int a, int b, int c) {
		// write code here
		int res = 0;
		// total number of permutations of n people
		int sumN = fac(n);
		// permutations with a and b glued together; the pair can be ab or ba, so multiply by 2
		int sumab = fac(n - 1) * 2;
		// permutations with a and c glued together
		int sumac = fac(n - 1) * 2;
		// overlap of the two cases above: bac as one person, or cab as one person
		int sum = fac(n - 2) * 2;
		res = sumN - sumab - sumac + sum;
		return res;
	}

	public static int fac(int s) {
		int res = s;
		for (int i = s - 1; i > 0; i--) {
			res = res * i;
		}
		return res;
	}

	public static void main(String[] args) {
		int n = 6;
		int a = 1, b = 2, c = 3;
		System.out.println(getWays(n, a, b, c));
	}
}
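// Illustrative sanity check of the inclusion-exclusion above, matching the sample call in main:
// for n = 6, total = 6! = 720; gluing a and b gives 5! * 2 = 240; gluing a and c likewise
// gives 240; the doubly-subtracted overlap (b-a-c or c-a-b as one block) gives 4! * 2 = 48.
// Hence 720 - 240 - 240 + 48 = 288, the expected output.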
// $Id: AwincIni.h 1.6.1.1 2012/03/08 06:55:15EST 277133 Development 277133(2012/03/08 06:55:49EST) $ // // Declarations for class to manage average wage increases in ini file. #pragma once #ifndef __AWINCINI_H #define __AWINCINI_H #include "AwincDoc.h" // Description: Manages the file of projected average wage // increases stored in the registry or an ini file. // // Remarks: Manages the file of projected average wage // increases stored in the registry or an ini file. class AwincIni : public AwincDoc { protected: // Ini section heading. CString section; // Name of yearly entry. CString entry; // Name of title entry. static const char *sectionTitle; public: AwincIni( int newIstart, int newMaxyears ); AwincIni( const AwincDoc& newAwincDoc ); const AwincIni& operator=( const AwincDoc& newAwincDoc ); void clear(); void read( int altnum ); void setData( int altnum ); void setData( int altNum, const DoubleAnnual& newData ); void setTitle( int altNum ); void setTitle( int altNum, const std::string& newTitle ); void write( int ); }; #endif // __AWINCINI_H
/* * Copyright (c) haipham 2019. All rights reserved. * Any attempt to reproduce this source code in any form shall be met with legal actions. */ package org.swiften.redux.android.dagger /** Created by viethai.pham on 2019/02/21 */ class DependencyLevel1(private val tag: String, val sd: DependencyLevel2) { fun finalize() { println("Redux: Finalized dependency for ${this.tag}") } } class DependencyLevel2(val sd: DependencyLevel3) class DependencyLevel3
package com.td.stereotosurround.util

/**
 * Created by TAPOS DATTA on 04,August,2021
 */
class CustomBuffer(var capacity: Int) {

    private var bufferSize: Int = 0
    private val buffer = FloatArray(capacity)

    fun getSamples(sampleSize: Int): FloatArray {
        if (sampleSize > bufferSize || bufferSize < 1) {
            throw IndexOutOfBoundsException("Couldn't fetch sample data.")
        }
        val data = FloatArray(sampleSize)
        System.arraycopy(buffer, 0, data, 0, sampleSize)
        updateSampleArray(sampleSize)
        return data
    }

    private fun updateSampleArray(sampleSize: Int) {
        // remove the fetched data from the buffer:
        // the first #sampleSize entries are dropped and the remaining data
        // is shifted to the front, following a FIFO approach
        val remainingDataSize: Int = bufferSize - sampleSize
        if (remainingDataSize > 0) {
            System.arraycopy(buffer, sampleSize, buffer, 0, remainingDataSize)
        }
        bufferSize -= sampleSize
    }

    fun addSamples(data: FloatArray?) {
        if (data == null) return
        if (bufferSize + data.size >= capacity) {
            throw IndexOutOfBoundsException("Not enough free capacity to add data.")
        }
        System.arraycopy(data, 0, buffer, bufferSize, data.size)
        bufferSize += data.size
    }

    fun getSize(): Int {
        return bufferSize
    }

    fun getSamples(dest: FloatArray, sampleSize: Int) {
        if (dest.size < sampleSize || sampleSize > bufferSize) {
            throw IndexOutOfBoundsException("Destination is too small to hold the requested samples.")
        }
        System.arraycopy(buffer, 0, dest, 0, sampleSize)
        updateSampleArray(sampleSize)
    }

    fun clear() {
        bufferSize = 0
    }
}
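// Usage sketch (illustrative only): the buffer acts as a FIFO queue of float samples.
//   val buf = CustomBuffer(capacity = 8)
//   buf.addSamples(floatArrayOf(1f, 2f, 3f, 4f))
//   val head = buf.getSamples(2)   // returns [1f, 2f]; the buffer now holds [3f, 4f]
//   buf.getSize()                  // returns 2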
const download = require('download-git-repo')
const ora = require('ora')

/**
 * Download a template repository into a local directory.
 * @param {string} path repository path understood by download-git-repo
 * @param {string} name destination directory
 */
function downloadRepositorie(path, name) {
  return new Promise((resolve, reject) => {
    const spinner = ora('downloading template...')
    spinner.start()
    download(path, name, { clone: true }, (err) => {
      if (err) {
        spinner.fail()
        reject(err)
        return
      }
      spinner.succeed()
      resolve()
    })
  })
}

module.exports = downloadRepositorie
#ifndef _NT35310_H_ #define _NT35310_H_ #include <stdint.h> void tft_hard_init(void); void tft_write_command(uint8_t cmd); void tft_write_byte(uint8_t *data_buf, uint32_t length); void tft_write_half(uint16_t *data_buf, uint32_t length); void tft_write_word(uint32_t *data_buf, uint32_t length, uint32_t flag); void tft_fill_data(uint32_t *data_buf, uint32_t length); #endif
# Copyright 2022 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Test for vision modules.""" import io import os from absl.testing import parameterized import numpy as np from PIL import Image import tensorflow as tf from official.core import exp_factory from official.core import export_base from official.vision import registry_imports # pylint: disable=unused-import from official.vision.dataloaders import classification_input from official.vision.serving import export_module_factory class ImageClassificationExportTest(tf.test.TestCase, parameterized.TestCase): def _get_classification_module(self, input_type, input_image_size): params = exp_factory.get_exp_config('resnet_imagenet') params.task.model.backbone.resnet.model_id = 18 module = export_module_factory.create_classification_export_module( params, input_type, batch_size=1, input_image_size=input_image_size) return module def _get_dummy_input(self, input_type): """Get dummy input for the given input type.""" if input_type == 'image_tensor': return tf.zeros((1, 32, 32, 3), dtype=np.uint8) elif input_type == 'image_bytes': image = Image.fromarray(np.zeros((32, 32, 3), dtype=np.uint8)) byte_io = io.BytesIO() image.save(byte_io, 'PNG') return [byte_io.getvalue()] elif input_type == 'tf_example': image_tensor = tf.zeros((32, 32, 3), dtype=tf.uint8) encoded_jpeg = tf.image.encode_jpeg(tf.constant(image_tensor)).numpy() example = tf.train.Example( features=tf.train.Features( feature={ 'image/encoded': tf.train.Feature( bytes_list=tf.train.BytesList(value=[encoded_jpeg])), })).SerializeToString() return [example] @parameterized.parameters( {'input_type': 'image_tensor'}, {'input_type': 'image_bytes'}, {'input_type': 'tf_example'}, ) def test_export(self, input_type='image_tensor'): input_image_size = [32, 32] tmp_dir = self.get_temp_dir() module = self._get_classification_module(input_type, input_image_size) # Test that the model restores any attrs that are trackable objects # (eg: tables, resource variables, keras models/layers, tf.hub modules). 
module.model.test_trackable = tf.keras.layers.InputLayer(input_shape=(4,)) ckpt_path = tf.train.Checkpoint(model=module.model).save( os.path.join(tmp_dir, 'ckpt')) export_dir = export_base.export( module, [input_type], export_savedmodel_dir=tmp_dir, checkpoint_path=ckpt_path, timestamped=False) self.assertTrue(os.path.exists(os.path.join(tmp_dir, 'saved_model.pb'))) self.assertTrue(os.path.exists( os.path.join(tmp_dir, 'variables', 'variables.index'))) self.assertTrue(os.path.exists( os.path.join(tmp_dir, 'variables', 'variables.data-00000-of-00001'))) imported = tf.saved_model.load(export_dir) classification_fn = imported.signatures['serving_default'] images = self._get_dummy_input(input_type) def preprocess_image_fn(inputs): return classification_input.Parser.inference_fn( inputs, input_image_size, num_channels=3) processed_images = tf.map_fn( preprocess_image_fn, elems=tf.zeros([1] + input_image_size + [3], dtype=tf.uint8), fn_output_signature=tf.TensorSpec( shape=input_image_size + [3], dtype=tf.float32)) expected_logits = module.model(processed_images, training=False) expected_prob = tf.nn.softmax(expected_logits) out = classification_fn(tf.constant(images)) # The imported model should contain any trackable attrs that the original # model had. self.assertTrue(hasattr(imported.model, 'test_trackable')) self.assertAllClose( out['logits'].numpy(), expected_logits.numpy(), rtol=1e-04, atol=1e-04) self.assertAllClose( out['probs'].numpy(), expected_prob.numpy(), rtol=1e-04, atol=1e-04) if __name__ == '__main__': tf.test.main()
/****
 * ExapisSOP
 * Copyright (C) 2020 Takym.
 *
 * distributed under the MIT License.
****/

using System;
using System.IO;

namespace ExapisSOP.IO.Settings.CommandLine
{
	/// <summary>
	///  Represents a command-line argument option.
	/// </summary>
	public class Option
	{
		/// <summary>
		///  Gets the name of this option.
		/// </summary>
		public string Name { get; }

		/// <summary>
		///  Gets the values registered to this option.
		/// </summary>
		public Value[] Values { get; }

		/// <summary>
		///  Creates a new instance of type '<see cref="ExapisSOP.IO.Settings.CommandLine.Option"/>'.
		/// </summary>
		/// <param name="name">The name of the new option.</param>
		/// <param name="values">The values to register to the new option.</param>
		public Option(string name, Value[] values)
		{
			this.Name   = name;
			this.Values = values;
		}

		/// <summary>
		///  Represents a value registered to an option.
		/// </summary>
		public class Value
		{
			/// <summary>
			///  If the argument pointed to a response file, gets the absolute path to that file;
			///  otherwise, gets the value stored in the command-line argument.
			/// </summary>
			public string Source { get; }

			/// <summary>
			///  If the argument pointed to a response file, gets the value read from that file;
			///  otherwise, gets the value stored in the command-line argument.
			/// </summary>
			public string Text { get; }

			/// <summary>
			///  Creates a new instance of type '<see cref="ExapisSOP.IO.Settings.CommandLine.Option.Value"/>'.
			/// </summary>
			/// <param name="arg">The string representing the argument this value should hold.</param>
			public Value(string arg)
			{
				this.Source = arg;
				this.Text   = arg;
			}

			/// <summary>
			///  Creates a new instance of type '<see cref="ExapisSOP.IO.Settings.CommandLine.Option.Value"/>'.
			/// </summary>
			/// <param name="stream">The stream representing the response file.</param>
			/// <exception cref="System.ArgumentNullException"/>
			public Value(Stream stream)
			{
				if (stream == null) {
					throw new ArgumentNullException(nameof(stream));
				}
				var sr = new StreamReader(stream, true);
				this.Source = stream is FileStream fs ? fs.Name : stream.GetType().FullName ?? string.Empty;
				this.Text   = sr.ReadToEnd();
			}
		}
	}
}
// Copyright 2017 The Dawn Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "dawn_native/opengl/RenderPipelineGL.h" #include "dawn_native/opengl/DeviceGL.h" #include "dawn_native/opengl/Forward.h" #include "dawn_native/opengl/PersistentPipelineStateGL.h" #include "dawn_native/opengl/UtilsGL.h" namespace dawn_native { namespace opengl { namespace { GLenum GLPrimitiveTopology(wgpu::PrimitiveTopology primitiveTopology) { switch (primitiveTopology) { case wgpu::PrimitiveTopology::PointList: return GL_POINTS; case wgpu::PrimitiveTopology::LineList: return GL_LINES; case wgpu::PrimitiveTopology::LineStrip: return GL_LINE_STRIP; case wgpu::PrimitiveTopology::TriangleList: return GL_TRIANGLES; case wgpu::PrimitiveTopology::TriangleStrip: return GL_TRIANGLE_STRIP; default: UNREACHABLE(); } } void ApplyFrontFaceAndCulling(const OpenGLFunctions& gl, wgpu::FrontFace face, wgpu::CullMode mode) { if (mode == wgpu::CullMode::None) { gl.Disable(GL_CULL_FACE); } else { gl.Enable(GL_CULL_FACE); // Note that we invert winding direction in OpenGL. Because Y axis is up in OpenGL, // which is different from WebGPU and other backends (Y axis is down). GLenum direction = (face == wgpu::FrontFace::CCW) ? GL_CW : GL_CCW; gl.FrontFace(direction); GLenum cullMode = (mode == wgpu::CullMode::Front) ? GL_FRONT : GL_BACK; gl.CullFace(cullMode); } } GLenum GLBlendFactor(wgpu::BlendFactor factor, bool alpha) { switch (factor) { case wgpu::BlendFactor::Zero: return GL_ZERO; case wgpu::BlendFactor::One: return GL_ONE; case wgpu::BlendFactor::SrcColor: return GL_SRC_COLOR; case wgpu::BlendFactor::OneMinusSrcColor: return GL_ONE_MINUS_SRC_COLOR; case wgpu::BlendFactor::SrcAlpha: return GL_SRC_ALPHA; case wgpu::BlendFactor::OneMinusSrcAlpha: return GL_ONE_MINUS_SRC_ALPHA; case wgpu::BlendFactor::DstColor: return GL_DST_COLOR; case wgpu::BlendFactor::OneMinusDstColor: return GL_ONE_MINUS_DST_COLOR; case wgpu::BlendFactor::DstAlpha: return GL_DST_ALPHA; case wgpu::BlendFactor::OneMinusDstAlpha: return GL_ONE_MINUS_DST_ALPHA; case wgpu::BlendFactor::SrcAlphaSaturated: return GL_SRC_ALPHA_SATURATE; case wgpu::BlendFactor::BlendColor: return alpha ? GL_CONSTANT_ALPHA : GL_CONSTANT_COLOR; case wgpu::BlendFactor::OneMinusBlendColor: return alpha ? 
GL_ONE_MINUS_CONSTANT_ALPHA : GL_ONE_MINUS_CONSTANT_COLOR; default: UNREACHABLE(); } } GLenum GLBlendMode(wgpu::BlendOperation operation) { switch (operation) { case wgpu::BlendOperation::Add: return GL_FUNC_ADD; case wgpu::BlendOperation::Subtract: return GL_FUNC_SUBTRACT; case wgpu::BlendOperation::ReverseSubtract: return GL_FUNC_REVERSE_SUBTRACT; case wgpu::BlendOperation::Min: return GL_MIN; case wgpu::BlendOperation::Max: return GL_MAX; default: UNREACHABLE(); } } void ApplyColorState(const OpenGLFunctions& gl, uint32_t attachment, const ColorStateDescriptor* descriptor) { if (BlendEnabled(descriptor)) { gl.Enablei(GL_BLEND, attachment); gl.BlendEquationSeparatei(attachment, GLBlendMode(descriptor->colorBlend.operation), GLBlendMode(descriptor->alphaBlend.operation)); gl.BlendFuncSeparatei(attachment, GLBlendFactor(descriptor->colorBlend.srcFactor, false), GLBlendFactor(descriptor->colorBlend.dstFactor, false), GLBlendFactor(descriptor->alphaBlend.srcFactor, true), GLBlendFactor(descriptor->alphaBlend.dstFactor, true)); } else { gl.Disablei(GL_BLEND, attachment); } gl.ColorMaski(attachment, descriptor->writeMask & wgpu::ColorWriteMask::Red, descriptor->writeMask & wgpu::ColorWriteMask::Green, descriptor->writeMask & wgpu::ColorWriteMask::Blue, descriptor->writeMask & wgpu::ColorWriteMask::Alpha); } GLuint OpenGLStencilOperation(wgpu::StencilOperation stencilOperation) { switch (stencilOperation) { case wgpu::StencilOperation::Keep: return GL_KEEP; case wgpu::StencilOperation::Zero: return GL_ZERO; case wgpu::StencilOperation::Replace: return GL_REPLACE; case wgpu::StencilOperation::Invert: return GL_INVERT; case wgpu::StencilOperation::IncrementClamp: return GL_INCR; case wgpu::StencilOperation::DecrementClamp: return GL_DECR; case wgpu::StencilOperation::IncrementWrap: return GL_INCR_WRAP; case wgpu::StencilOperation::DecrementWrap: return GL_DECR_WRAP; default: UNREACHABLE(); } } void ApplyDepthStencilState(const OpenGLFunctions& gl, const DepthStencilStateDescriptor* descriptor, PersistentPipelineState* persistentPipelineState) { // Depth writes only occur if depth is enabled if (descriptor->depthCompare == wgpu::CompareFunction::Always && !descriptor->depthWriteEnabled) { gl.Disable(GL_DEPTH_TEST); } else { gl.Enable(GL_DEPTH_TEST); } if (descriptor->depthWriteEnabled) { gl.DepthMask(GL_TRUE); } else { gl.DepthMask(GL_FALSE); } gl.DepthFunc(ToOpenGLCompareFunction(descriptor->depthCompare)); if (StencilTestEnabled(descriptor)) { gl.Enable(GL_STENCIL_TEST); } else { gl.Disable(GL_STENCIL_TEST); } GLenum backCompareFunction = ToOpenGLCompareFunction(descriptor->stencilBack.compare); GLenum frontCompareFunction = ToOpenGLCompareFunction(descriptor->stencilFront.compare); persistentPipelineState->SetStencilFuncsAndMask( gl, backCompareFunction, frontCompareFunction, descriptor->stencilReadMask); gl.StencilOpSeparate(GL_BACK, OpenGLStencilOperation(descriptor->stencilBack.failOp), OpenGLStencilOperation(descriptor->stencilBack.depthFailOp), OpenGLStencilOperation(descriptor->stencilBack.passOp)); gl.StencilOpSeparate(GL_FRONT, OpenGLStencilOperation(descriptor->stencilFront.failOp), OpenGLStencilOperation(descriptor->stencilFront.depthFailOp), OpenGLStencilOperation(descriptor->stencilFront.passOp)); gl.StencilMask(descriptor->stencilWriteMask); } } // anonymous namespace RenderPipeline::RenderPipeline(Device* device, const RenderPipelineDescriptor* descriptor) : RenderPipelineBase(device, descriptor), mVertexArrayObject(0), 
mGlPrimitiveTopology(GLPrimitiveTopology(GetPrimitiveTopology())) { PerStage<const ShaderModule*> modules(nullptr); modules[SingleShaderStage::Vertex] = ToBackend(descriptor->vertexStage.module); modules[SingleShaderStage::Fragment] = ToBackend(descriptor->fragmentStage->module); PipelineGL::Initialize(device->gl, ToBackend(GetLayout()), modules); CreateVAOForVertexState(descriptor->vertexState); } RenderPipeline::~RenderPipeline() { const OpenGLFunctions& gl = ToBackend(GetDevice())->gl; gl.DeleteVertexArrays(1, &mVertexArrayObject); gl.BindVertexArray(0); } GLenum RenderPipeline::GetGLPrimitiveTopology() const { return mGlPrimitiveTopology; } void RenderPipeline::CreateVAOForVertexState(const VertexStateDescriptor* vertexState) { const OpenGLFunctions& gl = ToBackend(GetDevice())->gl; gl.GenVertexArrays(1, &mVertexArrayObject); gl.BindVertexArray(mVertexArrayObject); for (uint32_t location : IterateBitSet(GetAttributeLocationsUsed())) { const auto& attribute = GetAttribute(location); gl.EnableVertexAttribArray(location); attributesUsingVertexBuffer[attribute.vertexBufferSlot][location] = true; const VertexBufferInfo& vertexBuffer = GetVertexBuffer(attribute.vertexBufferSlot); if (vertexBuffer.arrayStride == 0) { // Emulate a stride of zero (constant vertex attribute) by // setting the attribute instance divisor to a huge number. gl.VertexAttribDivisor(location, 0xffffffff); } else { switch (vertexBuffer.stepMode) { case wgpu::InputStepMode::Vertex: break; case wgpu::InputStepMode::Instance: gl.VertexAttribDivisor(location, 1); break; default: UNREACHABLE(); } } } } void RenderPipeline::ApplyNow(PersistentPipelineState& persistentPipelineState) { const OpenGLFunctions& gl = ToBackend(GetDevice())->gl; PipelineGL::ApplyNow(gl); ASSERT(mVertexArrayObject); gl.BindVertexArray(mVertexArrayObject); ApplyFrontFaceAndCulling(gl, GetFrontFace(), GetCullMode()); ApplyDepthStencilState(gl, GetDepthStencilStateDescriptor(), &persistentPipelineState); gl.SampleMaski(0, GetSampleMask()); for (uint32_t attachmentSlot : IterateBitSet(GetColorAttachmentsMask())) { ApplyColorState(gl, attachmentSlot, GetColorStateDescriptor(attachmentSlot)); } } }} // namespace dawn_native::opengl
<?php $rootDir = __DIR__ . '/../../'; require_once $rootDir . 'core/fundControlApp.php'; /* @var $FundControl FundControl */ ?> <div class="scrollable"> <div class="scrollable-content"> <div class="list-group" toggle="off" bubble target="rightSidebar"> <? /* <a class="list-group-item media" href="" ng-repeat="user in chatUsers"> <div class="pull-left"> <i class="fa fa-user chat-user-avatar"></i> </div> <div class="media-body"> <h5 class="media-heading">{{user.name}}</h5> <small ng-switch on="user.online"> <span ng-switch-when="true"> <span class="label label-success">ONLINE</span> </span> <span ng-switch-default> <span class="label label-default"><i>OFFLINE</i></span> </span> </small> </div> </a> //*/ ?> </div> </div> </div>
import {Component, OnDestroy, OnInit} from '@angular/core';
import {BroadcastService, MsalService} from "@azure/msal-angular";
import {Subscription} from "rxjs/Subscription";

@Component({
  selector: 'cd-root',
  templateUrl: './app.component.html',
  styles: []
})
export class AppComponent implements OnInit, OnDestroy {
  public userInfo: any = null;
  public loggedIn: boolean;
  public isIframe: boolean;
  private subscription: Subscription;

  constructor(private broadcastService: BroadcastService, private authService: MsalService) {
    // MSAL reloads the app in a hidden iframe during silent token renewal.
    this.isIframe = window !== window.parent && !window.opener;
    this.loggedIn = !!this.authService.getUser();
  }

  login() {
    this.authService.loginPopup(["user.read", "api://a88bb933-319c-41b5-9f04-eff36d985612/access_as_user"]);
  }

  logout() {
    this.authService.logout();
  }

  ngOnInit() {
    this.broadcastService.subscribe("msal:loginFailure", (payload) => {
      console.log("login failure " + JSON.stringify(payload));
      this.loggedIn = false;
    });

    this.broadcastService.subscribe("msal:loginSuccess", (payload) => {
      console.log("login success " + JSON.stringify(payload));
      this.loggedIn = true;
    });
  }

  ngOnDestroy() {
    // Signal the MSAL broadcast subject so broadcast subscriptions complete.
    this.broadcastService.getMSALSubject().next(1);
    if (this.subscription) {
      this.subscription.unsubscribe();
    }
  }
}
function Start-WinSubscriptionService { [CmdletBinding()] param( [System.Collections.IDictionary] $LoggerParameters ) if (-not $LoggerParameters) { $LoggerParameters = $Script:LoggerParameters } $Logger = Get-Logger @LoggerParameters $Logger.AddInfoRecord('Starting Windows Event Collector service.') $Output = Start-MyProgram -Program $Script:ProgramWecutil -cmdArgList 'qc', '/q:true' $Logger.AddInfoRecord($Output) }
package com.tunasushi.tool /** * @author TunaSashimi * @date 2020-02-02 19:49 * @Copyright 2020 TunaSashimi. All rights reserved. * @Description */ class PathTool
# -*- coding: utf-8 -*- require 'rails_helper' require 'tempfile' describe Asciicast do describe '.find_by_id_or_secret_token!' do subject { Asciicast.find_by_id_or_secret_token!(thing) } context 'for public asciicast' do let(:asciicast) { create(:asciicast, private: false) } context 'when looked up by id' do let(:thing) { asciicast.id } it { should eq(asciicast) } end context 'when looked up by secret token' do let(:thing) { asciicast.secret_token } it { should eq(asciicast) } end end context 'for private asciicast' do let(:asciicast) { create(:asciicast, private: true) } context 'when looked up by id' do let(:thing) { asciicast.id } it 'raises RecordNotFound' do expect { subject }.to raise_error(ActiveRecord::RecordNotFound) end end context 'when looked up by secret token' do let(:thing) { asciicast.secret_token } it { should eq(asciicast) } end end end describe '.generate_secret_token' do subject { Asciicast.generate_secret_token } it { should match(/^[a-z0-9]{25}$/) } end describe '.for_category_ordered' do subject { described_class.for_category_ordered(category, order) } let!(:asciicast_1) { create(:asciicast, created_at: 2.hours.ago, views_count: 10, featured: false) } let!(:asciicast_2) { create(:asciicast, created_at: 1.hour.ago, views_count: 20, featured: true) } let!(:asciicast_3) { create(:asciicast, created_at: 4.hours.ago, views_count: 30, featured: false) } let!(:asciicast_4) { create(:asciicast, created_at: 3.hours.ago, views_count: 40, featured: true) } context "when category is :all" do let(:category) { :all } context "and order is :date" do let(:order) { :date } it { should eq([asciicast_2, asciicast_1, asciicast_4, asciicast_3]) } end context "and order is :popularity" do let(:order) { :popularity } it { should eq([asciicast_4, asciicast_3, asciicast_2, asciicast_1]) } end end context "when category is :featured" do let(:category) { :featured } context "and order is :date" do let(:order) { :date } it { should eq([asciicast_2, asciicast_4]) } end context "and order is :popularity" do let(:order) { :popularity } it { should eq([asciicast_4, asciicast_2]) } end end end describe '#to_param' do subject { asciicast.to_param } let(:asciicast) { Asciicast.new(id: 123, secret_token: 'sekrit') } context 'for public asciicast' do before do asciicast.private = false end it { should eq('123') } end context 'for private asciicast' do before do asciicast.private = true end it { should eq('sekrit') } end end describe '#stdout' do context 'for single-file, JSON asciicast' do let(:asciicast) { create(:asciicast) } subject { asciicast.stdout.to_a } it 'is enumerable with [delay, data] pair as every item' do expect(subject).to eq([ [1.234567, "foo bar"], [5.678987, "baz qux"], [3.456789, "żółć jaźń"], ]) end end context 'for multi-file, legacy asciicast' do let(:asciicast) { create(:legacy_asciicast) } subject { asciicast.stdout.to_a } it 'is enumerable with [delay, data] pair as every item' do expect(subject).to eq([ [1.234567, "foobar"], [0.123456, "baz"], [2.345678, "qux"], ]) end end end describe '#theme' do it 'returns proper theme when theme_name is not blank' do asciicast = described_class.new(theme_name: 'tango') expect(asciicast.theme.name).to eq('tango') end it 'returns nil when theme_name is blank' do asciicast = described_class.new(theme_name: '') expect(asciicast.theme).to be(nil) end end end
use std::ops::{AddAssign, SubAssign}; #[repr(C)] #[derive(Copy, Clone)] pub union R16 { w: u16, b: [u8; 2] } #[cfg(target_endian="little")] const LO_IDX : usize = 0; #[cfg(target_endian="big")] const LO_IDX : usize = 1; const HI_IDX : usize = 1 - LO_IDX; impl R16 { #[inline] pub fn from_bytes(lo: u8, hi: u8) -> R16 { let mut r = R16::default(); r.set_lo(lo); r.set_hi(hi); r } #[inline] pub fn as_u16(self) -> u16 { unsafe { self.w } } #[inline] pub fn set(&mut self, w: u16) { self.w = w; } #[inline] pub fn lo(self) -> u8 { unsafe { self.b[LO_IDX] } } #[inline] pub fn hi(self) -> u8 { unsafe { self.b[HI_IDX] } } #[inline] pub fn set_lo(&mut self, b: u8) { unsafe { self.b[LO_IDX] = b; } } #[inline] pub fn set_hi(&mut self, b: u8) { unsafe { self.b[HI_IDX] = b; } } } impl Default for R16 { #[inline] fn default() -> Self { R16{ w: 0 } } } impl From<R16> for u16 { #[inline] fn from(r: R16) -> Self { r.as_u16() } } impl From<u16> for R16 { #[inline] fn from(w: u16) -> Self { R16{ w } } } impl AddAssign<u16> for R16 { #[inline] fn add_assign(&mut self, r: u16) { let w = self.as_u16().wrapping_add(r); self.set(w); } } impl SubAssign<u16> for R16 { #[inline] fn sub_assign(&mut self, r: u16) { let w = self.as_u16().wrapping_sub(r); self.set(w); } }
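// A hypothetical usage sketch (not part of the module above): it assumes `R16`
// is in scope and exercises the round trip between the 16-bit and byte views.
fn main() {
    // Build HL = 0x1234 from its low and high bytes.
    let mut hl = R16::from_bytes(0x34, 0x12);
    assert_eq!(hl.as_u16(), 0x1234);

    // Wrapping 16-bit arithmetic through AddAssign<u16>.
    hl += 1;
    assert_eq!(hl.lo(), 0x35);

    // A byte-level write shows through the 16-bit view.
    hl.set_hi(0xFF);
    assert_eq!(u16::from(hl), 0xFF35);
}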
package com.ipincloud.iotbj.srv.domain; import java.io.Serializable; import java.math.BigDecimal; import java.sql.Time; import java.sql.Date; import java.sql.Timestamp; import com.alibaba.fastjson.annotation.JSONField; //(SyncDevice) //generate by redcloud,2020-07-24 19:59:20 public class SyncDevice implements Serializable { private static final long serialVersionUID = 68L; // 资源类型 private String resourceType ; // 资源唯一标志 private String indexCode ; // 用户名 private String userName ; // 资源名称 private String name ; // 所属区域编号 private String regionIndexCode ; // 所属区域路径 private String regionPath ; // 区域路径名称 private String regionPathName ; // 设备IP private String ip ; // 设备端口 private String port ; // 设备接入协议 private String treatyType ; // 所属网域ID private String netZoneId ; // 厂商 private String manufacturer ; // 创建时间 private String createTime ; // 更新时间 private String updateTime ; public String getResourceType() { return resourceType ; } public void setResourceType(String resourceType) { this.resourceType = resourceType; } public String getIndexCode() { return indexCode ; } public void setIndexCode(String indexCode) { this.indexCode = indexCode; } public String getUserName() { return userName ; } public void setUserName(String userName) { this.userName = userName; } public String getName() { return name ; } public void setName(String name) { this.name = name; } public String getRegionIndexCode() { return regionIndexCode ; } public void setRegionIndexCode(String regionIndexCode) { this.regionIndexCode = regionIndexCode; } public String getRegionPath() { return regionPath ; } public void setRegionPath(String regionPath) { this.regionPath = regionPath; } public String getRegionPathName() { return regionPathName ; } public void setRegionPathName(String regionPathName) { this.regionPathName = regionPathName; } public String getIp() { return ip ; } public void setIp(String ip) { this.ip = ip; } public String getPort() { return port ; } public void setPort(String port) { this.port = port; } public String getTreatyType() { return treatyType ; } public void setTreatyType(String treatyType) { this.treatyType = treatyType; } public String getNetZoneId() { return netZoneId ; } public void setNetZoneId(String netZoneId) { this.netZoneId = netZoneId; } public String getManufacturer() { return manufacturer ; } public void setManufacturer(String manufacturer) { this.manufacturer = manufacturer; } public String getCreateTime() { return createTime ; } public void setCreateTime(String createTime) { this.createTime = createTime; } public String getUpdateTime() { return updateTime ; } public void setUpdateTime(String updateTime) { this.updateTime = updateTime; } }
@extends('layout') @section('content') <div class="row d-flex align-items-center" style="height: 500px"> <div class="col d-flex justify-content-center"> <a class="btn btn-lg" href="auth" style="background-color: #6441a5; color: white">Login with Twitch</a> </div> </div> @endsection
-- | -- Module : Jikka.Core.Parse.Token -- Description : defines tokens of our core language. / core 言語の字句要素を定義します。 -- Copyright : (c) Kimiyuki Onaka, 2020 -- License : Apache License 2.0 -- Maintainer : [email protected] -- Stability : experimental -- Portability : portable module Jikka.Core.Parse.Token where import Jikka.Common.Location data Operator = -- arithmetic operators Plus | Minus | Mult | FloorDiv | FloorMod | CeilDiv | CeilMod | JustDiv | Pow | -- boolean operators And | Or | -- bit operators BitNot | BitAnd | BitOr | BitXor | BitLShift | BitRShift | -- comparators DoubleEqual | NotEqual | LessThan | LessEqual | GreaterThan | GreaterEqual deriving (Eq, Ord, Show, Read) -- | We don't have to classify tokens in detail, but it's convenient for testing and debugging. data Token = -- identifier Ident String | -- literals Int Integer | Bool Bool | String String | -- keywords Let | Rec | In | If | Then | Else | Fun | Dot | Assert | Forall | -- punctuations Arrow | Equal | Colon | Comma | Underscore | BackArrow | At | -- parens OpenBracket | OpenParen | CloseBracket | CloseParen | -- operators Operator Operator deriving (Eq, Ord, Show, Read) type Token' = WithLoc Token
import React from "react" import { graphql, Link } from "gatsby" import Layout from "../components/layout" import SEO from "../components/seo" const FetchRemoteB = ({ data }) => { return ( <Layout> <SEO title="Fetch Remote B" /> <pre dangerouslySetInnerHTML={{ __html: JSON.stringify(data, null, 2) }} /> <Link to="/">Go back to the homepage</Link> </Layout> ) } export default FetchRemoteB export const pageQuery = graphql` { allMyRemoteFile { nodes { url publicUrl } } } `
use crate::errors::RustusError; use actix_web::{ http::header::{CacheControl, CacheDirective}, web, HttpRequest, HttpResponse, }; use futures::stream::empty; use crate::{RustusResult, State}; pub async fn get_file_info( state: web::Data<State>, request: HttpRequest, ) -> RustusResult<HttpResponse> { // Getting file id from URL. if request.match_info().get("file_id").is_none() { return Err(RustusError::FileNotFound); } let file_id = request.match_info().get("file_id").unwrap(); // Getting file info from info_storage. let file_info = state.info_storage.get_info(file_id).await?; if file_info.storage != state.data_storage.to_string() { return Err(RustusError::FileNotFound); } let mut builder = HttpResponse::Ok(); if file_info.is_partial { builder.insert_header(("Upload-Concat", "partial")); } if file_info.is_final && file_info.parts.is_some() { #[allow(clippy::or_fun_call)] let parts = file_info .parts .clone() .unwrap() .iter() .map(|file| format!("/{}/{}/", state.config.base_url(), file.as_str())) .collect::<Vec<String>>() .join(" "); builder.insert_header(("Upload-Concat", format!("final; {}", parts))); } builder .no_chunking(file_info.offset as u64) .insert_header(("Upload-Offset", file_info.offset.to_string())); // Upload length is known. if let Some(upload_len) = file_info.length { builder .no_chunking(upload_len as u64) .insert_header(("Content-Length", file_info.offset.to_string())) .insert_header(("Upload-Length", upload_len.to_string())); } else { builder.insert_header(("Upload-Defer-Length", "1")); } if let Some(meta) = file_info.get_metadata_string() { builder.insert_header(("Upload-Metadata", meta)); } builder.insert_header(CacheControl(vec![CacheDirective::NoCache])); Ok(builder.streaming(empty::<RustusResult<web::Bytes>>())) } #[cfg(test)] mod tests { use actix_web::http::{Method, StatusCode}; use crate::{rustus_service, State}; use actix_web::{ test::{call_service, init_service, TestRequest}, App, }; #[actix_rt::test] async fn success() { let state = State::test_new().await; let mut rustus = init_service(App::new().configure(rustus_service(state.clone()))).await; let mut file_info = state.create_test_file().await; file_info.offset = 100; file_info.length = Some(100); state .info_storage .set_info(&file_info, false) .await .unwrap(); let request = TestRequest::with_uri(state.config.file_url(file_info.id.as_str()).as_str()) .method(Method::HEAD) .to_request(); let response = call_service(&mut rustus, request).await; let offset = response .headers() .get("Upload-Offset") .unwrap() .to_str() .unwrap() .parse::<usize>() .unwrap(); assert_eq!(file_info.offset, offset) } #[actix_rt::test] async fn success_metadata() { let state = State::test_new().await; let mut rustus = init_service(App::new().configure(rustus_service(state.clone()))).await; let mut file_info = state.create_test_file().await; file_info.offset = 100; file_info.length = Some(100); file_info.metadata.insert("test".into(), "value".into()); state .info_storage .set_info(&file_info, false) .await .unwrap(); let request = TestRequest::with_uri(state.config.file_url(file_info.id.as_str()).as_str()) .method(Method::HEAD) .to_request(); let response = call_service(&mut rustus, request).await; let metadata = response .headers() .get("Upload-Metadata") .unwrap() .to_str() .unwrap(); assert_eq!( String::from(metadata), format!("{} {}", "test", base64::encode("value")) ) } #[actix_rt::test] async fn success_defer_len() { let state = State::test_new().await; let mut rustus = 
init_service(App::new().configure(rustus_service(state.clone()))).await; let mut file_info = state.create_test_file().await; file_info.deferred_size = true; file_info.length = None; state .info_storage .set_info(&file_info, false) .await .unwrap(); let request = TestRequest::with_uri(state.config.file_url(file_info.id.as_str()).as_str()) .method(Method::HEAD) .to_request(); let response = call_service(&mut rustus, request).await; assert_eq!( response .headers() .get("Upload-Defer-Length") .unwrap() .to_str() .unwrap(), "1" ); } #[actix_rt::test] async fn test_get_file_info_partial() { let state = State::test_new().await; let mut rustus = init_service(App::new().configure(rustus_service(state.clone()))).await; let mut file_info = state.create_test_file().await; file_info.is_partial = true; state .info_storage .set_info(&file_info, false) .await .unwrap(); let request = TestRequest::with_uri(state.config.file_url(file_info.id.as_str()).as_str()) .method(Method::HEAD) .to_request(); let response = call_service(&mut rustus, request).await; assert_eq!( response .headers() .get("Upload-Concat") .unwrap() .to_str() .unwrap(), "partial" ); } #[actix_rt::test] async fn success_final() { let state = State::test_new().await; let mut rustus = init_service(App::new().configure(rustus_service(state.clone()))).await; let mut file_info = state.create_test_file().await; file_info.is_partial = false; file_info.is_final = true; file_info.parts = Some(vec!["test1".into(), "test2".into()]); state .info_storage .set_info(&file_info, false) .await .unwrap(); let request = TestRequest::with_uri(state.config.file_url(file_info.id.as_str()).as_str()) .method(Method::HEAD) .to_request(); let response = call_service(&mut rustus, request).await; assert_eq!( response .headers() .get("Upload-Concat") .unwrap() .to_str() .unwrap(), format!( "final; {} {}", state.config.file_url("test1"), state.config.file_url("test2") ) .as_str() ); } #[actix_rt::test] async fn no_file() { let state = State::test_new().await; let mut rustus = init_service(App::new().configure(rustus_service(state.clone()))).await; let request = TestRequest::with_uri(state.config.file_url("unknknown").as_str()) .method(Method::HEAD) .to_request(); let response = call_service(&mut rustus, request).await; assert_eq!(response.status(), StatusCode::NOT_FOUND); } #[actix_rt::test] async fn test_get_file_info_wrong_storage() { let state = State::test_new().await; let mut rustus = init_service(App::new().configure(rustus_service(state.clone()))).await; let mut file_info = state.create_test_file().await; file_info.storage = String::from("unknown"); state .info_storage .set_info(&file_info, false) .await .unwrap(); let request = TestRequest::with_uri(state.config.file_url(file_info.id.as_str()).as_str()) .method(Method::HEAD) .to_request(); let response = call_service(&mut rustus, request).await; assert_eq!(response.status(), StatusCode::NOT_FOUND); } }
// See LICENSE.Berkeley for license details. // See LICENSE.SiFive for license details. package uncore.agents import Chisel._ import uncore.coherence._ import uncore.tilelink._ import uncore.constants._ import uncore.util._ import util._ import config._ class L2BroadcastHub(implicit p: Parameters) extends HierarchicalCoherenceAgent()(p) { // Create TSHRs for outstanding transactions val irelTrackerList = (0 until nReleaseTransactors).map(id => Module(new BufferedBroadcastVoluntaryReleaseTracker(id))) val iacqTrackerList = (nReleaseTransactors until nTransactors).map(id => Module(new BufferedBroadcastAcquireTracker(id))) val trackerList = irelTrackerList ++ iacqTrackerList // Propagate incoherence flags trackerList.map(_.io.incoherent) foreach { _ := io.incoherent } // Create an arbiter for the one memory port val outerList = trackerList.map(_.io.outer) val outer_arb = Module(new ClientTileLinkIOArbiter(outerList.size) (p.alterPartial({ case TLId => p(OuterTLId) }))) outer_arb.io.in <> outerList io.outer <> outer_arb.io.out // Handle acquire transaction initiation val irel_vs_iacq_conflict = io.inner.acquire.valid && io.inner.release.valid && io.irel().conflicts(io.iacq()) doInputRoutingWithAllocation( in = io.inner.acquire, outs = trackerList.map(_.io.inner.acquire), allocs = trackerList.map(_.io.alloc.iacq), allocOverride = Some(!irel_vs_iacq_conflict)) // Handle releases, which might be voluntary and might have data doInputRoutingWithAllocation( in = io.inner.release, outs = trackerList.map(_.io.inner.release), allocs = trackerList.map(_.io.alloc.irel)) // Wire probe requests and grant reply to clients, finish acks from clients doOutputArbitration(io.inner.probe, trackerList.map(_.io.inner.probe)) doOutputArbitration(io.inner.grant, trackerList.map(_.io.inner.grant)) doInputRouting(io.inner.finish, trackerList.map(_.io.inner.finish)) disconnectOuterProbeAndFinish() } class BroadcastXactTracker(implicit p: Parameters) extends XactTracker()(p) { val io = new HierarchicalXactTrackerIO pinAllReadyValidLow(io) } trait BroadcastsToAllClients extends HasCoherenceAgentParameters { val coh = HierarchicalMetadata.onReset val inner_coh = coh.inner val outer_coh = coh.outer def full_representation = ~UInt(0, width = innerNCachingClients) } abstract class BroadcastVoluntaryReleaseTracker(trackerId: Int)(implicit p: Parameters) extends VoluntaryReleaseTracker(trackerId)(p) with EmitsVoluntaryReleases with BroadcastsToAllClients { val io = new HierarchicalXactTrackerIO pinAllReadyValidLow(io) // Checks for illegal behavior assert(!(state === s_idle && io.inner.release.fire() && io.alloc.irel.should && !io.irel().isVoluntary()), "VoluntaryReleaseTracker accepted Release that wasn't voluntary!") } abstract class BroadcastAcquireTracker(trackerId: Int)(implicit p: Parameters) extends AcquireTracker(trackerId)(p) with EmitsVoluntaryReleases with BroadcastsToAllClients { val io = new HierarchicalXactTrackerIO pinAllReadyValidLow(io) val alwaysWriteFullBeat = false val nSecondaryMisses = 1 def iacq_can_merge = Bool(false) // Checks for illegal behavior // TODO: this could be allowed, but is a useful check against allocation gone wild assert(!(state === s_idle && io.inner.acquire.fire() && io.alloc.iacq.should && io.iacq().hasMultibeatData() && !io.iacq().first()), "AcquireTracker initialized with a tail data beat.") assert(!(state =/= s_idle && pending_ignt && xact_iacq.isPrefetch()), "Broadcast Hub does not support Prefetches.") assert(!(state =/= s_idle && pending_ignt && xact_iacq.isAtomic()), "Broadcast 
Hub does not support PutAtomics.") } class BufferedBroadcastVoluntaryReleaseTracker(trackerId: Int)(implicit p: Parameters) extends BroadcastVoluntaryReleaseTracker(trackerId)(p) with HasDataBuffer { // Tell the parent if any incoming messages conflict with the ongoing transaction routeInParent(irelCanAlloc = Bool(true)) // Start transaction by accepting inner release innerRelease(block_vol_ignt = pending_orel || vol_ognt_counter.pending) // A release beat can be accepted if we are idle, if its a mergeable transaction, or if its a tail beat io.inner.release.ready := state === s_idle || irel_can_merge || irel_same_xact when(io.inner.release.fire()) { data_buffer(io.irel().addr_beat) := io.irel().data } // Dispatch outer release outerRelease( coh = outer_coh.onHit(M_XWR), data = data_buffer(vol_ognt_counter.up.idx), add_pending_send_bit = irel_is_allocating) quiesce() {} } class BufferedBroadcastAcquireTracker(trackerId: Int)(implicit p: Parameters) extends BroadcastAcquireTracker(trackerId)(p) with HasByteWriteMaskBuffer { // Setup IOs used for routing in the parent routeInParent(iacqCanAlloc = Bool(true)) // First, take care of accpeting new acquires or secondary misses // Handling of primary and secondary misses' data and write mask merging innerAcquire( can_alloc = Bool(false), next = s_inner_probe) io.inner.acquire.ready := state === s_idle || iacq_can_merge || iacq_same_xact_multibeat // Track which clients yet need to be probed and make Probe message // If a writeback occurs, we can forward its data via the buffer, // and skip having to go outwards val skip_outer_acquire = pending_ignt_data.andR innerProbe( inner_coh.makeProbe(curr_probe_dst, xact_iacq, xact_addr_block), Mux(!skip_outer_acquire, s_outer_acquire, s_busy)) // Handle incoming releases from clients, which may reduce sharer counts // and/or write back dirty data, and may be unexpected voluntary releases def irel_can_merge = io.irel().conflicts(xact_addr_block) && io.irel().isVoluntary() && !state.isOneOf(s_idle, s_meta_write) && !all_pending_done && !io.outer.grant.fire() && !io.inner.grant.fire() && !vol_ignt_counter.pending && !blockInnerRelease() innerRelease(block_vol_ignt = vol_ognt_counter.pending) //TODO: accept vol irels when state === s_idle, operate like the VolRelTracker io.inner.release.ready := irel_can_merge || irel_same_xact mergeDataInner(io.inner.release) // If there was a writeback, forward it outwards outerRelease( block_orel = pending_put_data(vol_ognt_counter.up.idx), coh = outer_coh.onHit(M_XWR), data = data_buffer(vol_ognt_counter.up.idx)) // Send outer request for miss outerAcquire( caching = !xact_iacq.isBuiltInType(), coh = outer_coh, data = data_buffer(ognt_counter.up.idx), wmask = wmask_buffer(ognt_counter.up.idx), next = s_busy) // Handle the response from outer memory mergeDataOuter(io.outer.grant) // Acknowledge or respond with data innerGrant( data = data_buffer(ignt_data_idx), external_pending = pending_orel || ognt_counter.pending || vol_ognt_counter.pending) when(iacq_is_allocating) { initializeProbes() } initDataInner(io.inner.acquire, iacq_is_allocating || iacq_is_merging) // Wait for everything to quiesce quiesce() { clearWmaskBuffer() } }
%%-------------------------------------------------------------------- %% Copyright (c) 2021-2022 EMQ Technologies Co., Ltd. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %%-------------------------------------------------------------------- %% @doc The emqx-modules configuration interoperable interfaces -module(emqx_modules_conf). -behaviour(emqx_config_handler). %% Load/Unload -export([ load/0, unload/0 ]). -export([ topic_metrics/0, add_topic_metrics/1, remove_topic_metrics/1, is_telemetry_enabled/0, set_telemetry_status/1 ]). %% config handlers -export([ pre_config_update/3, post_config_update/5 ]). %%-------------------------------------------------------------------- %% Load/Unload %%-------------------------------------------------------------------- -spec load() -> ok. load() -> emqx_conf:add_handler([topic_metrics], ?MODULE), emqx_conf:add_handler([telemetry], ?MODULE). -spec unload() -> ok. unload() -> emqx_conf:remove_handler([telemetry]), emqx_conf:remove_handler([topic_metrics]). %%-------------------------------------------------------------------- %% Topic-Metrics %%-------------------------------------------------------------------- -spec topic_metrics() -> [emqx_types:topic()]. topic_metrics() -> lists:map( fun(#{topic := Topic}) -> Topic end, emqx:get_config([topic_metrics]) ). -spec add_topic_metrics(emqx_types:topic()) -> {ok, emqx_types:topic()} | {error, term()}. add_topic_metrics(Topic) -> case cfg_update([topic_metrics], ?FUNCTION_NAME, Topic) of {ok, _} -> {ok, Topic}; {error, Reason} -> {error, Reason} end. -spec remove_topic_metrics(emqx_types:topic()) -> ok | {error, term()}. remove_topic_metrics(Topic) -> case cfg_update([topic_metrics], ?FUNCTION_NAME, Topic) of {ok, _} -> ok; {error, Reason} -> {error, Reason} end. -spec is_telemetry_enabled() -> boolean(). is_telemetry_enabled() -> IsOfficial = emqx_telemetry:official_version(emqx_release:version()), emqx:get_config([telemetry, enable], IsOfficial). -spec set_telemetry_status(boolean()) -> ok | {error, term()}. set_telemetry_status(Status) -> case cfg_update([telemetry], set_telemetry_status, Status) of {ok, _} -> ok; {error, _} = Error -> Error end. %%-------------------------------------------------------------------- %% Config Handler %%-------------------------------------------------------------------- -spec pre_config_update( list(atom()), emqx_config:update_request(), emqx_config:raw_config() ) -> {ok, emqx_config:update_request()} | {error, term()}. pre_config_update(_, {add_topic_metrics, Topic0}, RawConf) -> Topic = #{<<"topic">> => Topic0}, case lists:member(Topic, RawConf) of true -> {error, already_existed}; _ -> {ok, RawConf ++ [Topic]} end; pre_config_update(_, {remove_topic_metrics, Topic0}, RawConf) -> Topic = #{<<"topic">> => Topic0}, case lists:member(Topic, RawConf) of true -> {ok, RawConf -- [Topic]}; _ -> {error, not_found} end; pre_config_update(_, {set_telemetry_status, Status}, RawConf) -> {ok, RawConf#{<<"enable">> => Status}}. 
-spec post_config_update( list(atom()), emqx_config:update_request(), emqx_config:config(), emqx_config:config(), emqx_config:app_envs() ) -> ok | {ok, Result :: any()} | {error, Reason :: term()}. post_config_update( _, {add_topic_metrics, Topic}, _NewConfig, _OldConfig, _AppEnvs ) -> case emqx_topic_metrics:register(Topic) of ok -> ok; {error, Reason} -> {error, Reason} end; post_config_update( _, {remove_topic_metrics, Topic}, _NewConfig, _OldConfig, _AppEnvs ) -> case emqx_topic_metrics:deregister(Topic) of ok -> ok; {error, Reason} -> {error, Reason} end; post_config_update( _, {set_telemetry_status, Status}, _NewConfig, _OldConfig, _AppEnvs ) -> case Status of true -> emqx_telemetry:enable(); false -> emqx_telemetry:disable() end. %%-------------------------------------------------------------------- %% Private %%-------------------------------------------------------------------- res({ok, Result}) -> {ok, Result}; res({error, {pre_config_update, ?MODULE, Reason}}) -> {error, Reason}; res({error, {post_config_update, ?MODULE, Reason}}) -> {error, Reason}; res({error, Reason}) -> {error, Reason}. cfg_update(Path, Action, Params) -> res( emqx_conf:update( Path, {Action, Params}, #{override_to => cluster} ) ).
# overflowProblem

An algorithmic/programming challenge.

## Problem

> The following is a paraphrase of the problem found in the specification.

Given a pyramid of 250 mL glasses stacked on top of each other with each row of glasses containing 1 more glass than the row above it, when N litres of water is poured on the top-most glass, how much water is in any glass i rows from the top and j glasses from the leftmost glass of that row?

### Inputs

> "infinity" used here is an arbitrarily large number, not the concept of infinity. Such a number is limited by the
> hardware of the server on which the module is run. The same applies for the remainder of this document.

- Decimal number of litres of water poured in the top-most glass from 0.0 to infinity.
- Integer row of any glass from 0 to infinity.
- Integer column of any glass from 0 to infinity.

Note that algorithmic inputs are different from user/other inputs.

### Outputs

- Decimal number of litres of water in the queried glass from 0.0 L to 0.250 L.

Note that algorithmic outputs are different from user/other outputs.

### Design

#### Architecture

A Python module will be used to perform the calculation for each glass, allowing abstraction between it and the serving layer (in this case, only a "main" method).

### Algorithm

#### Definition

The glasses are stacked thus:

```markdown
0: g
1: g g
2: g g g
3: g g g g
```

...and so on. The top `g` overflows into the `g` directly below it and the one below and to the right. The rate of filling of any glass is dependent on the rate of filling from the glasses above. This, according to experimentation with inputs, appears to be an instance of Pascal's Triangle. `We can use this for our unit tests.`

Because it must be known how much water is in the row directly above any row to determine the value in the bottom-most row, we must keep a data structure containing all of the glasses.

#### Pseudocode

> This is formatted as Python but is not valid code. A runnable version is given after the Testing section below.

Assume L is the number of litres poured into the top glass.

```python
CAPACITY = 0.250

required_glasses = L / CAPACITY
required_rows = __compute_triangle_number(required_glasses)
glass_array = __initialise_array(required_rows)

# Fill the top glass with all of the water.
glass_array[0][0] = L

for row_index, row in enumerate(glass_array[:-1]):
    for glass_index, glass in enumerate(row):
        if glass > CAPACITY:
            # Excess water splits evenly between the two glasses below.
            flow = (glass - CAPACITY) / 2
            row[glass_index] = CAPACITY
            glass_array[row_index + 1][glass_index] += flow
            glass_array[row_index + 1][glass_index + 1] += flow

# The return value should be glass_array[i][j].
```

#### Problem Assumptions

- Assume that the water takes no time to flow.
- Assume that water evaporation, spillage, and other real-world physical factors aside from gravity do not play a part in the amount of water in any cup.
- Assume that the amount of water poured in the top glass will be interpreted up to 5 decimal places.
- Assume that the output amount of water will be within 1e-5 of the actual amount of water in that glass.

#### Testing

##### Algorithm and Module Unit Testing

We are able to easily determine the amount of water in any glass given a predetermined amount of water poured in the top glass. This may be done for elementary cases up to a certain amount of water. For unit tests to pass, the output result must be within 1e-5 of the actual expected amount of water.

More information on running the tests can be found below in the [Build](#build) section.

##### CLI Acceptance Testing

There is little point in testing the CLI, as the `argparse` standard library is already well tested; such tests would add little value.
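The following is a minimal runnable sketch of the pseudocode above. The function name `water_in_glass` and the simulate-down-to-row-`i` strategy are illustrative choices, not the module's actual interface.

```python
CAPACITY = 0.250


def water_in_glass(litres, i, j):
    """Litres of water in the glass i rows from the top and j from the left."""
    if litres < 0 or j < 0 or j > i:
        raise ValueError("no such glass or invalid input")
    # Rows 0..i+1; the extra row absorbs whatever overflows past row i.
    glasses = [[0.0] * (row + 1) for row in range(i + 2)]
    glasses[0][0] = litres
    for row in range(i + 1):
        for col in range(row + 1):
            overflow = glasses[row][col] - CAPACITY
            if overflow > 0:
                glasses[row][col] = CAPACITY
                glasses[row + 1][col] += overflow / 2
                glasses[row + 1][col + 1] += overflow / 2
    return glasses[i][j]


# For example, pouring 1 litre on top leaves 0.125 L in glass (2, 1).
assert abs(water_in_glass(1.0, 2, 1) - 0.125) < 1e-5
```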
#### Exceptions

For illegal inputs (in this case, asking for a column that does not exist) the Python module will raise an exception.

## Build

### Prerequisites

The preferred way of building this application will be shown first, then more "conventional" build instructions:

- pipenv >= 2018.7.1
- python >= 2.7 or >= 3.5

```bash
pip install pipenv
```

The conventional requirements are (without the build tool):

- python 3.5
- pytest

```bash
pip install pytest
```

### Build Instructions

Pipenv is a build tool similar to `npm` that bundles local dependencies, manages versions of Python, and can execute scripts in a local Python environment. Install all dependencies with:

```bash
pipenv install
```

### Running Instructions

The interface to this problem's solution is a command-line interface. To execute:

```bash
pipenv run python overflow.py <litres> <row> <column>
```

For example:

```bash
pipenv run python overflow.py 1.0 2 1
```

Without pipenv, you may simply run:

```bash
python overflow.py 1.0 2 1
```

### Running Tests

You can run the 8 unit tests by running:

```bash
pipenv run pytest
```

If you don't use pipenv and have pytest, run:

```bash
pytest
```
package mock import ( "reflect" record "github.com/appist/appy/record" mock "github.com/stretchr/testify/mock" ) type ( // Mock is the workhorse used to track activity on another object. Mock struct { mock.Mock } // AnythingOfTypeArgument is a string that contains the type of an argument // for use when type checking. AnythingOfTypeArgument = mock.AnythingOfTypeArgument // IsTypeArgument is a struct that contains the type of an argument for use // when type checking. This is an alternative to AnythingOfType. IsTypeArgument = mock.IsTypeArgument ) const ( // Anything is used in Diff and Assert when the argument being tested // shouldn't be taken into consideration. Anything = mock.Anything ) var ( // AnythingOfType returns an AnythingOfTypeArgument object containing the // name of the type to check for. AnythingOfType = mock.AnythingOfType // IsType returns an IsTypeArgument object containing the type to check for. // You can provide a zero-value of the type to check. This is an alternative // to AnythingOfType. IsType = mock.IsType ) // NewDB initializes a test DB that is useful for testing purpose. func NewDB() (func(name string) record.DBer, *DB) { db := &DB{} return func(name string) record.DBer { return db }, db } // NewModel initializes a test model that is useful for testing purpose. func NewModel(mockedDest interface{}) (func(dest interface{}, opts ...record.ModelOption) record.Modeler, *Model) { m := &Model{} return func(dest interface{}, opts ...record.ModelOption) record.Modeler { if mockedDest != nil { val := reflect.ValueOf(dest) val.Elem().Set(reflect.ValueOf(mockedDest).Elem()) } return m }, m }
#_{:clj-kondo/ignore [:refer-all]} (ns drampa.matches-test (:require [clojure.test :refer :all] [drampa.matches :refer :all] [drampa.claims :as d.claims] [drampa.tiles :as d.tiles])) (deftest get-initial-match-is-correct (let [{:keys [wall dead-wall players prevailing-wind dora ura-dora]} (get-initial-match)] (testing "Are the players of a match initialized correctly?" (is (not (nil? players))) (is (= (count players) 4)) (testing "Are the seat winds of a match initialized correctly?" (let [player-winds (map :wind players)] (is (not (nil? player-winds))) (is (= (count player-winds) 4)) (is (distinct? player-winds)) (is (some #{:east} player-winds)) (is (some #{:south} player-winds)) (is (some #{:west} player-winds)) (is (some #{:north} player-winds)))) (testing "Are the starting scores of a match initialized correctly?" (let [player-scores (map :score players)] (is (not (nil? player-scores))) (is (= (count player-scores) 4)) (is (every? #(= starting-score %) player-scores)))) (testing "Have the hands been dealt correctly?" (doseq [{:keys [wind hand]} players] (is (= (count hand) (if (= :east wind) 14 13)))))) (testing "Is the prevailing wind of a match initialized correctly?" (is (= prevailing-wind :east))) (testing "Is the wall broken correctly?" (is (not (nil? dead-wall))) (is (= (count dead-wall) 14)) (is (not (nil? wall))) (is (= (count wall) 69))) (testing "Has one dora been revealed?" (is (not (nil? dora))) (is (= (count dora) 1)) (is (not (nil? dora))) (is (= (count ura-dora) 1))))) (def break-wall-at-test-cases (sorted-map 2 [(str "22223333444455506666777788889999m1111222233334444555566667777z111122223333444455506666777788889999p" "11112222333344445550666677s") "7788889999s1111m"] 3 [(str "11222233334444555566667777z111122223333444455506666777788889999p111122223333444455506666777788889999s" "111122223333444455506666m") "777788889999m11z"] 4 [(str "223333444455506666777788889999p111122223333444455506666777788889999s111122223333444455506666777788889999m" "11112222333344445555z") "66667777z111122p"] 5 [(str "1122223333444455506666777788889999s111122223333444455506666777788889999m1111222233334444555566667777z" "111122223333444455506666p") "777788889999p11s"] 6 [(str "9999s111122223333444455506666777788889999m1111222233334444555566667777z111122223333444455506666777788889999" "p111122223333444455s") "50666677778888s"] 7 [(str "889999m1111222233334444555566667777z111122223333444455506666777788889999p111122223333444455506666777788889999s" "1111222233334444m") "55506666777788m"] 8 [(str "77z111122223333444455506666777788889999p111122223333444455506666777788889999s" "111122223333444455506666777788889999m111122223333z") "44445555666677z"] 9 [(str "889999p111122223333444455506666777788889999s111122223333444455506666777788889999m1111222233334444555566667777z" "1111222233334444p") "55506666777788p"] 10 [(str "777788889999s111122223333444455506666777788889999m1111222233334444555566667777z" "111122223333444455506666777788889999p1111222233s") "33444455506666s"] 11 [(str "66777788889999m1111222233334444555566667777z111122223333444455506666777788889999p" "111122223333444455506666777788889999s11112222m") "33334444555066m"] 12 [(str "5566667777z111122223333444455506666777788889999p111122223333444455506666777788889999s" "111122223333444455506666777788889999m1111z") "22223333444455z"])) (deftest break-wall-at-is-correct (let [break-test-wall (vec (d.tiles/sort-tiles d.tiles/initial-wall)) test-cases (reduce-kv #(assoc %1 %2 (map d.tiles/tiles-from-notation %3)) (sorted-map) break-wall-at-test-cases)] 
(testing "Is the wall broken correctly given a certain dice roll?" (doseq [[dice-roll expected-value] test-cases] (is (= (break-wall-at break-test-wall dice-roll) expected-value)))))) (def reveal-dora-test-cases (sorted-map 1 {:dora (d.tiles/->Tile :zi 3) :ura-dora (d.tiles/->Tile :zi 5)} 2 {:dora (d.tiles/->Tile :zi 4) :ura-dora (d.tiles/->Tile :zi 4)} 3 {:dora (d.tiles/->Tile :zi 5) :ura-dora (d.tiles/->Tile :zi 3)} 4 {:dora (d.tiles/->Tile :zi 6) :ura-dora (d.tiles/->Tile :zi 2)})) (defn- reveal-dora-test-case-reducer [test-cases key {:keys [dora ura-dora]}] (let [{last-dora :dora last-ura-dora :ura-dora} (get test-cases (dec key))] (-> test-cases (assoc-in [key :dora] (conj last-dora dora)) (assoc-in [key :ura-dora] (conj last-ura-dora ura-dora))))) (defn- reveal-dora-multiple-times [match times] (reduce (fn [acc-match _] (reveal-dora acc-match)) match (range times))) (deftest reveal-dora-is-correct (let [test-dead-wall (d.tiles/tiles-from-notation "17263544536271z") test-cases (reduce-kv reveal-dora-test-case-reducer {0 {:dora [] :ura-dora []}} reveal-dora-test-cases) test-cases (dissoc test-cases 0) match (->Match nil test-dead-wall nil nil nil [] [])] (testing "Are dora tiles revealed correctly multiple times?" (doseq [[reveal-count {expected-dora :dora expected-ura-dora :ura-dora}] test-cases :let [{actual-dora :dora actual-ura-dora :ura-dora} (reveal-dora-multiple-times match reveal-count)]] (testing (str "Are they correctly revealed after " reveal-count " time(s)?") (is (= expected-dora actual-dora)) (is (= expected-ura-dora actual-ura-dora))))))) (deftest get-dora-from-indicator-is-correct (testing "Can the dora be retrieved from the indicator?" (testing "If the tile is a red five?" (is (= (get-dora-from-indicator (d.tiles/->Tile :pin 0)) (d.tiles/->Tile :pin 6))) (is (= (get-dora-from-indicator (d.tiles/->Tile :sou 0)) (d.tiles/->Tile :sou 6))) (is (= (get-dora-from-indicator (d.tiles/->Tile :man 0)) (d.tiles/->Tile :man 6)))) (testing "If the tile is a nine?" (is (= (get-dora-from-indicator (d.tiles/->Tile :pin 9)) (d.tiles/->Tile :pin 1))) (is (= (get-dora-from-indicator (d.tiles/->Tile :sou 9)) (d.tiles/->Tile :sou 1))) (is (= (get-dora-from-indicator (d.tiles/->Tile :man 9)) (d.tiles/->Tile :man 1)))) (testing "If the tile is a different number tile?" (doseq [suit [:pin :sou :man] rank [1 2 3 4 5 6 7 8]] (is (= (get-dora-from-indicator (d.tiles/->Tile suit rank)) (d.tiles/->Tile suit (inc rank)))))) (testing "If the tile is a North Wind?" (is (= (get-dora-from-indicator (d.tiles/->Tile :zi 4)) (d.tiles/->Tile :zi 1)))) (testing "If the tile is a Red Dragon?" (is (= (get-dora-from-indicator (d.tiles/->Tile :zi 7)) (d.tiles/->Tile :zi 5)))) (testing "If the tile is a different honor?" (doseq [rank [1 2 3 5 6]] (is (= (get-dora-from-indicator (d.tiles/->Tile :zi rank)) (d.tiles/->Tile :zi (inc rank)))))))) (def deal-initial-hands-test-wall (d.tiles/tiles-from-notation "9p9s9m7z5p4z4m4s4p3333z3333m3333s3333p2222z2222m2222s2222p1111z1111m1111s1111p")) (def deal-initial-hands-expected-hands {:east "11112222333345p" :south "1111222233334s" :west "1111222233334m" :north "1111222233334z"}) (defn load-deal-initial-hands-expected-hands [] (reduce-kv #(assoc %1 %2 (d.tiles/tiles-from-notation %3)) {} deal-initial-hands-expected-hands)) (deftest deal-initial-hands-is-correct (testing "Are hands dealt correctly from the wall?" 
(let [test-wall deal-initial-hands-test-wall match (->Match test-wall nil (fill-players 0) nil nil nil nil) {:keys [wall players]} (deal-initial-hands match) expected-hands (load-deal-initial-hands-expected-hands)] (testing "Is the remaining wall correct?" (is (= wall (d.tiles/tiles-from-notation "9p9s9m7z")))) (testing "Are the hands correct?" (doseq [{:keys [wind hand]} players :let [expected-hand (expected-hands wind)]] (testing (str "Is the " wind " hand correct?") (is (= expected-hand (d.tiles/sort-tiles hand))))))))) (def sentinel-tile (d.tiles/->Tile :zi 8)) (def sentinel-tile-dead-wall (d.tiles/->Tile :zi 9)) (defn test-match-from-starting-hands [hands] (let [[east-hand south-hand west-hand north-hand] hands _ (apply concat hands) live-wall-beginning (concat (subvec east-hand 0 4) (subvec south-hand 0 4) (subvec west-hand 0 4) (subvec north-hand 0 4) (subvec east-hand 4 8) (subvec south-hand 4 8) (subvec west-hand 4 8) (subvec north-hand 4 8) (subvec east-hand 8 12) (subvec south-hand 8 12) (subvec west-hand 8 12) (subvec north-hand 8 12) [(get east-hand 12) (get south-hand 12) (get west-hand 12) (get north-hand 12) (get east-hand 13)]) wall (vec (concat (repeat 6 sentinel-tile-dead-wall) (repeat 69 sentinel-tile) (reverse live-wall-beginning) (repeat 8 sentinel-tile-dead-wall))) players (fill-players 0 :always-last :always-claim)] (get-initial-match wall 4 players))) (def get-claims-test-cases [ [ ["1111222233334z2p" "444z11112222m13p" "555566667777z1s" "3333444455506m"] [nil {:claiming-wind :south :claim-type :chii :choice "123p" :discarding-wind :east} nil nil]] [ ["1111222233334z0p" "444z11112222m46p" "555566667777z1s" "3333444455506m"] [nil {:claiming-wind :south :claim-type :chii :choice "406p" :discarding-wind :east} nil nil]] [ ["1111222233334p2z" "444z11112222m13z" "555566667777p1s" "3333444455506m"] [nil nil nil nil]] [ ["1111222233334z2p" "555566667777z1s" "444z11112222m13p" "3333444455506m"] [nil nil nil nil]] [ ["1111222233334z2p" "444z11112222m13p" "55556666777z22p" "3333444455506m"] [ nil {:claiming-wind :south :claim-type :chii :choice "123p" :discarding-wind :east} {:claiming-wind :west :claim-type :pon :choice "222p" :discarding-wind :east} nil]] [ ["1111222233334z2p" "444z11112222m13s" "3333444455506m" "55556666777z22p"] [nil nil nil {:claiming-wind :north :claim-type :pon :choice "222p" :discarding-wind :east}]] [ ["1111222233334z5p" "444z1111222m550p" "3333444455506m" "55556666777z22p"] [nil {:claiming-wind :south :claim-type :kan :choice nil :discarding-wind :east} nil nil]] [ ["1111222233334z2p" "444z11112222m13p" "5555666677z222p" "3333444455506m"] [ nil {:claiming-wind :south :claim-type :chii :choice "123p" :discarding-wind :east} {:claiming-wind :west :claim-type :kan :choice nil :discarding-wind :east} nil]] ]) (defn- load-get-claims-expected-claim [claim-map] (if (nil? claim-map) nil (d.claims/map->Claim (update claim-map :choice d.tiles/tiles-from-notation)))) (deftest get-claims-is-correct (testing "Are claims correctly offered and taken from known walls?" (doseq [[starting-hands-notation expected-claims-maps] get-claims-test-cases :let [starting-hands (mapv d.tiles/tiles-from-notation starting-hands-notation) expected-claims (mapv load-get-claims-expected-claim expected-claims-maps)]] (let [match (-> (test-match-from-starting-hands starting-hands) (perform-draw)) actual-claims (get-claims match)] (is (= expected-claims actual-claims))))))
#!/bin/bash
mkdir vlc play pause close back fullscreen next prev volume-up volume-down not-vlc
#ifndef SCRIPT_STATE_H
#define SCRIPT_STATE_H

#include "IPillarState.h"

class ScriptState : public IPillarState {
public:
    ScriptState() = default;

    void OnEnter(PillarInput *pInput, PillarOutput *pOutput) override;
    PillarMode OnExecute(PillarInput *pInput, PillarOutput *pOutput) override;
    void OnExit(PillarInput *pInput, PillarOutput *pOutput) override;
};

#endif /* SCRIPT_STATE_H */
package io.warburton.zolin.model /** * @author tw */ data class Merchant(val id: String, val name: String)
package Analytics

import (
	"github.com/loyal1213/goonvif/xsd/onvif"
	"github.com/loyal1213/goonvif/xsd"
)

type GetSupportedRules struct {
	XMLName            string               `xml:"tan:GetSupportedRules"`
	ConfigurationToken onvif.ReferenceToken `xml:"tan:ConfigurationToken"`
}

type CreateRules struct {
	XMLName            string               `xml:"tan:CreateRules"`
	ConfigurationToken onvif.ReferenceToken `xml:"tan:ConfigurationToken"`
	Rule               onvif.Config         `xml:"tan:Rule"`
}

type DeleteRules struct {
	XMLName            string               `xml:"tan:DeleteRules"`
	ConfigurationToken onvif.ReferenceToken `xml:"tan:ConfigurationToken"`
	RuleName           xsd.String           `xml:"tan:RuleName"`
}

type GetRules struct {
	XMLName            string               `xml:"tan:GetRules"`
	ConfigurationToken onvif.ReferenceToken `xml:"tan:ConfigurationToken"`
}

type GetRuleOptions struct {
	XMLName            string               `xml:"tan:GetRuleOptions"`
	RuleType           xsd.QName            `xml:"tan:RuleType"`
	ConfigurationToken onvif.ReferenceToken `xml:"tan:ConfigurationToken"`
}

type ModifyRules struct {
	XMLName            string               `xml:"tan:ModifyRules"`
	ConfigurationToken onvif.ReferenceToken `xml:"tan:ConfigurationToken"`
	Rule               onvif.Config         `xml:"tan:Rule"`
}

type GetServiceCapabilities struct {
	XMLName string `xml:"tan:GetServiceCapabilities"`
}

type GetSupportedAnalyticsModules struct {
	XMLName            string               `xml:"tan:GetSupportedAnalyticsModules"`
	ConfigurationToken onvif.ReferenceToken `xml:"tan:ConfigurationToken"`
}

type GetAnalyticsModuleOptions struct {
	XMLName            string               `xml:"tan:GetAnalyticsModuleOptions"`
	Type               xsd.QName            `xml:"tan:Type"`
	ConfigurationToken onvif.ReferenceToken `xml:"tan:ConfigurationToken"`
}

type CreateAnalyticsModules struct {
	XMLName            string               `xml:"tan:CreateAnalyticsModules"`
	ConfigurationToken onvif.ReferenceToken `xml:"tan:ConfigurationToken"`
	AnalyticsModule    onvif.Config         `xml:"tan:AnalyticsModule"`
}

type DeleteAnalyticsModules struct {
	XMLName             string               `xml:"tan:DeleteAnalyticsModules"`
	ConfigurationToken  onvif.ReferenceToken `xml:"tan:ConfigurationToken"`
	AnalyticsModuleName xsd.String           `xml:"tan:AnalyticsModuleName"`
}

type GetAnalyticsModules struct {
	XMLName            string               `xml:"tan:GetAnalyticsModules"`
	ConfigurationToken onvif.ReferenceToken `xml:"tan:ConfigurationToken"`
}

type ModifyAnalyticsModules struct {
	XMLName            string               `xml:"tan:ModifyAnalyticsModules"`
	ConfigurationToken onvif.ReferenceToken `xml:"tan:ConfigurationToken"`
	AnalyticsModule    onvif.Config         `xml:"tan:AnalyticsModule"`
}
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. // See LICENSE.txt for license information. import React from 'react'; import styled from 'styled-components'; import {RHSParticipant, Rest} from 'src/components/rhs/rhs_participant'; interface Props { userIds: string[]; } const Following = (props: Props) => { if (props.userIds.length === 0) { return null; } return ( <> <FollowingWrapper> {props.userIds.length + ' following'} </FollowingWrapper> <UserRow tabIndex={0} role={'button'} > {props.userIds.slice(0, 5).map((userId: string) => ( <RHSParticipant key={userId} userId={userId} sizeInPx={20} /> ))} {props.userIds.length > 5 && <Rest sizeInPx={20}>{'+' + (props.userIds.length - 5)}</Rest> } </UserRow> </> ); }; const FollowingWrapper = styled.div` color: rgba(var(--center-channel-color-rgb), 0.72); font-size: 11px; line-height: 16px; `; const UserRow = styled.div` width: max-content; padding: 0; display: flex; flex-direction: row; border-radius: 44px; margin-left: 12px; :hover { border-color: rgba(var(--center-channel-color-rgb), 0.08); background-color: rgba(var(--center-channel-color-rgb), 0.08); background-clip: padding-box; } `; export default Following;
---
pageClass: page-daily-list
date: 2018.12.24
title: Daily Report 2018.12.24
meta:
  - itemprop: name
    content: Daily Report 2018.12.24
  - name: description
    itemprop: description
    content: Today's new discoveries
list:
  - name: Open-source projects
    list:
      - name: gitalk/gitalk
        note: A comment plugin built on GitHub Issues and Preact
        url: https://github.com/gitalk/gitalk
        lang: JavaScript,CSS,HTML
        watch: 29
        star: 2071
        fork: 179
      - name: dhamaniasad/HeadlessBrowsers
        note: A list of headless browsers
        url: https://github.com/dhamaniasad/HeadlessBrowsers
        lang: other
        watch: 161
        star: 4207
        fork: 276
        isChinese: false
      - name: salomonelli/best-resume-ever
        note: Beautiful resume templates, built with Vue
        url: https://github.com/salomonelli/best-resume-ever
      - name: vue-clamp
        note: Multiline text truncation for Vue
        url: https://justineo.github.io/vue-clamp/demo/?lang=zh
      - name: i0natan/nodebestpractices
        note: Node.js best practices (as of November 2018)
        url: https://github.com/i0natan/nodebestpractices
      - name: swc-project/swc
        note: A faster way to compile than Babel
        url: https://github.com/swc-project/swc
  - name: Tools
    list:
      - name: unbug/codelf
        note: A variable-naming helper that collects real naming patterns from Github, Bitbucket, Google Code, Codeplex, Sourceforge, Fedora Project, and GitLab
        url: https://github.com/unbug/codelf
      - name: intika/Librefox
        note: A browser focused on personal privacy and security
        url: https://github.com/intika/Librefox
      - name: asciinema - a terminal session recording and sharing tool
        note: asciinema - Record and share your terminal sessions, the right way
        url: https://asciinema.org/
---

<daily-list v-bind="$page.frontmatter"/>
export function ensure<TValue>( predicate: (value: unknown) => value is TValue, defaultValue: TValue ): (value: unknown) => TValue { return (value) => (predicate(value) ? value : defaultValue); }
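// A hypothetical usage sketch (the guard and names below are illustrative,
// not part of the module): ensure() turns a type guard plus a default into
// a total "coerce or fall back" function.
const isNumber = (value: unknown): value is number => typeof value === "number";

const toNumber = ensure(isNumber, 0);

toNumber(42);     // 42
toNumber("oops"); // 0 (fails the predicate, falls back to the default)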
/* * Copyright (c) 2019. JetBrains s.r.o. * Use of this source code is governed by the MIT license that can be found in the LICENSE file. */ package jetbrains.datalore.plot.builder.interact.loc import jetbrains.datalore.base.geometry.DoubleVector import jetbrains.datalore.plot.base.interact.GeomTarget import jetbrains.datalore.plot.base.interact.GeomTargetLocator.LookupStrategy import jetbrains.datalore.plot.builder.interact.TestUtil.HitIndex import jetbrains.datalore.plot.builder.interact.TestUtil.PathPoint import org.assertj.core.api.Assertions.assertThat import kotlin.test.Test class LayerTargetLocatorPathHoverXTest : jetbrains.datalore.plot.builder.interact.loc.TargetLocatorPathXTestBase() { override val strategy: LookupStrategy get() = LookupStrategy.HOVER @Test fun hoverX_WhenCloserToLeft() { assertThat( findTargets(rightFrom(p1, THIS_POINT_DISTANCE )) ).first().has(HitIndex.equalTo(p1.hitIndex)) } @Test fun hoverX_WhenCloserToRight() { assertThat( findTargets(rightFrom(p1, NEXT_POINT_DISTANCE )) ).first().has(HitIndex.equalTo(p2.hitIndex)) } @Test fun hoverX_WhenInTheMiddle_ShouldSelectSecondPoint() { assertThat( findTargets(rightFrom(p1, MIDDLE_POINTS_DISTANCE )) ).first().has(HitIndex.equalTo(p1.hitIndex)) } @Test fun hoverX_WhenOutOfPath_ShouldFindNothing() { assertThat( findTargets(leftFrom(p0, NEXT_POINT_DISTANCE )) ).isEmpty() } private fun leftFrom(p: PathPoint, distance: Double): DoubleVector { return DoubleVector(p.x - distance, p.y) } private fun rightFrom(p: PathPoint, distance: Double): DoubleVector { return DoubleVector(p.x + distance, p.y) } private fun findTargets(p: DoubleVector): List<GeomTarget> { return jetbrains.datalore.plot.builder.interact.TestUtil.findTargets(locator, p) } }
export * from './radiobutton.module'; export * from './radiobutton.component';
--- -api-id: P:Windows.Devices.WiFiDirect.WiFiDirectInformationElement.Oui -api-type: winrt property -api-device-family-note: xbox --- <!-- Property syntax public Windows.Storage.Streams.IBuffer Oui { get; set; } --> # Windows.Devices.WiFiDirect.WiFiDirectInformationElement.Oui ## -description A three-byte organization identifier used to indicate the organization that defined a vendor extension information element (IE). ## -property-value A three-byte organization identifier used to indicate the organization that defined a vendor extension IE. ## -remarks ## -examples ## -see-also
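A minimal sketch of setting this property (an illustrative addition, not from the docs source): tagging a vendor extension IE with the Wi-Fi Alliance OUI, 50-6F-9A, using `CryptographicBuffer` to build the three-byte buffer.

```csharp
using Windows.Devices.WiFiDirect;
using Windows.Security.Cryptography;

// Build a vendor extension IE and set its three-byte organization identifier.
var informationElement = new WiFiDirectInformationElement();
informationElement.Oui = CryptographicBuffer.CreateFromByteArray(new byte[] { 0x50, 0x6F, 0x9A });
```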
using FengZhen.SuperStore.Data.Entities; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace FengZhen.SuperStore.Data.Repositories { public interface IProductRepositories { int AddProduct(string id, string name, decimal price, int count); int RemoveProduct(string id, int count); int UpdateProductById(string id, string name, decimal price, int count); Product GetProductById(string id); List<Product> GetProducts(); } }
package client import ( "fmt" minting "github.com/threefoldtech/rivine/extensions/minting" "github.com/threefoldtech/rivine/extensions/minting/api" client "github.com/threefoldtech/rivine/pkg/client" types "github.com/threefoldtech/rivine/types" ) // PluginClient is used to be able to get the active mint condition, // and the active mint condition at a given block height, // such that the CLI can correctly validate a mint condition, // without requiring access to the consensus-extended transactiondb, // normally the validation isn't required on the client side, but it is possible none the less. type PluginClient struct { client client.BaseClient rootEndpoint string } // NewPluginConsensusClient creates a new PluginClient, // that can be used for easy interaction with the TransactionDB API exposed via the Consensus endpoints func NewPluginConsensusClient(cli client.BaseClient) *PluginClient { if cli == nil { panic("no BaseClient given") } return &PluginClient{ client: cli, rootEndpoint: "/consensus", } } // NewPluginExplorerClient creates a new PluginClient, // that can be used for easy interaction with the TransactionDB API exposed via the Explorer endpoints func NewPluginExplorerClient(cli client.BaseClient) *PluginClient { if cli == nil { panic("no BaseClient given") } return &PluginClient{ client: cli, rootEndpoint: "/explorer", } } var ( // ensure PluginClient implements the MintConditionGetter interface _ minting.MintConditionGetter = (*PluginClient)(nil) ) // GetActiveMintCondition implements minting.MintConditionGetter.GetActiveMintCondition func (cli *PluginClient) GetActiveMintCondition() (types.UnlockConditionProxy, error) { var result api.TransactionDBGetMintCondition err := cli.client.HTTP().GetWithResponse(cli.rootEndpoint+"/mintcondition", &result) if err != nil { return types.UnlockConditionProxy{}, fmt.Errorf( "failed to get active mint condition from daemon: %v", err) } return result.MintCondition, nil } // GetMintConditionAt implements minting.MintConditionGetter.GetMintConditionAt func (cli *PluginClient) GetMintConditionAt(height types.BlockHeight) (types.UnlockConditionProxy, error) { var result api.TransactionDBGetMintCondition err := cli.client.HTTP().GetWithResponse(fmt.Sprintf("%s/mintcondition/%d", cli.rootEndpoint, height), &result) if err != nil { return types.UnlockConditionProxy{}, fmt.Errorf( "failed to get mint condition at height %d from daemon: %v", height, err) } return result.MintCondition, nil }
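// A hypothetical usage sketch (the helper name and error handling are
// illustrative, not part of the package): fetching the active mint condition
// through the consensus endpoint, assuming a client.BaseClient has already
// been constructed elsewhere.
func printActiveMintCondition(bc client.BaseClient) error {
	pc := NewPluginConsensusClient(bc)
	cond, err := pc.GetActiveMintCondition()
	if err != nil {
		return fmt.Errorf("failed to fetch active mint condition: %v", err)
	}
	fmt.Printf("active mint condition: %v\n", cond)
	return nil
}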
# Bad Map Generator

### A 2D top-down map generator.

Uses SDL for drawing on the screen, getting user input, and timing.

# Usage

**ESC** Quit the application.

**r** Fills map with random noise.

**g** Fills map with greyscale noise.

**m** Fills map with Lichen-looking stuff.

**n** Fills map with a map generated with Perlin noise.

**w** Writes the current map to Map.bmp and Color_Map.bmp. Color_Map.bmp is in color, Map.bmp is greyscale.

**Arrow Up** Increases frequency of Perlin noise by 0.001 (default is 0.004).

**Arrow Down** Decreases frequency of Perlin noise by 0.001 (default is 0.004).

A short sketch at the end of this document illustrates what the frequency parameter changes.

# Screenshots

#### Basic maps using Perlin noise

![screen shot 1](screens/screen_1.png)
![screen shot 2](screens/screen_2.png)

#### Moving and zooming

![moving and zooming](screens/v2.0.0.gif)
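The frequency keys above scale the coordinates fed into the noise function, so a higher frequency packs more variation into the same screen area. A hypothetical sketch of that relationship (not code from this repo; value noise stands in for Perlin noise, since only the coordinate scaling is the point):

```cpp
#include <cmath>
#include <cstdint>

// Deterministic pseudo-random value in [0, 1) for an integer lattice point.
static double lattice(int ix, int iy) {
    uint32_t h = static_cast<uint32_t>(ix) * 73856093u ^ static_cast<uint32_t>(iy) * 19349663u;
    h = (h ^ (h >> 13)) * 1274126177u;
    return (h & 0xFFFFFF) / static_cast<double>(0x1000000);
}

static double smoothstep(double t) { return t * t * (3.0 - 2.0 * t); }

// Value noise at (x, y): bilinear blend of the four surrounding lattice values.
double valueNoise(double x, double y) {
    int x0 = static_cast<int>(std::floor(x)), y0 = static_cast<int>(std::floor(y));
    double tx = smoothstep(x - x0), ty = smoothstep(y - y0);
    double a = lattice(x0, y0),     b = lattice(x0 + 1, y0);
    double c = lattice(x0, y0 + 1), d = lattice(x0 + 1, y0 + 1);
    double top = a + (b - a) * tx;
    double bot = c + (d - c) * tx;
    return top + (bot - top) * ty;
}

// Raising `frequency` (the arrow keys above) zooms the pattern out:
//   double sample = valueNoise(px * frequency, py * frequency);
```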