# sharp
<img src="https://cdn.jsdelivr.net/gh/lovell/sharp@master/docs/image/sharp-logo.svg" width="160" height="160" alt="sharp logo" align="right">
The typical use case for this high speed Node.js module
is to convert large images in common formats to
smaller, web-friendly JPEG, PNG, AVIF and WebP images of varying dimensions.
Resizing an image is typically 4x-5x faster than using the
quickest ImageMagick and GraphicsMagick settings
due to its use of [libvips](https://github.com/libvips/libvips).
Colour spaces, embedded ICC profiles and alpha transparency channels are all handled correctly.
Lanczos resampling ensures quality is not sacrificed for speed.
As well as image resizing, operations such as
rotation, extraction, compositing and gamma correction are available.
Most modern macOS, Windows and Linux systems running Node.js >= 12.13.0
do not require any additional install or runtime dependencies.
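For example, a typical resize-and-convert call looks like this (file names are illustrative):
```javascript
const sharp = require('sharp');
// Resize to 300px wide (height auto-scales) and convert to WebP.
sharp('input.jpg')
  .resize(300)
  .toFile('output.webp')
  .then(info => console.log(info))
  .catch(err => console.error(err));
```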
### Formats
This module supports reading JPEG, PNG, WebP, AVIF, TIFF, GIF and SVG images.
Output images can be in JPEG, PNG, WebP, AVIF and TIFF formats as well as uncompressed raw pixel data.
Streams, Buffer objects and the filesystem can be used for input and output.
A single input Stream can be split into multiple processing pipelines and output Streams.
Deep Zoom image pyramids can be generated,
suitable for use with "slippy map" tile viewers like
[OpenSeadragon](https://github.com/openseadragon/openseadragon).
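As a sketch of the stream-splitting behaviour mentioned above (paths are illustrative):
```javascript
const fs = require('fs');
const sharp = require('sharp');
// One input Stream feeding two output pipelines via clone().
const pipeline = sharp();
pipeline.clone().resize(800).toFile('large.webp').catch(console.error);
pipeline.clone().resize(160).toFile('thumbnail.webp').catch(console.error);
fs.createReadStream('input.jpg').pipe(pipeline);
```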
### Fast
This module is powered by the blazingly fast
[libvips](https://github.com/libvips/libvips) image processing library,
originally created in 1989 at Birkbeck College
and currently maintained by a small team led by
[John Cupitt](https://github.com/jcupitt).
Only small regions of uncompressed image data
are held in memory and processed at a time,
taking full advantage of multiple CPU cores and L1/L2/L3 cache.
Everything remains non-blocking thanks to _libuv_,
no child processes are spawned and Promises/async/await are supported.
### Optimal
The features of `mozjpeg` and `pngquant` can be used
to optimise the file size of JPEG and PNG images respectively,
without having to invoke separate `imagemin` processes.
Huffman tables are optimised when generating JPEG output images
without having to use separate command line tools like
[jpegoptim](https://github.com/tjko/jpegoptim) and
[jpegtran](http://jpegclub.org/jpegtran/).
PNG filtering is disabled by default,
which for diagrams and line art often produces the same result
as [pngcrush](https://pmt.sourceforge.io/pngcrush/).
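As a sketch, these optimisations are exposed as output options (option names as documented for recent sharp releases):
```javascript
const sharp = require('sharp');
// mozjpeg-derived settings for JPEG output; palette-based quantisation for PNG.
sharp('input.png').jpeg({ mozjpeg: true }).toFile('optimised.jpg');
sharp('input.png').png({ palette: true }).toFile('optimised.png');
```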
### Contributing
A [guide for contributors](https://github.com/lovell/sharp/blob/master/.github/CONTRIBUTING.md)
covers reporting bugs, requesting features and submitting code changes.
### Licensing
Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021 Lovell Fuller and contributors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
[https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0)
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
|
# compliantcontacts
# Overview
This project implements a `Contact` class and a contact list.
# Instructions for Installation
Download files.
# Instructions for Usage
Run in an appropriate environment like JSFiddle or IntelliJ.
# Credits
Author: Reed Shinsato
|
module ProjectHelper
def content_for_project_popover(project)
buffer = ""
buffer << "<div class='nowrap'><h3 style='margin-top: 0px;'>#{project.title}</h3></div>"
if project.tag_list.any?
buffer << "<div class='nowrap'>"
project.tag_list.each do |word|
buffer << "<span class='btn btn-default btn-xs'>#{sanitize(word, tags: [])}</span> "
end
buffer << "</div>"
end
buffer << "<div class='row-padded-top nowrap'>"
project.involved_users.take(10).each do |user|
buffer << image_tag(user.image.url(:thumb), class: "img-circle", style: "width: 20px; padding: 1px;")
end
buffer << "</div>"
buffer
end
end
|
package com.fabirt.roka.features.categories.presentation.view
import android.os.Bundle
import androidx.fragment.app.Fragment
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.core.view.isVisible
import androidx.fragment.app.activityViewModels
import androidx.lifecycle.lifecycleScope
import androidx.navigation.fragment.findNavController
import androidx.navigation.fragment.navArgs
import androidx.paging.LoadState
import androidx.recyclerview.widget.LinearLayoutManager
import com.fabirt.roka.R
import com.fabirt.roka.core.domain.model.Recipe
import com.fabirt.roka.core.error.toFailure
import com.fabirt.roka.core.presentation.adapters.PagingLoadStateAdapter
import com.fabirt.roka.core.presentation.adapters.RecipePagingAdapter
import com.fabirt.roka.core.presentation.dispatchers.RecipeEventDispatcher
import com.fabirt.roka.core.utils.applyTopWindowInsets
import com.fabirt.roka.core.utils.bindNetworkImage
import com.fabirt.roka.core.utils.configureStatusBar
import com.fabirt.roka.core.utils.navigateToRecipeDetail
import com.fabirt.roka.databinding.FragmentCategoryDetailBinding
import com.fabirt.roka.features.categories.presentation.viewmodel.CategoryDetailViewModel
import com.google.android.material.transition.MaterialContainerTransform
import kotlinx.coroutines.flow.collectLatest
import kotlinx.coroutines.launch
class CategoryDetailFragment : Fragment(), RecipeEventDispatcher {
private val viewModel: CategoryDetailViewModel by activityViewModels()
private val args: CategoryDetailFragmentArgs by navArgs()
private lateinit var adapter: RecipePagingAdapter
private var _binding: FragmentCategoryDetailBinding? = null
private val binding get() = _binding!!
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
adapter = RecipePagingAdapter(this)
viewModel.requestRecipesForCategory(args.category)
configureTransitions()
}
override fun onCreateView(
inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
configureStatusBar(false)
_binding = FragmentCategoryDetailBinding.inflate(inflater, container, false)
return binding.root
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding.rootView.transitionName = args.category.name
binding.btnBack.applyTopWindowInsets()
binding.rvRecipes.layoutManager = LinearLayoutManager(requireContext())
binding.rvRecipes.adapter = adapter.withLoadStateFooter(PagingLoadStateAdapter())
binding.tvTitle.text = args.category.name
bindNetworkImage(binding.ivCategoryItem, args.category.imageUrl)
binding.btnBack.setOnClickListener {
findNavController().navigateUp()
}
binding.errorLayout.btnRetry.setOnClickListener {
adapter.refresh()
}
setupObservers()
}
override fun onDestroyView() {
super.onDestroyView()
_binding = null
}
override fun onRecipePressed(recipe: Recipe, view: View) {
navigateToRecipeDetail(recipe, view)
}
private fun setupObservers() {
adapter.addLoadStateListener { loadStates ->
val loadState = loadStates.source.refresh
binding.progressLayout.spinView.isVisible = loadState is LoadState.Loading
binding.rvRecipes.isVisible = loadState is LoadState.NotLoading && adapter.itemCount > 0
binding.errorLayout.errorView.isVisible = loadState is LoadState.Error
if (loadState is LoadState.Error) {
val failure = loadState.error.toFailure()
binding.errorLayout.tvErrorSubtitle.text = failure.translate(requireContext())
}
}
viewLifecycleOwner.lifecycleScope.launch {
viewModel.recipesFlow?.collectLatest { pagingData ->
adapter.submitData(pagingData)
}
}
}
private fun configureTransitions() {
val duration = resources.getInteger(R.integer.page_transition_duration)
val color = requireContext().getColor(R.color.colorBackground)
val transition = MaterialContainerTransform().apply {
this.duration = duration.toLong()
containerColor = color
drawingViewId = R.id.homeNavHostContainer
}
sharedElementEnterTransition = transition
sharedElementReturnTransition = transition
}
}
|
# Fizz Buzz with Python
## Introduction
This is just a simple implementation of the Fizz-Buzz Coding Kata in Python
## Highlights
* Functional programming of the rules (see the sketch after this list)
* Whole implementation was derived by TDD
* parametrized tests with Nose
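A minimal sketch of the functional-rules idea (illustrative, not the repository's exact code):
```python
# Each rule is a (predicate, word) pair; the words of all matching rules are
# concatenated, falling back to the number itself when no rule applies.
RULES = [
    (lambda n: n % 3 == 0, 'Fizz'),
    (lambda n: n % 5 == 0, 'Buzz'),
]

def fizzbuzz(n):
    words = [word for matches, word in RULES if matches(n)]
    return ''.join(words) or str(n)

print([fizzbuzz(n) for n in range(1, 16)])
```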
## What I learned
* Python refactorings in IntelliJ could be better
* how to work with lists and filter
* parametrized tests in Python are cool, though there is a typo in the package name (nose_*parameterized*) ;-)
|
from pre_treat_message import pre_treat_message
import random
ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
def encrypt(message, key):
    """ apply the null cipher, where the key tells at what index the letters of the plaintext should be hidden in each
    word* of the encrypted text. the 'words' generated by this function will be 5-letter long strings of randomly-chosen letters
    for instance, here are valid encryptions:
    - for key=0, HIGH could be encrypted as HITOW IPSLO GOPES HYTER
    - for key=2, NOON could be encrypted as TUNDE ODOWT ROOWT PUNDT
    """
    message = pre_treat_message(message)  # convert to uppercase and remove punctuation
    word_substitutes = []  # store the words here
    # assemble the disguised message by going through each letter of the plaintext
    # and replacing it with a random 5-letter string that has that character in the key position
    for c in message:
        if c not in ALPHABET:  # for our implementation, we won't bother trying to encrypt values not in ALPHABET
            continue
        # which index of the 5-letter string should match the plaintext?
        index_of_plaintext = key % 5  # use the modulo operator so that we can "wrap" large keys into an index that exists in the 5-letter string
        # generate a 5-letter string that has random values for all positions except for index_of_plaintext
        word_substitute = ''
        for i in range(0, 5):
            if i == index_of_plaintext:
                word_substitute += c
            else:
                # add a randomly-picked letter to word_substitute
                random_integer_between_zero_and_twenty_five = random.randint(0, 25)
                word_substitute += ALPHABET[random_integer_between_zero_and_twenty_five]
        word_substitutes.append(word_substitute)
    return ' '.join(word_substitutes)  # return the words as a single string, with words separated by a space
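# Illustrative addition (not part of the original exercise): decryption reverses
# the scheme by reading the letter at the key position of each 5-letter word.
def decrypt(ciphertext, key):
    index_of_plaintext = key % 5
    return ''.join(word[index_of_plaintext] for word in ciphertext.split())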
# do not modify this file beyond this point
MESSAGE = 'Strike at dawn.'
print(encrypt(MESSAGE, 1))  # encrypt the message with a key of 1
print(encrypt(MESSAGE, 3))  # encrypt the message with a key of 3
|
.ORG 0
ADD A,A
ADD A,1
ADD A,(HL)
ADD A,(IX+1)
ADD A
ADD 1
ADD (HL)
ADD (IX+1)
ADC A,A
ADC A,1
ADC A,(HL)
ADC A,(IX+1)
ADC A
ADC 1
ADC (HL)
ADC (IX+1)
SUB A,A
SUB A,1
SUB A,(HL)
SUB A,(IX+1)
SUB A
SUB 1
SUB (HL)
SUB (IX+1)
SBC A,A
SBC A,1
SBC A,(HL)
SBC A,(IX+1)
SBC A
SBC 1
SBC (HL)
SBC (IX+1)
AND A,A
AND A,1
AND A,(HL)
AND A,(IX+1)
AND A
AND 1
AND (HL)
AND (IX+1)
OR A,A
OR A,1
OR A,(HL)
OR A,(IX+1)
OR A
OR 1
OR (HL)
OR (IX+1)
XOR A,A
XOR A,1
XOR A,(HL)
XOR A,(IX+1)
XOR A
XOR 1
XOR (HL)
XOR (IX+1)
CP A,A
CP A,1
CP A,(HL)
CP A,(IX+1)
CP A
CP 1
CP (HL)
CP (IX+1)
.END
|
import slimevolleygym.slimevolley
import slimevolleygym.mlp
import slimevolleygym.slimevolley_adversarial
from slimevolleygym.slimevolley_adversarial import *
from slimevolleygym.slimevolley import *
|
module SVGAbstract
# Inline CSS style
class StyleAttrHelper
def initialize
@styles = {}
end
def to_s
# Note that underscores in keys are converted to hyphens.
kvpairs = @styles.map do |k,v|
kk = k.to_s.gsub('_', '-')
"#{kk}: #{v}"
end
return kvpairs.join('; ')
end
def set_styles(new_styles)
@styles.merge!(new_styles)
end
end
end
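# Example usage (illustrative):
#
#   style = SVGAbstract::StyleAttrHelper.new
#   style.set_styles(fill: 'red', stroke_width: 2)
#   style.to_s # => "fill: red; stroke-width: 2"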
|
import Detector from './utils/detector'
import Filter from './modules/filter'
import Hero from './modules/hero'
import Contact from './modules/contact'
function init () {
// Check for webGL capabilities
if (!Detector.webgl || /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent)) {
console.log('no gl')
} else {
if (document.querySelector('.landing--3d') !== null) {
const container = document.querySelector('.landing--3d')
let hero = new Hero(container)
hero.render()
}
}
var $navbarBurgers = Array.prototype.slice.call(document.querySelectorAll('.navbar-burger'), 0)
if ($navbarBurgers.length > 0) {
$navbarBurgers.forEach(function ($el) {
$el.addEventListener('click', function () {
var targetbrand = $el.dataset.targetbrand
var target = $el.dataset.target
var $target = document.getElementById(target)
var $targetbrand = document.getElementById(targetbrand)
$el.classList.toggle('is-active')
$target.classList.toggle('is-active')
$targetbrand.classList.toggle('is-active')
})
})
}
document.querySelectorAll('.tab-toggle').forEach($el => {
let target = $el.dataset.target
document.querySelectorAll('.code').forEach($code => { $code.style.display = 'none' })
$el.addEventListener('click', () => {
document.querySelectorAll('.code').forEach($code => { $code.style.display = 'none' })
document.querySelectorAll('.tab-toggle').forEach($tab => { $tab.classList.remove('is-active') })
$el.classList.add('is-active')
document.querySelector('.code-' + target).style.display = 'block'
})
})
if (document.querySelector('.code-html')) {
document.querySelector('.code-html').style.display = 'block'
document.querySelectorAll('.tab-toggle')[0].classList.add('is-active')
}
document.querySelectorAll('.ld-layout-toggle').forEach($el => {
$el.addEventListener('click', () => {
document.querySelectorAll('.ld-layout-toggle').forEach($toggle => { $toggle.classList.remove('is-active') })
if ($el.classList.contains('ld-layout-h')) {
document.querySelector('.ld-layout-h').classList.add('is-active')
document.querySelector('.tabs').parentElement.parentElement.style.flexDirection = 'column-reverse'
} else {
document.querySelector('.ld-layout-v').classList.add('is-active')
document.querySelector('.tabs').parentElement.parentElement.style.flexDirection = 'row'
}
})
})
if (document.querySelector('.laptop') !== null) {
window.onscroll = function () {
let laptop = document.querySelector('.laptop')
let laptopC = laptop.getBoundingClientRect()
let y1 = 200
let y2 = -150
if (laptopC.top < y1 && laptopC.top > y2) {
let percent = (laptopC.top - y1) / (y2 - y1)
percent = percent * percent * percent
document.querySelector('.is-laptop').style.setProperty('--laptop-origin', (100 - percent * 100) + '%')
document.querySelector('.is-laptop').style.setProperty('--laptop-deg', (-79 - percent * 101) + 'deg')
document.querySelector('.is-laptop').style.setProperty('--laptop-percent', percent)
}
}
}
if (document.querySelector('.is-personal-informations') !== null) {
new Contact()
}
if (document.querySelector('.filters-toggle') !== null) {
new Filter()
}
}
window.onload = () => {
init()
}
|
require 'asciidoctor/doctest'
require 'thread_safe'
require 'tilt'
require 'tilt/haml'
def engine
ENV['ENGINE'] || 'slim'
end
def pattern
ENV['PATTERN'] || '*:*'
end
namespace :generate do
DocTest::GeneratorTask.new(:html5) do |task|
task.title = "Generate testing examples #{pattern} for HTML5 using #{engine.capitalize} templates."
task.output_suite = DocTest::HTML::ExamplesSuite.new(
examples_path: 'test/examples/html5',
paragraph_xpath: './div/p/node()'
)
task.converter_opts[:template_dirs] = File.join(engine, 'html5')
task.examples_path.unshift 'test/examples/asciidoc-html'
end
end
|
import React from 'react'
import PropTypes from 'prop-types'
import Profile from '../Profile'
import Links from '../Links'
import Milestones from '../Milestones'
function ProfilePage({ profile, username }) {
return (
<main>
<Profile profile={profile} username={username} />
<Links links={profile.links} />
{profile.milestones && <Milestones milestones={profile.milestones} />}
</main>
)
}
ProfilePage.propTypes = {
username: PropTypes.string.isRequired,
profile: PropTypes.shape({
name: PropTypes.string.isRequired,
bio: PropTypes.string.isRequired,
avatar: PropTypes.string.isRequired,
links: PropTypes.arrayOf(
PropTypes.shape({
icon: PropTypes.string,
name: PropTypes.string,
url: PropTypes.string,
}),
),
milestones: PropTypes.arrayOf(
PropTypes.shape({
title: PropTypes.string,
image: PropTypes.string,
date: PropTypes.string,
icon: PropTypes.string,
color: PropTypes.string,
description: PropTypes.string,
url: PropTypes.string,
}),
),
}),
}
export default ProfilePage
|
library RheumaticDrugTherapyDecision version '1.0.0'
using FHIR version '4.0.0'
include FHIRHelpers version '4.0.0' called FHIRHelpers
include RheumaticDrugTherapy version '1.0.0' called RDT
codesystem "ICD-10-CM": 'http://hl7.org/fhir/sid/icd-10-cm'
codesystem "LOINC": 'http://loinc.org'
codesystem "HCPCS": 'https://hcpcs.codes/'
codesystem "SNOMED-CT": 'http://snomed.info/sct'
codesystem "CPT": 'https://www.aapc.com/resources/medical-coding/cpt.aspx'
codesystem "RXNORM" : 'http://www.nlm.nih.gov/research/umls/rxnorm'
valueset "Etanercept Medication": '2.16.840.1.113883.3.6037.1001.23.93.245'
parameter "QuestionnaireResponse" QuestionnaireResponse
parameter "MedicationRequest" MedicationRequest
context Patient
define EtanerceptMedicationRequest:
First(("MedicationRequest".medication as CodeableConcept).coding) in "Etanercept Medication"
define QT:
"QuestionnaireResponse".item
define PsoriaticArthritisResponse:
("QT".item I
where I.linkId.value = 'PsoriaticArthritis')QR
return First(QR.answer).value
define PsoriaticArthritisCondition:
RDT."PsoriaticArthritis"
define PsoriaticArthritis:
Coalesce(First("PsoriaticArthritisResponse"), "PsoriaticArthritisCondition")
define RheumatoidArthritisResponse:
("QT".item I
where I.linkId.value = 'RheumatoidArthritis')QR
return First(QR.answer).value
define RheumatoidArthritisCondition:
RDT."RheumatoidArthritis"
define RheumatoidArthritis:
Coalesce(First("RheumatoidArthritisResponse"), "RheumatoidArthritisCondition")
define AnkylosingSpondylitisResponse:
("QT".item I
where I.linkId.value = 'AnkylosingSpondylitis')QR
return First(QR.answer).value
define AnkylosingSpondylitisCondition:
RDT."AnkylosingSpondylitis"
define AnkylosingSpondylitis:
Coalesce(First("AnkylosingSpondylitisResponse"), "AnkylosingSpondylitisCondition")
define PlaquePsoriasisResponse:
("QT".item I
where I.linkId.value = 'PlaquePsoriasis')QR
return First(QR.answer).value
define PlaquePsoriasisCondition:
RDT."PlaquePsoriasis"
define PlaquePsoriasis:
Coalesce(First("PlaquePsoriasisResponse"), "PlaquePsoriasisCondition")
define PrimaryDiagnosisDecision:
if (("PsoriaticArthritis" = true)
or ("RheumatoidArthritis" = true)
or ("AnkylosingSpondylitis" = true)
or ("PlaquePsoriasis" = true))
then true
else false
define RheumatoidFactorTestResponse:
("QT".item I
where I.linkId.value = 'RheumatoidFactorTest')QR
return First(QR.answer).value
define RheumatoidFactorTestObservation:
RDT."RheumatoidFactorTest"
define RheumatoidFactorTest:
Coalesce(First("RheumatoidFactorTestResponse"), "RheumatoidFactorTestObservation")
define RheumatoidFactorTestResultResponse:
("QT".item I
where I.linkId.value = 'RheumatoidFactorTestResult')QR
return First(QR.answer).value
define RheumatoidFactorTestResultObservation:
RDT."RheumatoidFactorTestResult"
define RheumatoidFactorTestResult:
Coalesce(First("RheumatoidFactorTestResultResponse"), "RheumatoidFactorTestResultObservation")
define AntiCCPTestResponse:
("QT".item I
where I.linkId.value = 'AntiCCPTest')QR
return First(QR.answer).value
define AntiCCPTestObservation:
RDT."AntiCCPTest"
define AntiCCPTest:
Coalesce(First("AntiCCPTestResponse"), "AntiCCPTestObservation")
define AntiCCPTestResultResponse:
("QT".item I
where I.linkId.value = 'AntiCCPTestResult')QR
return First(QR.answer).value
define AntiCCPTestResultObservation:
RDT."AntiCCPTestResult"
define AntiCCPTestResult:
Coalesce(First("AntiCCPTestResultResponse"), "AntiCCPTestResultObservation")
define BiomarkersDecision:
if (("RheumatoidFactorTestResult" = true)
or ("AntiCCPTestResult" = true))
then true
else false
define PastConventionalDMARDTherapyResponse:
("QT".item I
where I.linkId.value = 'PastConventionalDMARDTherapy')QR
return First(QR.answer).value
define PastConventionalDMARDTherapyProc:
RDT."PastConventionalDMARDTherapy"
define PastConventionalDMARDTherapy:
Coalesce(First("PastConventionalDMARDTherapyResponse"), "PastConventionalDMARDTherapyProc")
define PastConventionalDMARDTherapyDurationResponse:
("QT".item I
where I.linkId.value = 'PastConventionalDMARDTherapyDuration')QR
return First(QR.answer).value
define PastConventionalDMARDTherapyDurationProc:
RDT."PastConventionalDMARDTherapyDuration"
define "PastConventionalDMARDTherapyDuration":
Coalesce(First("PastConventionalDMARDTherapyDurationResponse"), "PastConventionalDMARDTherapyDurationProc")
define PastConventionalDMARDTherapyOutcomeResponse:
("QT".item I
where I.linkId.value = 'PastConventionalDMARDTherapyOutcome')QR
return First(QR.answer).value
define PastConventionalDMARDTherapyOutcomeProc:
RDT."PastConventionalDMARDTherapyOutcome"
define PastConventionalDMARDTherapyOutcome:
Coalesce(First("PastConventionalDMARDTherapyOutcomeResponse"), "PastConventionalDMARDTherapyOutcomeProc")
define ContraindicationDMARDTherapyResponse:
("QT".item I
where I.linkId.value = 'ContraindicationDMARDTherapy')QR
return First(QR.answer).value
define ContraindicationDMARDTherapyProc:
RDT."ContraindicationDMARDTherapy"
define ContraindicationDMARDTherapy:
Coalesce(First("ContraindicationDMARDTherapyResponse"), "ContraindicationDMARDTherapyProc")
define DMARDTherapyDecision:
if ((("PastConventionalDMARDTherapy" = true) and ("PastConventionalDMARDTherapyDuration" = true)
and "PastConventionalDMARDTherapyOutcome" = false)
or "ContraindicationDMARDTherapy" = true)
then true
else false
define PastConventionalNSAIDTherapyResponse:
("QT".item I
where I.linkId.value = 'PastConventionalNSAIDTherapy')QR
return First(QR.answer).value
define PastConventionalNSAIDTherapyProc:
RDT."PastConventionalNSAIDTherapy"
define PastConventionalNSAIDTherapy:
Coalesce(First("PastConventionalNSAIDTherapyResponse"), "PastConventionalNSAIDTherapyProc")
define PastConventionalNSAIDTherapyOutcomeResponse:
("QT".item I
where I.linkId.value = 'PastConventionalNSAIDTherapyOutcome')QR
return First(QR.answer).value
define PastConventionalNSAIDTherapyOutcomeProc:
RDT."PastConventionalNSAIDTherapyOutcome"
define PastConventionalNSAIDTherapyOutcome:
Coalesce(First("PastConventionalNSAIDTherapyOutcomeResponse"), "PastConventionalNSAIDTherapyOutcomeProc")
define NSAIDContraindicationResponse:
("QT".item I
where I.linkId.value = 'NSAIDContraindication')QR
return First(QR.answer).value
define NSAIDContraindicationProc:
RDT."NSAIDContraindication"
define NSAIDContraindication:
Coalesce(First("NSAIDContraindicationResponse"), "NSAIDContraindicationProc")
define NSAIDTherapyDecision:
if (("PastConventionalNSAIDTherapy" = true and ("PastConventionalNSAIDTherapyOutcome" = false))
or "NSAIDContraindication" = true)
then true
else false
define PreviousBiologicDrugsResponse:
("QT".item I
where I.linkId.value = 'PreviousBiologicDrugs')QR
return First(QR.answer).value
define PreviousBiologicDrugsMed:
RDT."PreviousBiologicDrugs"
define PreviousBiologicDrugs:
Coalesce(First("PreviousBiologicDrugsResponse"), "PreviousBiologicDrugsMed")
define BiologicOutcomeResponse:
("QT".item I
where I.linkId.value = 'BiologicOutcome')QR
return First(QR.answer).value
define BiologicOutcomeMed:
RDT."BiologicOutcome"
define BiologicOutcome:
Coalesce(First("BiologicOutcomeResponse"), "BiologicOutcomeMed")
define BiologicDecision:
if ((("PreviousBiologicDrugs" = true) and ("BiologicOutcome" = true))
or ("PreviousBiologicDrugs" = false))
then true
else false
define CombinationOtherDrugsResponse:
("QT".item I
where I.linkId.value = 'CombinationOtherDrugs')QR
return First(QR.answer).value
define CombinationOtherDrugsProc:
RDT."CombinationOtherDrugs"
define CombinationOtherDrugs:
Coalesce(First("CombinationOtherDrugsResponse"), "CombinationOtherDrugsProc")
define BacterialFungalInfectionsResponse:
("QT".item I
where I.linkId.value = 'BacterialFungalInfections')QR
return First(QR.answer).value
define BacterialFungalInfectionsCondition:
RDT."BacterialFungalInfections"
define BacterialFungalInfections:
Coalesce(First("BacterialFungalInfectionsResponse"), "BacterialFungalInfectionsCondition")
define COPDResponse:
("QT".item I
where I.linkId.value = 'COPD')QR
return First(QR.answer).value
define COPDCondition:
RDT."COPD"
define COPD:
Coalesce(First("COPDResponse"), "COPDCondition")
define ContraindicationDecision:
if ("BacterialFungalInfections" = true
or "COPD" = true)
then true
else false
define RheumatologistAuthorizationResponse:
("QT".item I
where I.linkId.value = 'RheumatologistAuthorization')QR
return First(QR.answer).value
define RheumatologistAuthorization:
if (First("RheumatologistAuthorizationResponse") = true)
then true
else false
define DecisionForPSA:
if ("PsoriaticArthritis" = true
and "DMARDTherapyDecision" = true)
then true
else false
define DecisionForRA:
if ("RheumatoidArthritis" = true
and "DMARDTherapyDecision" = true)
then true
else false
define DecisionForAS:
if ("AnkylosingSpondylitis" = true
and "NSAIDTherapyDecision" = true)
then true
else false
define QualifiedDiagnosisDecision:
if ("DecisionForPSA" = true
or "DecisionForRA" = true
or "DecisionForAS" = true
or "PlaquePsoriasis" = true)
then true
else false
define PrimaryDecision:
if ("QualifiedDiagnosisDecision" = true
and "BiomarkersDecision" = true
and "BiologicDecision" = true)
then true
else false
define DecisionPending:
if ("CombinationOtherDrugs" = true
and "ContraindicationDecision" = true)
then true
else false
define FinalDecision:
if ("PrimaryDecision" = true
and "DecisionPending" = false)
then 'YES'
else 'HUMAN REVIEW NEEDED'
|
#!/usr/bin/perl -lp
@==sort@$=map$_.shift@=,@@for@@=/\pL|,/g;$_=$$[$_]
|
package script_test
import (
"errors"
"sync"
"time"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
boshscript "github.com/cloudfoundry/bosh-agent/agent/script"
fakedrainscript "github.com/cloudfoundry/bosh-agent/agent/script/drain/fakes"
fakescript "github.com/cloudfoundry/bosh-agent/agent/script/fakes"
boshlog "github.com/cloudfoundry/bosh-utils/logger"
)
var _ = Describe("ParallelScript", func() {
var (
scripts []boshscript.Script
parallelScript boshscript.ParallelScript
)
BeforeEach(func() {
scripts = []boshscript.Script{}
})
JustBeforeEach(func() {
logger := boshlog.NewLogger(boshlog.LevelNone)
parallelScript = boshscript.NewParallelScript("run-me", scripts, logger)
})
Describe("Tag", func() {
It("returns empty string", func() {
Expect(parallelScript.Tag()).To(Equal(""))
})
})
Describe("Path", func() {
It("returns empty string", func() {
Expect(parallelScript.Path()).To(Equal(""))
})
})
Describe("Exists", func() {
It("returns true", func() {
Expect(parallelScript.Exists()).To(BeTrue())
})
})
Describe("Run", func() {
Context("when there are no scripts", func() {
BeforeEach(func() {
scripts = []boshscript.Script{}
})
It("succeeds", func() {
err := parallelScript.Run()
Expect(err).ToNot(HaveOccurred())
})
})
Context("when script exists", func() {
var existingScript *fakescript.FakeScript
BeforeEach(func() {
existingScript = &fakescript.FakeScript{}
existingScript.TagReturns("fake-job-1")
existingScript.PathReturns("path/to/script1")
existingScript.ExistsReturns(true)
scripts = append(scripts, existingScript)
})
It("executes the script and succeeds", func() {
existingScript.RunReturns(nil)
err := parallelScript.Run()
Expect(err).ToNot(HaveOccurred())
Expect(existingScript.RunCallCount()).To(Equal(1))
})
It("gives an error when script fails", func() {
existingScript.RunReturns(errors.New("fake-error"))
err := parallelScript.Run()
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(Equal("1 of 1 run-me scripts failed. Failed Jobs: fake-job-1."))
Expect(existingScript.RunCallCount()).To(Equal(1))
})
})
Context("when script does not exist", func() {
var nonExistingScript *fakescript.FakeScript
BeforeEach(func() {
nonExistingScript = &fakescript.FakeScript{}
nonExistingScript.ExistsReturns(false)
scripts = append(scripts, nonExistingScript)
})
It("succeeds", func() {
err := parallelScript.Run()
Expect(err).ToNot(HaveOccurred())
})
})
Context("when running scripts concurrently", func() {
var existingScript1 *fakescript.FakeScript
var existingScript2 *fakescript.FakeScript
BeforeEach(func() {
existingScript1 = &fakescript.FakeScript{}
existingScript1.TagReturns("fake-job-1")
existingScript1.PathReturns("path/to/script1")
existingScript1.ExistsReturns(true)
scripts = append(scripts, existingScript1)
existingScript2 = &fakescript.FakeScript{}
existingScript2.TagReturns("fake-job-2")
existingScript2.PathReturns("path/to/script2")
existingScript2.ExistsReturns(true)
scripts = append(scripts, existingScript2)
})
It("executes the scripts and succeeds", func() {
existingScript1.RunReturns(nil)
existingScript2.RunReturns(nil)
err := parallelScript.Run()
Expect(err).ToNot(HaveOccurred())
Expect(existingScript1.RunCallCount()).To(Equal(1))
Expect(existingScript2.RunCallCount()).To(Equal(1))
})
It("returns two failed statuses when both scripts fail", func() {
existingScript1.RunReturns(errors.New("fake-error"))
existingScript2.RunReturns(errors.New("fake-error"))
err := parallelScript.Run()
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("2 of 2 run-me scripts failed. Failed Jobs:"))
Expect(err.Error()).To(ContainSubstring("fake-job-1"))
Expect(err.Error()).To(ContainSubstring("fake-job-2"))
Expect(err.Error()).ToNot(ContainSubstring("Successful Jobs"))
})
It("returns one failed status when first script fail and second script pass, and when one fails continue waiting for unfinished tasks", func() {
existingScript1.RunStub = func() error {
time.Sleep(2 * time.Second)
return errors.New("fake-error")
}
existingScript2.RunReturns(nil)
err := parallelScript.Run()
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(Equal("1 of 2 run-me scripts failed. Failed Jobs: fake-job-1. Successful Jobs: fake-job-2."))
})
It("returns one failed status when first script pass and second script fail", func() {
existingScript1.RunReturns(nil)
existingScript2.RunReturns(errors.New("fake-error"))
err := parallelScript.Run()
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(Equal("1 of 2 run-me scripts failed. Failed Jobs: fake-job-2. Successful Jobs: fake-job-1."))
})
It("waits for scripts to finish", func() {
existingScript1.RunStub = func() error {
time.Sleep(2 * time.Second)
return nil
}
existingScript2.RunReturns(nil)
err := parallelScript.Run()
Expect(err).ToNot(HaveOccurred())
Expect(existingScript1.RunCallCount()).To(Equal(1))
Expect(existingScript2.RunCallCount()).To(Equal(1))
})
It("runs the scripts concurrently", func(done Done) {
waitGroup := &sync.WaitGroup{}
waitGroup.Add(2)
deadlockUnlessConcurrent := func() error {
waitGroup.Done()
waitGroup.Wait()
return nil
}
existingScript1.RunStub = deadlockUnlessConcurrent
existingScript2.RunStub = deadlockUnlessConcurrent
err := parallelScript.Run()
Expect(err).ToNot(HaveOccurred())
Expect(existingScript1.RunCallCount()).To(Equal(1))
Expect(existingScript2.RunCallCount()).To(Equal(1))
close(done)
})
})
})
Describe("Cancel", func() {
Context("when there are no scripts", func() {
BeforeEach(func() {
scripts = []boshscript.Script{}
})
It("succeeds", func() {
err := parallelScript.Cancel()
Expect(err).ToNot(HaveOccurred())
})
})
Context("when script exists and is not cancelable", func() {
var existingScript *fakescript.FakeScript
BeforeEach(func() {
existingScript = &fakescript.FakeScript{}
existingScript.TagReturns("fake-job-1")
existingScript.PathReturns("path/to/script1")
existingScript.ExistsReturns(true)
scripts = append(scripts, existingScript)
})
It("returns error", func() {
existingScript.RunReturns(nil)
err := parallelScript.Cancel()
Expect(err).To(HaveOccurred())
})
})
Context("when script exists and is cancelable", func() {
var existingScript *fakedrainscript.FakeScript
BeforeEach(func() {
existingScript = fakedrainscript.NewFakeScript("fake-tag")
scripts = append(scripts, existingScript)
})
It("succeeds", func() {
err := parallelScript.Cancel()
Expect(err).ToNot(HaveOccurred())
})
})
Context("when run cancelable scripts in parallel", func() {
var existingScript1 *fakedrainscript.FakeScript
var existingScript2 *fakedrainscript.FakeScript
BeforeEach(func() {
existingScript1 = fakedrainscript.NewFakeScript("fake-job1")
scripts = append(scripts, existingScript1)
existingScript2 = fakedrainscript.NewFakeScript("fake-job2")
scripts = append(scripts, existingScript2)
})
It("succeeds", func() {
err := parallelScript.Run()
Expect(err).ToNot(HaveOccurred())
err = parallelScript.Cancel()
Expect(err).ToNot(HaveOccurred())
Expect(existingScript1.WasCanceled).To(BeTrue())
Expect(existingScript2.WasCanceled).To(BeTrue())
})
})
})
})
|
package platform;
/**
* Launches both containers and associated agents.
*
* @author Andrei Olaru
*/
public class SingleLauncher
{
/**
* Creates and launches containers.
*
* @param args
* - not used.
*/
public static void main(String[] args)
{
MainContainerLauncher main = new MainContainerLauncher();
SlaveContainerLauncher slave = new SlaveContainerLauncher();
main.setupPlatform();
slave.setupPlatform();
main.startAgents();
slave.startAgents();
}
}
|
namespace Computer.Bus.Domain.Contracts;
public interface IPublishResult
{
public bool Success { get; }
public string? Reason { get; }
}
|
require_relative 'test_helper'
require 'fileutils'
require 'pp'
class TestAsciiToGridFlat < Minitest::Test #:nodoc:
def setup
@g = AxialGrid.new()
@g.read_ascii_file_flat_topped_odd( 'test/ascii_map_flat_topped.txt' )
end
def test_reading
assert_equal( :a, @g.cget( 0 , 0 ).color )
assert_equal( :b, @g.cget( 2 , -1 ).color )
assert_equal( :c, @g.cget( 4 , -2 ).color )
assert_equal( :g, @g.cget( 1, 0 ).color )
assert_equal( :h, @g.cget( 3, -1 ).color )
assert_equal( :i, @g.cget( 5, -2 ).color )
assert_equal( :m, @g.cget( 0, 1 ).color )
assert_equal( :n, @g.cget( 2, 0 ).color )
assert_equal( :o, @g.cget( 4, -1 ).color )
end
def test_writing
@g.write_ascii_file_flat_topped_odd( '/tmp/ascii_map_flat_topped.txt' )
assert( FileUtils.identical?( '/tmp/ascii_map_flat_topped.txt', 'test/ascii_map_flat_topped.txt' ) ) # assert the comparison so the test can actually fail
end
end
|
import { mount, shallow } from 'enzyme';
import Pagination from './Pagination';
import React from 'react';
describe('Pagination', () => {
const pageButtonSelector = '.ds-c-button';
const onPageChange = jest.fn();
const render = (overrideProps = {}, shouldDeepRender = false) => {
const props = {
totalPages: 3,
onPageChange: onPageChange,
renderHref: () => '/test',
...overrideProps,
};
return shouldDeepRender ? mount(<Pagination {...props} />) : shallow(<Pagination {...props} />);
};
it('should render component', () => {
const wrapper = render({ currentPage: 2 });
expect(wrapper.is('nav')).toBe(true);
expect(wrapper).toMatchSnapshot();
});
describe('accessibility attributes', () => {
it('should have navigation label', () => {
const wrapper = render({ totalPages: 8 });
expect(wrapper.prop('aria-label')).toEqual('Pagination');
});
it('should set a custom navigation label', () => {
const wrapper = render({ totalPages: 8, ariaLabel: 'Pagey page page' });
expect(wrapper.prop('aria-label')).toEqual('Pagey page page');
});
});
it('should add custom className if specified', () => {
const customClassName = 'custom-class';
const wrapper = render({ totalPages: 8, className: customClassName });
expect(wrapper.hasClass(customClassName)).toBeTruthy();
});
describe('interactivity', () => {
describe('onPageChange', () => {
afterEach(() => {
jest.resetAllMocks();
});
it('should call onPageChange when "previous" is pressed', () => {
const wrapper = render({ currentPage: 2 });
wrapper.childAt(0).simulate('click', {});
expect(onPageChange).toHaveBeenCalledTimes(1);
expect(onPageChange).toHaveBeenCalledWith(expect.anything(), 1);
});
it('should call onPageChange when "next" is pressed', () => {
const wrapper = render({ currentPage: 2 });
wrapper.childAt(2).simulate('click', {});
expect(onPageChange).toHaveBeenCalledTimes(1);
expect(onPageChange).toHaveBeenCalledWith(expect.anything(), 3);
});
it('should call onPageChange when a page is pressed', () => {
const wrapper = render({ currentPage: 2 }, true);
wrapper.find('ul').childAt(0).find(pageButtonSelector).simulate('click', {});
expect(onPageChange).toHaveBeenCalledTimes(1);
expect(onPageChange).toHaveBeenCalledWith(expect.anything(), 1);
});
});
describe('href', () => {
it('should have appropriate href for "previous"', () => {
const wrapper = render({ currentPage: 2, renderHref: (currentPage) => `#${currentPage}` });
const prevEl = wrapper.childAt(0);
expect(prevEl.prop('href')).toBe('#1');
});
it('should have appropriate href for next page in the page set', () => {
const wrapper = render({
currentPage: 4,
totalPages: 8,
renderHref: (currentPage) => `#${currentPage}`,
});
const prevEl = wrapper.childAt(2);
expect(prevEl.prop('href')).toBe('#5');
});
it('should have appropriate href for "next"', () => {
const wrapper = render({ currentPage: 2, renderHref: (currentPage) => `#${currentPage}` });
const nextEl = wrapper.childAt(2);
expect(nextEl.prop('href')).toBe('#3');
});
});
});
describe('navigation slot behavior', () => {
it('should show "previous" navigation slot if current page is not first page of set', () => {
const wrapper = render({ currentPage: 2 });
const firstChild = wrapper.childAt(0);
expect(firstChild.dive().type()).toEqual('a');
expect(firstChild.dive().text()).toEqual('<ArrowIcon />Previous');
});
it('should hide "previous" navigation slot if current page is first page of set', () => {
const wrapper = render({ currentPage: 1 });
const prevButton = wrapper.childAt(0);
expect(prevButton.type()).toEqual('span');
});
it('should show "next" navigation slot if current page is not last page of set', () => {
const wrapper = render({ currentPage: 2 });
const lastChild = wrapper.children().last();
expect(lastChild.dive().type()).toEqual('a');
expect(lastChild.dive().text()).toEqual('Next<ArrowIcon />');
});
it('should hide "next" navigation slot if current page is last page of set', () => {
const wrapper = render({ currentPage: 3 });
const lastChild = wrapper.children().last();
expect(lastChild.type()).toEqual('span');
});
});
describe('pagination slot behavior', () => {
it('should begin page count with 1', () => {
const wrapper = render({ currentPage: 1 });
const firstPage = wrapper.find('ul').childAt(0).dive().find(pageButtonSelector);
expect(firstPage).toBeDefined();
expect(firstPage.text()).toBe('1');
});
it('should end page count with page total', () => {
const lastPageNum = 3;
const wrapper = render({ currentPage: 1, totalPages: lastPageNum }, true);
const lastPage = wrapper.find('ul').childAt(2).find(pageButtonSelector);
expect(lastPage).toBeDefined();
expect(lastPage.text()).toBe(`${lastPageNum}`);
});
it('should highlight current page with correct styles', () => {
const wrapper = render({ currentPage: 3, totalPages: 5 });
const currentPageEl = wrapper.find('ul').childAt(2).dive().find(pageButtonSelector);
expect(currentPageEl.type()).toEqual('span');
expect(currentPageEl.prop('aria-current')).toEqual('true');
expect(currentPageEl.hasClass('ds-c-pagination__current-page')).toBeTruthy();
});
describe('less than 7 pages', () => {
it('should show all pages', () => {
const totalPageNum = 5;
const wrapper = render({ currentPage: 1, totalPages: totalPageNum });
const listEl = wrapper.find('ul');
const pageItems = listEl.children();
expect(pageItems.length).toEqual(totalPageNum);
expect(listEl).toMatchSnapshot();
});
it('should never show ellipses', () => {
const wrapper = render({ totalPages: 6 });
expect(wrapper.find('Ellipses').length).toBe(0);
});
});
describe('more than 7 pages', () => {
it('should not show beginning ellipses for pages 1 - 3', () => {
const wrapper1 = render({ currentPage: 1, totalPages: 35 }, true);
const wrapper2 = render({ currentPage: 2, totalPages: 35 }, true);
const wrapper3 = render({ currentPage: 3, totalPages: 35 }, true);
expect(wrapper1.find('Ellipses').length).toBe(1);
let listEl = wrapper1.find('ul');
let secondSlot = listEl.childAt(1).find(pageButtonSelector);
expect(listEl.children().length).toBe(7);
expect(secondSlot).toBeDefined();
expect(secondSlot.text()).toBe('2');
expect(wrapper2.find('Ellipses').length).toBe(1);
listEl = wrapper2.find('ul');
secondSlot = listEl.childAt(1).find(pageButtonSelector);
expect(listEl.children().length).toBe(7);
expect(secondSlot).toBeDefined();
expect(secondSlot.text()).toBe('2');
expect(wrapper3.find('Ellipses').length).toBe(1);
listEl = wrapper3.find('ul');
secondSlot = listEl.childAt(1).find(pageButtonSelector);
expect(listEl.children().length).toBe(7);
expect(secondSlot).toBeDefined();
expect(secondSlot.text()).toBe('2');
});
it('should not show end ellipses for last 3 pages', () => {
const wrapperLast = render({ currentPage: 35, totalPages: 35 }, true);
const wrapperSecondLast = render({ currentPage: 34, totalPages: 35 }, true);
const wrapperThirdLast = render({ currentPage: 33, totalPages: 35 }, true);
expect(wrapperLast.find('Ellipses').length).toBe(1);
let listEl = wrapperLast.find('ul');
let secondLastSlot = listEl.childAt(5).find(pageButtonSelector);
expect(listEl.children().length).toBe(7);
expect(secondLastSlot).toBeDefined();
expect(secondLastSlot.text()).toBe('34');
expect(wrapperSecondLast.find('Ellipses').length).toBe(1);
listEl = wrapperSecondLast.find('ul');
secondLastSlot = listEl.childAt(5).find(pageButtonSelector);
expect(listEl.children().length).toBe(7);
expect(secondLastSlot).toBeDefined();
expect(secondLastSlot.text()).toBe('34');
expect(wrapperThirdLast.find('Ellipses').length).toBe(1);
listEl = wrapperThirdLast.find('ul');
secondLastSlot = listEl.childAt(5).find(pageButtonSelector);
expect(listEl.children().length).toBe(7);
expect(secondLastSlot).toBeDefined();
expect(secondLastSlot.text()).toBe('34');
});
it('should show both ellipses for number in middle', () => {
const wrapperEndMiddle = render({ currentPage: 10, totalPages: 35 });
const wrapperBeginningMiddle = render({ currentPage: 30, totalPages: 35 });
let listEl = wrapperEndMiddle.find('ul');
expect(listEl.children().length).toBe(7);
expect(wrapperEndMiddle.find('Ellipses').length).toBe(2);
listEl = wrapperBeginningMiddle.find('ul');
expect(wrapperBeginningMiddle.find('Ellipses').length).toBe(2);
expect(listEl.children().length).toBe(7);
});
});
});
describe('with compact prop enabled', () => {
it('should render compact variant', () => {
const wrapper = render({ currentPage: 2, compact: true });
const compactClassName = '.ds-c-pagination__page-count';
expect(wrapper.find(compactClassName).exists()).toBe(true);
expect(wrapper.find('ul').exists()).toBe(false);
expect(wrapper).toMatchSnapshot();
});
it('should render non-interactive text nodes in place of pagination slot links', () => {
const wrapper = render({ currentPage: 2, compact: true });
const pages = wrapper.childAt(1);
expect(pages.type()).toEqual('span');
expect(pages.contains('a')).toBe(false);
});
});
describe('isNavigationHidden', () => {
it('should hide previous when on first page', () => {
const wrapper = render({ currentPage: 1, isNavigationHidden: true }, true);
const firstChild = wrapper.find('.ds-c-pagination__nav').first();
expect(firstChild.type()).toEqual('span');
expect(firstChild.props().style).toHaveProperty('visibility', 'hidden');
});
it('should hide next when on last page', () => {
const wrapper = render({ currentPage: 3, isNavigationHidden: true }, true);
const lastChild = wrapper.find('.ds-c-pagination__nav').last();
expect(lastChild.type()).toEqual('span');
expect(lastChild.props().style).toHaveProperty('visibility', 'hidden');
});
});
});
|
import {
buildForm,
buildMultiField,
buildCustomField,
buildSection,
buildSubmitField,
Form,
FormModes,
DefaultEvents,
} from '@island.is/application/core'
import { m } from '../lib/messages'
import Logo from '../assets/Logo'
export const CollectEndorsementsForm: Form = buildForm({
id: 'CollectEndorsement',
title: m.constituencySection.title,
logo: Logo,
mode: FormModes.APPLYING,
children: [
buildSection({
id: 'endorsementSection',
title: m.endorsementList.title,
children: [
buildMultiField({
id: 'endorsements',
title: m.endorsementList.title,
children: [
buildCustomField({
id: 'endorsements',
title: m.endorsementList.title,
component: 'EndorsementList',
}),
],
}),
],
}),
buildSection({
id: 'endorsementListSubmission',
title: m.endorsementListSubmission.shortTitle,
children: [
buildCustomField({
id: 'selectEndorsementsList',
title: m.endorsementListSubmission.title,
component: 'EndorsementListSubmission',
}),
],
}),
buildSection({
id: 'overviewSection',
title: m.overviewSection.title,
children: [
buildMultiField({
id: 'overviewSubmit',
title: m.overviewSection.title,
description: m.overviewSection.description,
children: [
buildCustomField({
id: 'review',
title: '',
component: 'Overview',
}),
buildSubmitField({
id: 'submit',
title: '',
placement: 'footer',
actions: [
{
event: DefaultEvents.SUBMIT,
name: m.overviewSection.submitApplication,
type: 'primary',
},
],
}),
],
}),
buildCustomField({
id: 'applicationApproved',
title: m.applicationApproved.title,
component: 'PartyApplicationApproved',
}),
],
}),
],
})
|
# frozen_string_literal: true
require 'iso_country_codes'
require 'rest_client'
require 'dotenv'
require 'uri'
require 'logger'
require 'json'
require 'uuid'
require 'import_export/version'
require 'import_export/source'
require 'import_export/result'
require 'import_export/client'
require 'import_export/query'
Dotenv.load
RestClient.log = $stdout unless ENV['DEBUG'].to_s.empty?
module ImportExport
API_BASE = 'https://data.trade.gov/consolidated_screening_list/v1/'
def self.user_agent
"ImportExport/#{ImportExport::VERSION}; +https://github.com/benbalter/import_export)"
end
end
|
<?php
/**
* Router. Matches action from request
*
* Copyright © 2013-2017 Magento, Inc. All rights reserved.
* See COPYING.txt for license details.
*/
namespace Magento\Framework\App;
interface RouterInterface
{
/**
* Match application action by request
*
* @param RequestInterface $request
* @return ActionInterface
*/
public function match(RequestInterface $request);
}
|
import React from 'react';
import { View, Text, Image } from '@tarojs/components';
import { router } from '@ysyp/utils/dist/router';
import { AtIcon } from 'taro-ui';
// import { Title } from '@tarojs/components'
// http://www.fhdq.net/bd/94.html
/** <>()()〈〉‹›﹛﹜『』〖〗[]《》﹝﹞〔〕{}「」【】︵︶︷︸︿﹀︹︺︽︾﹁﹂﹃﹄︻︼ */
export interface IOrderTitleProps {
title: string;
icon?: string;
deleteIcon?: boolean;
subTitle?: string;
more?: string;
url?: string;
fontSize?: string;
padding?: string;
}
export const YYOrderTitle = (props: IOrderTitleProps) => {
const { title, icon, subTitle, more, url, fontSize = '12px', padding = '10px', deleteIcon } = props;
return (
<View
className='yy-title yy-order-title'
style={{
fontSize,
padding,
}}
>
<View
className='yy-title-left'
onClick={() =>
more &&
url &&
router.navigateTo({
url,
})
}
>
<Text className='yy-title-text'>
{!!icon && (
<Image
src={icon}
className='yy-title-icon'
style={{
width: fontSize,
}}
/>
)}
{title} 〉
</Text>
{subTitle && <Text className='yy-title-subtext'>{subTitle}</Text>}
</View>
<View
className='yy-title-right'
onClick={() => {
console.log('yy-title-right');
}}
>
已取消
</View>
</View>
);
};
|
<?php
namespace ValuModeler\Service;
use ValuModeler\Model;
use ValuSo\Annotation as ValuService;
class FieldService extends AbstractEntityService
{
/**
* Does document have a named field
*
* @param string $document
* @param string $name
* @return boolean
*/
public function exists($document, $name)
{
$document = $this->resolveDocument($document, true);
return $document->getField($name) !== null;
}
/**
* Create a new field to document
*
* @param string $document
* @param string $name
* @param string $fieldType
* @param array $specs
* @return boolean True on success, false otherwise
*/
public function create($document, $name = null, $fieldType = null, array $specs = array())
{
$document = $this->resolveDocument($document, true);
if (isset($name)) {
$specs['name'] = $name;
}
if (isset($fieldType)) {
$specs['fieldType'] = $fieldType;
}
$response = $this->proxy()->doCreate($document, $specs);
if ($response) {
$this->getDocumentManager()->flush($document);
}
return $response;
}
/**
* Batch-create fields
*
* @param string $document
* @param array $fields
* @return array
*/
public function createMany($document, $fields)
{
$document = $this->resolveDocument($document, true);
$responses = array();
foreach ($fields as $key => $specs) {
$responses[$key] = $this->proxy()->doCreate($document, $specs);
}
$this->getDocumentManager()->flush($document);
return $responses;
}
/**
* Update field
*
* @param string|\ValuModeler\Model\Document $document
* @param string $name
* @param array $specs
* @return \ValuModeler\Model\Field|NULL
*/
public function update($document, $name, array $specs)
{
$document = $this->resolveDocument($document, true);
$field = $document->getField($name);
if (!$field) {
throw new Exception\FieldNotFoundException(
'Document %DOCUMENT% does not contain field %FIELD%',
['DOCUMENT' => $document->getName(), 'FIELD' => $name]);
}
$result = $this->proxy()->doUpdate($document, $field, $specs);
$this->getDocumentManager()->flush($document);
return $result;
}
/**
* Create a new field or update existing
*
* @param string|\ValuModeler\Model\Document $document
* @param string $name
* @param array $specs
* @return \ValuModeler\Model\Field|NULL
*/
public function upsert($document, $name, array $specs)
{
$document = $this->resolveDocument($document, true);
$field = $this->doUpsert($document, $name, $specs);
$this->getDocumentManager()->flush($document);
return $field;
}
/**
* Batch-upsert fields
*
* @param string|\ValuModeler\Model\Document $document
* @param array $fields
* @return array
*/
public function upsertMany($document, array $fields)
{
$document = $this->resolveDocument($document, true);
$results = array();
foreach ($fields as $key => $specs) {
if (isset($specs['name'])) {
$results[$key] = $this->upsert($document, $specs['name'], $specs);
} else {
$results[$key] = null;
}
}
$this->getDocumentManager()->flush($document);
return $results;
}
/**
* Remove a field from document
*
* @param string $document
* @param string $name
*/
public function remove($document, $name)
{
$document = $this->resolveDocument($document, true);
$response = $this->proxy()->doRemove($document, $name);
$this->getDocumentManager()->flush($document);
return $response;
}
/**
* Batch-remove fields from document
*
* @param array $fields
*/
public function removeMany($document, array $fields)
{
$document = $this->resolveDocument($document, true);
$responses = array();
foreach ($fields as $key => $name) {
$responses[$key] = $this->proxy()->doRemove($document, $name);
}
if (in_array(true, $responses, true)) {
$this->getDocumentManager()->flush($document);
}
return $responses;
}
/**
* Create a new field
*
* @param Model\Document $document
* @param array $specs
* @return boolean
*
* @ValuService\Trigger({"type":"post","name":"post.<service>.create"})
* @ValuService\Trigger({"type":"post","name":"post.valumodelerdocument.change","args":{"document"}})
*/
protected function doCreate(Model\Document $document, array $specs)
{
$fullSpecs = $specs;
$specs = $this->filterAndValidate('field', $specs, false);
$field = new Model\Field($specs['name'], $specs['fieldType']);
$document->addField($field);
unset($specs['type']);
unset($specs['fieldType']);
$field->setOptions(array_merge($fullSpecs, $specs));
return $field;
}
/**
* Perform field removal
*
* @param Model\Document $document
* @param string $name
*
* @ValuService\Trigger({"type":"post","name":"post.<service>.remove"})
* @ValuService\Trigger({"type":"post","name":"post.valumodelerdocument.change","args":{"document"}})
*/
protected function doRemove(Model\Document $document, $name)
{
return $document->removeField($name);
}
/**
* Perform field update
*
* @param Model\Document $document
* @param Model\Field $field
* @param array $specs
* @return boolean
*
* @ValuService\Trigger({"type":"post","name":"post.<service>.update"})
* @ValuService\Trigger({"type":"post","name":"post.valumodelerdocument.change","args":{"document"}})
*/
protected function doUpdate(Model\Document $document, Model\Field $field, array $specs)
{
$fullSpecs = $specs;
$specs = $this->filterAndValidate('field', $specs, true);
$field->setOptions(array_merge($fullSpecs, $specs));
return true;
}
/**
* Perform upsert
*
* @param Model\Document $document
* @param string $name
* @param array $specs
* @return \ValuModeler\Model\Field
*/
protected function doUpsert(Model\Document $document, $name, array $specs)
{
$document = $this->resolveDocument($document, true);
$field = $document->getField($name);
if ($field) {
$this->proxy()->doUpdate($document, $field, $specs);
return $field;
} else {
$specs['name'] = $name;
$field = $this->proxy()->doCreate($document, $specs);
return $field;
}
}
/**
* (non-PHPdoc)
* @see \ValuModeler\Service\AbstractEntityService::filterAndValidate()
*/
protected function filterAndValidate($entityType, array $specs, $useValidationGroup = false)
{
if ($entityType === 'field') {
if (isset($specs['fieldType']) && !Model\Field::getTypeFactory()->isValidFieldType($specs['fieldType'])) {
throw new Exception\UnknownFieldTypeException(
'Unknown field type: %TYPE%', array('TYPE' => $specs['fieldType']));
}
return parent::filterAndValidate($entityType, $specs, $useValidationGroup);
} else {
return parent::filterAndValidate($entityType, $specs, $useValidationGroup);
}
}
}
|
# How to add a new system Python bundle
## Building the system Python bundles
System Python bundles are built for three platforms: linux, darwin and windows.
Substitute PYTHON_VERSION with the version of the Python you need.
### Linux
1. Install a system with the Ubuntu version the system Python is to be taken from. There are two options:
    1. If you need a system Python that will run tests on distbuild, use the same Ubuntu version as on distbuild.
       Keep in mind that distbuild may run a fairly old Ubuntu version that does not ship the Python version you need.
    2. Pick an Ubuntu version that ships the Python you need.
2. `mkdir -p ~/work/packages`
3. `cd ~/work/packages`
4. Fetch the Python deb packages:
    1. Fetch a system Python for running on distbuild:

        apt-get download $(apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances python{PYTHON_VERSION}-dev python{|3}-pkg-resources python{|3}-setuptools | grep "^\w" | sort -u)
        rm libc6* libc-*

    2. Fetch a system Python for building shared objects (.so files):

        apt download python{PYTHON_VERSION} python{PYTHON_VERSION}-dev python{PYTHON_VERSION}-minimal libpython{PYTHON_VERSION} libpython{PYTHON_VERSION}-dev libpython{PYTHON_VERSION}-stdlib libpython{PYTHON_VERSION}-minimal

5. `cd ..`
6. `for path in $(ls packages); do ar -xf packages/$path; tar -xf data.tar.xz; done;`
7. `mv usr python`
8. `tar -czf python{PYTHON_VERSION}_linux.tar.gz python`
9. `ya upload python{PYTHON_VERSION}_linux.tar.gz -d "Ubuntu {UBUNTU_VERSION} x86_64 python{PYTHON_VERSION} installation" --do-not-remove`

   Here UBUNTU_VERSION is the Ubuntu version the system Python was built on.
### Darwin
1. Find a MacBook.
2. All installed Pythons live in `/Library/Frameworks/Python.framework/Versions`.
3. Copy `/Library/Frameworks/Python.framework` into a directory named `python`.
4. Remove the Python versions you do not need from `python/Python.framework/Versions/`.
5. Check that the symlinks point to the right places:
    1. `python/Python.framework/Versions/Current -> {PYTHON_VERSION}`
    2. `python/Python.framework/Headers -> Versions/Current/Headers`
    3. `python/Python.framework/Python -> Versions/Current/Python`
    4. `python/Python.framework/Resources -> Versions/Current/Resources`
6. `tar -czf python{PYTHON_VERSION}_darwin.tar.gz python`
7. `ya upload python{PYTHON_VERSION}_darwin.tar.gz -d "Darwin x86_64 python{PYTHON_VERSION} installation" --do-not-remove`

If the Python you need is not present on the system, install it from `python.org`; its installation can be found in the standard location.
If you need a Python from `brew`, its installation can be found in `/usr/local/Cellar/python*/{python_version}/Frameworks/`;
from there, follow the standard instructions above.
### Windows
1. Find a Windows machine.
2. Install the Python version you need from `python.org`.
3. Copy the contents of the Python installation into a directory named `python`.
4. Pack the `python` directory into `python{PYTHON_VERSION}_windows.tar.gz`.
5. `ya upload python{PYTHON_VERSION}_windows.tar.gz -d "Windows x86_64 python{PYTHON_VERSION} installation" --do-not-remove`
## Adding the system Python bundles to the build
1. The configuration of the system Python bundles lives in [build/platform/python](https://a.yandex-team.ru/arc/trunk/arcadia/build/platform/python).
2. Add the sandbox resources of the built bundles to [resources.inc](https://a.yandex-team.ru/arc/trunk/arcadia/build/platform/python/resources.inc):

        SET(PYTHON38_LINUX sbr:1211259884)

3. Add the service variables `_SYSTEM_PYTHON*`, `PY_VERSION` and `PY_FRAMEWORK_VERSION` for the system Python, if they do not exist yet,
   to [ymake.core.conf](https://a.yandex-team.ru/arc/trunk/arcadia/build/ymake.core.conf?rev=7640792#L380) by analogy with the existing entries:

        "3.8" ? {
            _SYSTEM_PYTHON38=yes
            PY_VERSION=3.8
            PY_FRAMEWORK_VERSION=3.8
        }

4. Add the resource to [build/platform/python/ya.make](https://a.yandex-team.ru/arc/trunk/arcadia/build/platform/python/ya.make):

        DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON ${PYTHON38_LINUX})

## Verifying the build
1. Create a trivial PY2MODULE that uses the C API of the bundled Python, or find a suitable one in the repository.
2. Build it:
    1. linux: `ya make -DUSE_SYSTEM_PYTHON=3.8 --target-platform linux`
    2. darwin: `ya make -DUSE_SYSTEM_PYTHON=3.8 --target-platform darwin`
    3. windows: `ya make -DUSE_SYSTEM_PYTHON=3.8 --target-platform win`
3. Check that the resulting modules can be imported by the Python interpreters on the corresponding systems.
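For example, a minimal import check might look like this (the module name `hello` is hypothetical):

    python3.8 -c "import hello"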
|
require 'crave'
require 'open3'
using Crave::Support
class Crave::Dependency::Base::Installation
private
def system_out(*args)
Open3.capture2(*args).first
end
def satisfies_dependency?(dependency)
true
end
# Here is the problem we're solving:
#
# There are multiple ways a program may be symlinked.
#
# Type 1: /usr/local/bin/ruby (link) -> /usr/local/Cellar/ruby-2.5.1/bin/ruby (real)
# Type 2: /usr/bin/ruby (link) -> /usr/bin/ruby2.4 (real)
# Type 3: /usr/bin/redis-server (link) -> /usr/bin/redis-check-rdb (real)
#
# In Type 1, (e.g. homebrew/linuxbrew)
# - real basename = link basename
# - real dirname != link dirname
# (the real executable is in an installation folder, symlinked to /usr/local/bin)
#
# In Type 2, (e.g. the Debian/Ubuntu alternatives system)
# - real basename != link basename
# (the real executable is suffixed with the version number, linked to the common name)
# - real dirname = link dirname
#
# In Type 3, (e.g. the busybox design)
# - real basename != link basename
# (as with busybox, the name of the link is significant -- the executable
# itself changes behaviour based on the command name)
# - real dirname = link dirname
#
# We want to treat each type differently.
#
# Type 1 and Type 2 - These are actually handled the same. We can just resolve
# the realpath and treat these as version-suffixed commands (with Type 1 having
# a blank suffix.)
#
# Type 3 - These we need to treat manually as not having a suffix. The way we
# tell between these cases is just by checking whether the link basename is a
# prefix of the real basename. This should work for most packages, but I can
# imagine some cases where it may fail, like say if the real busybox is named
# "foo-server" which is symlinked as "foo". Then the suffix would be detected
# as "-server" and we would erroneously look for "foo-server-server" and
# "foo-server" as the commands... If this case ever comes up we can come up
# with additional heuristics for telling these cases apart.
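#
# Example: with known_command_name "ruby" and found_command_path
# "/usr/bin/ruby2.4", the detected suffix is "2.4", so a command name
# like "gem" resolves to "/usr/bin/gem2.4" (names here are illustrative).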
#
# @return [Array<Crave::Command>]
def find_commands(known_command_name, found_command_path, command_names)
found_command_basename = File.basename(found_command_path)
suffix = if found_command_basename.start_with?(known_command_name)
found_command_basename.gsub(/^#{Regexp.escape(known_command_name)}/, '')
else
""
end
dir = File.dirname(found_command_path)
command_names.map do |command_name|
Crave::Command.new(command_name, File.join(dir, "#{command_name}#{suffix}"))
end
end
end
|
package ai.platon.pulsar.crawl.common.options
import ai.platon.pulsar.common.config.AppConstants.EXAMPLE_URL
import ai.platon.pulsar.common.config.VolatileConfig
import ai.platon.pulsar.common.options.LoadOptions
import ai.platon.pulsar.common.urls.Hyperlink
import org.junit.Test
import java.time.Duration
import kotlin.test.assertEquals
import kotlin.test.assertFalse
import kotlin.test.assertTrue
/**
* Created by vincent on 16-7-20.
* Copyright @ 2013-2016 Platon AI. All rights reserved
*/
class TestUrlNormalizer {
private val conf = VolatileConfig()
val args1 = "-parse -incognito -expires 1s -retry -storeContent false -cacheContent false"
val args2 = "-incognito -expires 1d -storeContent true -cacheContent true"
val options1 = LoadOptions.parse(args1, conf)
val options2 = LoadOptions.parse(args2, conf)
val url1 = Hyperlink(EXAMPLE_URL, args = args1)
val url2 = Hyperlink(EXAMPLE_URL, args = args2)
@Test
fun testMerge() {
val args22 = url2.args
var options11 = options1.clone()
assertTrue(options11.parse)
assertTrue(options11.incognito)
assertFalse(options11.storeContent)
if (args22 != null) {
options11 = LoadOptions.parse("$options11 $args22", conf)
}
assertMergedOptions(options11, "options1 merge args2\n<$args22>\n$options11")
}
@Test
fun testNormalize() {
val options = LoadOptions.merge(options1, url2.args)
assertMergedOptions(options, "args1 merge args2\n$options")
}
private fun assertMergedOptions(options: LoadOptions, message: String) {
assertTrue(message) { options.storeContent }
assertTrue(message) { options.incognito }
assertTrue(message) { options.parse }
assertEquals(Duration.ofDays(1), options.expires, message)
}
}
|
package org.rrd4j.core;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* Abstract byte array based backend.
*
*/
public abstract class RrdByteArrayBackend extends ByteBufferBackend {
private byte[] buffer;
/**
* <p>Constructor for RrdByteArrayBackend.</p>
*
* @param path a {@link java.lang.String} object.
*/
protected RrdByteArrayBackend(String path) {
super(path);
}
protected void setBuffer(byte[] buffer) {
this.buffer = buffer;
setByteBuffer(ByteBuffer.wrap(buffer));
}
protected byte[] getBuffer() {
return buffer;
}
/**
* <p>read.</p>
*
* @param offset a long.
* @param bytes an array of byte.
* @throws java.io.IOException if any.
* @throws java.lang.IllegalArgumentException if offset is bigger than the possible length.
*/
@Override
protected synchronized void read(long offset, byte[] bytes) throws IOException {
if (offset < 0 || offset > Integer.MAX_VALUE) {
throw new IllegalArgumentException("Illegal offset: " + offset);
}
if (offset + bytes.length <= buffer.length) {
System.arraycopy(buffer, (int) offset, bytes, 0, bytes.length);
}
else {
throw new RrdBackendException("Not enough bytes available in RRD buffer; RRD " + getPath());
}
}
/**
* {@inheritDoc}
*
* @return Number of RRD bytes held in memory.
*/
public long getLength() {
return buffer.length;
}
/**
* {@inheritDoc}
*
* <p>It reserves a memory section as the RRD storage.</p>
*
* @throws java.lang.IllegalArgumentException if length is bigger than the possible length.
*/
protected void setLength(long length) throws IOException {
if (length < 0 || length > Integer.MAX_VALUE) {
throw new IllegalArgumentException("Illegal length: " + length);
}
buffer = new byte[(int) length];
setByteBuffer(ByteBuffer.wrap(buffer));
}
}
|
namespace Captura.ViewModels
{
public abstract class ViewModelBase : NotifyPropertyChanged { }
} |
# backend

To get a new .jar for the Docker image, right-click the project, Run As, Maven install.
After this, the compiled .jar will be at the path specified in the Dockerfile.
To configure Docker:
In a terminal with Docker installed, from the root of the project:
- docker build -f Dockerfile -t dockerdemo .
- docker run dockerdemo
To build the container, use the docker_build.sh script.
To run the container, use the docker_run.sh script.
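A minimal sketch of what these scripts might contain (illustrative; inferred from the commands above):
- docker_build.sh: docker build -f Dockerfile -t dockerdemo .
- docker_run.sh: docker run dockerdemo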
The project is compiled with Java 15 (JDK 15).
POSTGRES 13.2
|
%% -------------------------------------------------------------------
%%
%% riakhttpc: Riak HTTP Client
%%
%% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Utility functions for datatypes.
-module(rhc_dt).
-export([
datatype_from_json/1,
encode_update_request/3,
decode_error/2
]).
-define(FIELD_PATTERN, "^(.*)_(counter|set|hll|register|flag|map)$").
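%% Map field names carry their datatype as a suffix; e.g. the JSON field
%% <<"tags_set">> decodes to the pair {<<"tags">>, set} (field name illustrative).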
datatype_from_json({struct, Props}) ->
Value = proplists:get_value(<<"value">>, Props),
Type = binary_to_existing_atom(proplists:get_value(<<"type">>, Props), utf8),
Context = proplists:get_value(<<"context">>, Props, undefined),
Mod = riakc_datatype:module_for_type(Type),
Mod:new(decode_value(Type, Value), Context).
decode_value(counter, Value) -> Value;
decode_value(set, Value) -> Value;
decode_value(gset, Value) -> Value;
decode_value(hll, Value) -> Value;
decode_value(flag, Value) -> Value;
decode_value(register, Value) -> Value;
decode_value(map, {struct, Fields}) ->
[ begin
{Name, Type} = field_from_json(Field),
{{Name,Type}, decode_value(Type, Value)}
end || {Field, Value} <- Fields ].
field_from_json(Bin) when is_binary(Bin) ->
{match, [Name, BinType]} = re:run(Bin, ?FIELD_PATTERN, [anchored, {capture, all_but_first, binary}]),
{Name, binary_to_existing_atom(BinType, utf8)}.
field_to_json({Name, Type}) when is_binary(Name), is_atom(Type) ->
BinType = atom_to_binary(Type, utf8),
<<Name/bytes, $_, BinType/bytes>>.
decode_error(fetch, {ok, "404", Headers, Body}) ->
case proplists:get_value("Content-Type", Headers) of
"application/json" ->
%% We need to extract the type when not found
{struct, Props} = mochijson2:decode(Body),
Type = binary_to_existing_atom(proplists:get_value(<<"type">>, Props), utf8),
{notfound, Type};
"text/" ++ _ ->
Body
end;
decode_error(_, {ok, "400", _, Body}) ->
{bad_request, Body};
decode_error(_, {ok, "301", _, Body}) ->
{legacy_counter, Body};
decode_error(_, {ok, "403", _, Body}) ->
{forbidden, Body};
decode_error(_, {ok, _, _, Body}) ->
Body.
encode_update_request(register, {assign, Bin}, _Context) ->
{struct, [{<<"assign">>, Bin}]};
encode_update_request(flag, Atom, _Context) ->
atom_to_binary(Atom, utf8);
encode_update_request(counter, Op, _Context) ->
{struct, [Op]};
encode_update_request(set, {update, Ops}, Context) ->
{struct, Ops ++ include_context(Context)};
encode_update_request(set, Op, Context) ->
{struct, [Op|include_context(Context)]};
encode_update_request(hll, {update, Ops}, Context) ->
{struct, Ops ++ include_context(Context)};
encode_update_request(hll, Op, Context) ->
{struct, [Op|include_context(Context)]};
encode_update_request(map, {update, Ops}, Context) ->
{struct, orddict:to_list(lists:foldl(fun encode_map_op/2, orddict:new(), Ops)) ++
include_context(Context)};
encode_update_request(gset, {update, Ops}, Context) ->
{struct, Ops ++ include_context(Context)};
encode_update_request(gset, Op, Context) ->
{struct, [Op|include_context(Context)]}.
encode_map_op({add, Entry}, Ops) ->
orddict:append(add, field_to_json(Entry), Ops);
encode_map_op({remove, Entry}, Ops) ->
orddict:append(remove, field_to_json(Entry), Ops);
encode_map_op({update, {_Key,Type}=Field, Op}, Ops) ->
EncOp = encode_update_request(Type, Op, undefined),
Update = {field_to_json(Field), EncOp},
case orddict:find(update, Ops) of
{ok, {struct, Updates}} ->
orddict:store(update, {struct, [Update|Updates]}, Ops);
error ->
orddict:store(update, {struct, [Update]}, Ops)
end.
include_context(undefined) -> [];
include_context(<<>>) -> [];
include_context(Bin) -> [{<<"context">>, Bin}].
|
//===-- PlatformAppleSimulator.cpp ----------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "PlatformAppleSimulator.h"
#if defined(__APPLE__)
#include <dlfcn.h>
#endif
#include <mutex>
#include <thread>
#include "lldb/Host/PseudoTerminal.h"
#include "lldb/Target/Process.h"
#include "lldb/Utility/LLDBAssert.h"
#include "lldb/Utility/Status.h"
#include "lldb/Utility/StreamString.h"
#include "llvm/Support/Threading.h"
using namespace lldb;
using namespace lldb_private;
#if !defined(__APPLE__)
#define UNSUPPORTED_ERROR ("Apple simulators aren't supported on this platform")
#endif
// Static Functions
void PlatformAppleSimulator::Initialize() { PlatformDarwin::Initialize(); }
void PlatformAppleSimulator::Terminate() { PlatformDarwin::Terminate(); }
/// Default Constructor
PlatformAppleSimulator::PlatformAppleSimulator()
: PlatformDarwin(true), m_core_sim_path_mutex(),
m_core_simulator_framework_path(), m_device() {}
/// Destructor.
///
/// The destructor is virtual since this class is designed to be
/// inherited from by the plug-in instance.
PlatformAppleSimulator::~PlatformAppleSimulator() {}
lldb_private::Status PlatformAppleSimulator::LaunchProcess(
lldb_private::ProcessLaunchInfo &launch_info) {
#if defined(__APPLE__)
LoadCoreSimulator();
CoreSimulatorSupport::Device device(GetSimulatorDevice());
if (device.GetState() != CoreSimulatorSupport::Device::State::Booted) {
Status boot_err;
device.Boot(boot_err);
if (boot_err.Fail())
return boot_err;
}
auto spawned = device.Spawn(launch_info);
if (spawned) {
launch_info.SetProcessID(spawned.GetPID());
return Status();
} else
return spawned.GetError();
#else
Status err;
err.SetErrorString(UNSUPPORTED_ERROR);
return err;
#endif
}
void PlatformAppleSimulator::GetStatus(Stream &strm) {
#if defined(__APPLE__)
// This will get called by subclasses, so just output status on the current
// simulator
PlatformAppleSimulator::LoadCoreSimulator();
std::string developer_dir = GetXcodeDeveloperDirectory().GetPath();
CoreSimulatorSupport::DeviceSet devices =
CoreSimulatorSupport::DeviceSet::GetAvailableDevices(
developer_dir.c_str());
const size_t num_devices = devices.GetNumDevices();
if (num_devices) {
strm.Printf("Available devices:\n");
for (size_t i = 0; i < num_devices; ++i) {
CoreSimulatorSupport::Device device = devices.GetDeviceAtIndex(i);
strm.Printf(" %s: %s\n", device.GetUDID().c_str(),
device.GetName().c_str());
}
if (m_device.hasValue() && m_device->operator bool()) {
strm.Printf("Current device: %s: %s", m_device->GetUDID().c_str(),
m_device->GetName().c_str());
if (m_device->GetState() == CoreSimulatorSupport::Device::State::Booted) {
strm.Printf(" state = booted");
}
strm.Printf("\nType \"platform connect <ARG>\" where <ARG> is a device "
"UDID or a device name to disconnect and connect to a "
"different device.\n");
} else {
strm.Printf("No current device is selected, \"platform connect <ARG>\" "
"where <ARG> is a device UDID or a device name to connect to "
"a specific device.\n");
}
} else {
strm.Printf("No devices are available.\n");
}
#else
strm.Printf(UNSUPPORTED_ERROR);
#endif
}
Status PlatformAppleSimulator::ConnectRemote(Args &args) {
#if defined(__APPLE__)
Status error;
if (args.GetArgumentCount() == 1) {
if (m_device)
DisconnectRemote();
PlatformAppleSimulator::LoadCoreSimulator();
const char *arg_cstr = args.GetArgumentAtIndex(0);
if (arg_cstr) {
std::string arg_str(arg_cstr);
std::string developer_dir = GetXcodeDeveloperDirectory().GetPath();
CoreSimulatorSupport::DeviceSet devices =
CoreSimulatorSupport::DeviceSet::GetAvailableDevices(
developer_dir.c_str());
devices.ForEach(
[this, &arg_str](const CoreSimulatorSupport::Device &device) -> bool {
if (arg_str == device.GetUDID() || arg_str == device.GetName()) {
m_device = device;
return false; // Stop iterating
} else {
return true; // Keep iterating
}
});
if (!m_device)
error.SetErrorStringWithFormat(
"no device with UDID or name '%s' was found", arg_cstr);
}
} else {
error.SetErrorString("this command take a single UDID argument of the "
"device you want to connect to.");
}
return error;
#else
Status err;
err.SetErrorString(UNSUPPORTED_ERROR);
return err;
#endif
}
Status PlatformAppleSimulator::DisconnectRemote() {
#if defined(__APPLE__)
m_device.reset();
return Status();
#else
Status err;
err.SetErrorString(UNSUPPORTED_ERROR);
return err;
#endif
}
lldb::ProcessSP PlatformAppleSimulator::DebugProcess(
ProcessLaunchInfo &launch_info, Debugger &debugger,
Target *target, // Can be NULL, if NULL create a new target, else use
// existing one
Status &error) {
#if defined(__APPLE__)
ProcessSP process_sp;
// Make sure we stop at the entry point
launch_info.GetFlags().Set(eLaunchFlagDebug);
// We always launch the process we are going to debug in a separate process
// group, since then we can handle ^C interrupts ourselves w/o having to
// worry about the target getting them as well.
launch_info.SetLaunchInSeparateProcessGroup(true);
error = LaunchProcess(launch_info);
if (error.Success()) {
if (launch_info.GetProcessID() != LLDB_INVALID_PROCESS_ID) {
ProcessAttachInfo attach_info(launch_info);
process_sp = Attach(attach_info, debugger, target, error);
if (process_sp) {
launch_info.SetHijackListener(attach_info.GetHijackListener());
// Since we attached to the process, it will think it needs to detach
// if the process object just goes away without an explicit call to
// Process::Kill() or Process::Detach(), so let it know to kill the
// process if this happens.
process_sp->SetShouldDetach(false);
// If we didn't have any file actions, the pseudo terminal might have
// been used where the slave side was given as the file to open for
// stdin/out/err after we have already opened the master so we can
// read/write stdin/out/err.
int pty_fd = launch_info.GetPTY().ReleaseMasterFileDescriptor();
if (pty_fd != PseudoTerminal::invalid_fd) {
process_sp->SetSTDIOFileDescriptor(pty_fd);
}
}
}
}
return process_sp;
#else
return ProcessSP();
#endif
}
FileSpec PlatformAppleSimulator::GetCoreSimulatorPath() {
#if defined(__APPLE__)
std::lock_guard<std::mutex> guard(m_core_sim_path_mutex);
if (!m_core_simulator_framework_path.hasValue()) {
if (FileSpec fspec = GetXcodeDeveloperDirectory()) {
std::string developer_dir = fspec.GetPath();
StreamString cs_path;
cs_path.Printf(
"%s/Library/PrivateFrameworks/CoreSimulator.framework/CoreSimulator",
developer_dir.c_str());
m_core_simulator_framework_path = FileSpec(cs_path.GetData());
FileSystem::Instance().Resolve(*m_core_simulator_framework_path);
}
}
return m_core_simulator_framework_path.getValue();
#else
return FileSpec();
#endif
}
void PlatformAppleSimulator::LoadCoreSimulator() {
#if defined(__APPLE__)
static llvm::once_flag g_load_core_sim_flag;
llvm::call_once(g_load_core_sim_flag, [this] {
const std::string core_sim_path(GetCoreSimulatorPath().GetPath());
if (core_sim_path.size())
dlopen(core_sim_path.c_str(), RTLD_LAZY);
});
#endif
}
#if defined(__APPLE__)
CoreSimulatorSupport::Device PlatformAppleSimulator::GetSimulatorDevice() {
if (!m_device.hasValue()) {
const CoreSimulatorSupport::DeviceType::ProductFamilyID dev_id =
CoreSimulatorSupport::DeviceType::ProductFamilyID::iPhone;
std::string developer_dir = GetXcodeDeveloperDirectory().GetPath();
m_device = CoreSimulatorSupport::DeviceSet::GetAvailableDevices(
developer_dir.c_str())
.GetFanciest(dev_id);
}
if (m_device.hasValue())
return m_device.getValue();
else
return CoreSimulatorSupport::Device();
}
#endif
|
# Support for the DAVIS240 camera.
# DAVIS240 has three different event types.
const DAVIS240_SIZE = (180,240)
""" DVS event (from event-based sensor) """
struct DAVIS240_DVS
y::Int16
x::Int16
ext::Bool
pol::Bool
end
""" APS packet (from frame-based sensor) """
struct DAVIS240_APS
y::Int16
x::Int16
kind::Int8
sample::Int16
end
""" IMU packet """
struct DAVIS240_IMU
channel::Int8
sample::Int16  # the extracted sample is 15 bits wide (see convert below); Int8 cannot hold it
end
const DAVIS240_Any = Union{DAVIS240_DVS, DAVIS240_APS, DAVIS240_IMU}
""" Extracts the event subtype: DVS, APS or IMU """
event_subtype(data::UInt32) = if ((data >> 31) & 0x1 == 0) DAVIS240_DVS
elseif ((data >> 10) & 0x03) == 0x03 DAVIS240_IMU
else DAVIS240_APS
end
# Events can be interpreted as a specific class
# if they have the corresponding subtype:
isevent(T::Type{<:DAVIS240_Any}, e::Event{UInt32}) =
event_subtype(e.address) === T
event_pol(e::DAVIS240_DVS) = e.pol
event_coord(e::Union{DAVIS240_DVS,DAVIS240_APS}) = (e.x, e.y)
event_location(e::Union{DAVIS240_DVS,DAVIS240_APS}, imsize) = (DAVIS240_SIZE[1]-e.y, e.x+1)
image_size(e::Type{<:Union{DAVIS240_DVS,DAVIS240_APS}}) = DAVIS240_SIZE
function Base.convert(::Type{DAVIS240_DVS}, a::UInt32)
y = (a >> 22) & 0x01FF
x = (a >> 12) & 0x01FF
ext = (a >> 10) & 0x1
pol = (a >> 11) & 0x1
DAVIS240_DVS(y, x, ext, pol)
end
function Base.convert(::Type{DAVIS240_APS}, a::UInt32)
y = (a >> 22) & 0x01FF
x = (a >> 12) & 0x01FF
kind = (a >> 10) & 0x03
sample = a & 0x03FF
DAVIS240_APS(y, x, kind, sample)
end
function Base.convert(::Type{DAVIS240_IMU}, a::UInt32)
channel = (a >> 28) & 0x07
sample = (a >> 12) & 0x7fff
DAVIS240_IMU(channel, sample)
end
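# Illustrative decoding sketch (the raw word below is made up for demonstration):
#   raw = 0x00401800 % UInt32
#   T   = event_subtype(raw)   # DAVIS240_DVS, DAVIS240_APS or DAVIS240_IMU
#   ev  = convert(T, raw)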
|
const fs = require("fs");
const jsonc = require("jsonc-parser");
module.exports = {
register(module, filename) {
const content = fs.readFileSync(filename, "utf8");
// jsonc.parse() does not throw on malformed input; it reports problems
// through an optional errors array instead.
const errors = [];
const result = jsonc.parse(content, errors);
if (errors.length > 0) {
throw new Error(filename + ": JSONC parse error at offset " + errors[0].offset);
}
module.exports = result;
},
};
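// Usage sketch (illustrative; the require path below is hypothetical):
// hooking the loader into Node's legacy require.extensions mechanism lets
// require("./config.jsonc") resolve through this module:
//   require.extensions[".jsonc"] = require("./jsonc-loader").register;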
|
import os
import numpy as np
import pygame
from .maze import Maze
class MazeView2D:
def __init__(self, maze_name="Maze2D", maze_file_path=None,
maze_size=(30, 30), screen_size=(600, 600),
has_loops=False, num_portals=0):
# PyGame configurations
pygame.init()
pygame.display.set_caption(maze_name)
self.clock = pygame.time.Clock()
self.__game_over = False
# Load a maze
if maze_file_path is None:
self.__maze = Maze(maze_size=maze_size, has_loops=has_loops, num_portals=num_portals)
else:
if not os.path.exists(maze_file_path):
dir_path = os.path.dirname(os.path.abspath(__file__))
rel_path = os.path.join(dir_path, "maze_samples", maze_file_path)
if os.path.exists(rel_path):
maze_file_path = rel_path
else:
raise FileNotFoundError("Cannot find %s." % maze_file_path)
self.__maze = Maze(maze_cells=Maze.load_maze(maze_file_path))
self.maze_size = self.__maze.maze_size
# to show the right and bottom border
self.screen = pygame.display.set_mode(screen_size)
self.__screen_size = tuple(map(sum, zip(screen_size, (-1, -1))))
# Set the starting point
self.__entrance = np.zeros(2, dtype=int)
# Set the Goal
self.__goal = np.array(self.maze_size) - np.array((1, 1))
# Create the Robot
self.__robot = self.entrance
# Create a background
self.background = pygame.Surface(self.screen.get_size()).convert()
self.background.fill((255, 255, 255))
# Create a layer for the maze
self.maze_layer = pygame.Surface(self.screen.get_size()).convert_alpha()
self.maze_layer.fill((0, 0, 0, 0,))
# show the maze
self.__draw_maze()
# show the portals
self.__draw_portals()
# show the robot
self.__draw_robot()
# show the entrance
self.__draw_entrance()
# show the goal
self.__draw_goal()
def render(self, mode="human"):
try:
img_output = self.__view_update(mode)
self.__controller_update()
except Exception as e:
self.__game_over = True
self.quit_game()
raise e
else:
return img_output
def quit_game(self):
try:
self.__game_over = True
pygame.display.quit()
pygame.quit()
except Exception:
pass
def can_move_robot(self, dir):
if dir not in self.__maze.COMPASS.keys():
raise ValueError("dir cannot be %s. The only valid dirs are %s."
% (str(dir), str(self.__maze.COMPASS.keys())))
return self.__maze.is_open(self.__robot, dir)
def move_robot(self, dir):
if dir not in self.__maze.COMPASS.keys():
raise ValueError("dir cannot be %s. The only valid dirs are %s."
% (str(dir), str(self.__maze.COMPASS.keys())))
if self.__maze.is_open(self.__robot, dir):
# update the drawing
self.__draw_robot(transparency=0)
# move the robot
self.__robot += np.array(self.__maze.COMPASS[dir])
# if it's in a portal afterward
if self.maze.is_portal(self.robot):
portal = self.maze.get_portal(tuple(self.robot))
self.__robot = np.array(portal.teleport(tuple(self.robot)))
self.__draw_robot(transparency=255)
def reset_robot(self):
self.__draw_robot(transparency=0)
self.__robot = np.zeros(2, dtype=int)
self.__draw_robot(transparency=255)
def __controller_update(self):
if not self.__game_over:
for event in pygame.event.get():
if event.type == pygame.QUIT:
self.__game_over = True
self.quit_game()
def __view_update(self, mode="human"):
if not self.__game_over:
# update the robot's position
self.__draw_entrance()
self.__draw_goal()
self.__draw_portals()
self.__draw_robot()
# update the screen
self.screen.blit(self.background, (0, 0))
self.screen.blit(self.maze_layer, (0, 0))
if mode == "human":
pygame.display.flip()
return np.flipud(np.rot90(pygame.surfarray.array3d(pygame.display.get_surface())))
def __draw_maze(self):
line_colour = (0, 0, 0, 255)
# drawing the horizontal lines
for y in range(self.maze.MAZE_H + 1):
pygame.draw.line(self.maze_layer, line_colour, (0, y * self.CELL_H),
(self.SCREEN_W, y * self.CELL_H))
# drawing the vertical lines
for x in range(self.maze.MAZE_W + 1):
pygame.draw.line(self.maze_layer, line_colour, (x * self.CELL_W, 0),
(x * self.CELL_W, self.SCREEN_H))
# breaking the walls
for x in range(len(self.maze.maze_cells)):
for y in range(len(self.maze.maze_cells[x])):
# check the which walls are open in each cell
walls_status = self.maze.get_walls_status(self.maze.maze_cells[x, y])
dirs = ""
for dir, open in walls_status.items():
if open:
dirs += dir
self.__cover_walls(x, y, dirs)
def __cover_walls(self, x, y, dirs, colour=(0, 0, 255, 15)):
dx = x * self.CELL_W
dy = y * self.CELL_H
if not isinstance(dirs, str):
raise TypeError("dirs must be a str.")
for dir in dirs:
if dir == "S":
line_head = (dx + 1, dy + self.CELL_H)
line_tail = (dx + self.CELL_W - 1, dy + self.CELL_H)
elif dir == "N":
line_head = (dx + 1, dy)
line_tail = (dx + self.CELL_W - 1, dy)
elif dir == "W":
line_head = (dx, dy + 1)
line_tail = (dx, dy + self.CELL_H - 1)
elif dir == "E":
line_head = (dx + self.CELL_W, dy + 1)
line_tail = (dx + self.CELL_W, dy + self.CELL_H - 1)
else:
raise ValueError("The only valid directions are (N, S, E, W).")
pygame.draw.line(self.maze_layer, colour, line_head, line_tail)
def __draw_robot(self, colour=(0, 0, 150), transparency=255):
x = int(self.__robot[0] * self.CELL_W + self.CELL_W * 0.5 + 0.5)
y = int(self.__robot[1] * self.CELL_H + self.CELL_H * 0.5 + 0.5)
r = int(min(self.CELL_W, self.CELL_H)/5 + 0.5)
pygame.draw.circle(self.maze_layer, colour + (transparency,), (x, y), r)
def __draw_entrance(self, colour=(0, 0, 150), transparency=235):
self.__colour_cell(self.entrance, colour=colour, transparency=transparency)
def __draw_goal(self, colour=(150, 0, 0), transparency=235):
self.__colour_cell(self.goal, colour=colour, transparency=transparency)
def __draw_portals(self, transparency=160):
colour_range = np.linspace(0, 255, len(self.maze.portals), dtype=int)
colour_i = 0
for portal in self.maze.portals:
colour = ((100 - colour_range[colour_i]) % 255, colour_range[colour_i], 0)
colour_i += 1
for location in portal.locations:
self.__colour_cell(location, colour=colour, transparency=transparency)
def __colour_cell(self, cell, colour, transparency):
if not (isinstance(cell, (list, tuple, np.ndarray)) and len(cell) == 2):
raise TypeError("cell must a be a tuple, list, or numpy array of size 2")
x = int(cell[0] * self.CELL_W + 0.5 + 1)
y = int(cell[1] * self.CELL_H + 0.5 + 1)
w = int(self.CELL_W + 0.5 - 1)
h = int(self.CELL_H + 0.5 - 1)
pygame.draw.rect(self.maze_layer, colour + (transparency,), (x, y, w, h))
@property
def maze(self):
return self.__maze
@property
def robot(self):
return self.__robot
@property
def entrance(self):
return self.__entrance
@property
def goal(self):
return self.__goal
@property
def game_over(self):
return self.__game_over
@property
def SCREEN_SIZE(self):
return tuple(self.__screen_size)
@property
def SCREEN_W(self):
return int(self.SCREEN_SIZE[0])
@property
def SCREEN_H(self):
return int(self.SCREEN_SIZE[1])
@property
def CELL_W(self):
return float(self.SCREEN_W) / float(self.maze.MAZE_W)
@property
def CELL_H(self):
return float(self.SCREEN_H) / float(self.maze.MAZE_H)
if __name__ == "__main__":
maze = MazeView2D(screen_size=(500, 500), maze_size=(10, 10))
maze.render()
input("Press Enter to quit.")
|
package ba.sake.scalarizmi.misc.max_subarray
/**
* Find the nonempty, contiguous subarray of `array` whose values have the largest sum.
*/
trait MaximumSubarrayAlgorithm {
/** @return (start-index, end-index, sum) */
def sort(array: Array[Int]): (Int, Int, Int)
/** @return Name of the algorithm */
def name = getClass.getName
}
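/**
 * A minimal sketch of one possible implementation: Kadane's algorithm, O(n).
 * Illustrative only; the object name is hypothetical, and it assumes a
 * nonempty input array, per the trait contract.
 */
object KadaneMaximumSubarray extends MaximumSubarrayAlgorithm {
  override def sort(array: Array[Int]): (Int, Int, Int) = {
    var bestStart = 0; var bestEnd = 0; var bestSum = array(0)
    var curStart = 0; var curSum = array(0)
    for (i <- 1 until array.length) {
      // Restart the running window when its sum goes negative.
      if (curSum < 0) { curStart = i; curSum = array(i) }
      else curSum += array(i)
      if (curSum > bestSum) { bestStart = curStart; bestEnd = i; bestSum = curSum }
    }
    (bestStart, bestEnd, bestSum)
  }
}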
|
package digital.capsa.archetypes.eventbus.data
import com.fasterxml.jackson.annotation.JsonTypeName
import digital.capsa.archetypes.core.aggregates.MemberId
@JsonTypeName("MemberRegistered")
class MemberRegistered(
id: MemberId,
var firstName: String,
var lastName: String,
var email: String,
var phone: String? = null
) : MemberEventData(
id = id
)
|
using FW.Marketplace.Interfaces;
using FW.Marketplace.ViewModel;
using Microsoft.AspNetCore.Mvc;
using System.Threading.Tasks;
namespace FW.Marketplace.Controllers
{
[Route("[controller]")]
public class AccountController : Controller
{
private readonly IAccountService _accountService;
public AccountController(IAccountService accountService)
{
_accountService = accountService;
}
[HttpPost("login")]
public async Task<IActionResult> Login([FromBody]LoginViewModel model)
{
var result = await _accountService.LoginAsync(model);
return Ok(result);
}
}
}
|
#!/bin/sh -e
. $HOME/lib/sh/stdlib.sh
usage() (die 100 usage 'imgcat [filename ...]')
if base64 --version 2>&1 | grep -Eiq 'gnu|fourmilab'; then
decode() (exec base64 -d ${1+"$@"})
else
decode() (exec base64 -D ${1+"$@"})
fi
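# print_image emits the iTerm2 inline-image escape sequence
# (OSC 1337 "File=..."): an optional base64-encoded file name, the decoded
# payload size in bytes, inline=1 to render in place, then the base64 data.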
print_image() {
data=`base64`
if test x"$1" = x; then
name=
else
printf '%s\n' "$1"
name='name='`printf %s "$1" | base64`';'
fi
size=`printf %s "$data" | decode | wc -c`
printf '\033]1337;File=%ssize=%d;inline=1:%s\007\n' "$name" "$size" "$data"
}
if `isatty 0`; then
test $# -gt 0 || usage
for file do
test -e "$file" || die 111 fatal "$file: No such file or directory"
test -d "$file" && continue
print_image "$file" <"$file"
done
else
test $# -le 1 || usage
print_image "$1"
fi
|
4 月 13 日上海新增本土确诊病例 2573 例、无症状感染者 25146 例,目前情况如何?
前几天去华为面试,后来说通过了,但是 HR 告诉我签约签的是华为慧通的,我该不该去?
网传微软亚洲研究院(MSRA)停招国防七子及北邮学生,是否属实?这将产生什么影响?
俄罗斯国防部称俄「莫斯科」号导弹巡洋舰发生火灾,受损严重,事故原因或是什么?这会造成哪些影响?
大人也是经历过婚姻的,也明知道可能会不幸福,为什么还会催促自己的孩子步入像他们的生活呢?
上海某小区的一场以可乐开头的换物接龙火了,疫情期间小区居民之间应该如何互助?
如何看待 4 月 12 日世卫「奥密克戎导致大量住院和死亡病例」报告,在中文网络引发的争议?
如何看待腾讯加速器停止加速外服游戏业务,仅支持国服游戏加速?
如何看待网传一骑手「不加价不给送」,骑手回应以为是米饭结果是袋装大米?
如何评价美媒夸赞深圳《又一个中国城市的十亿美元富豪人数超过纽约》一文?
如何看待「873 名优秀运动员获本科保送推荐,樊振东、朱易在列」,这对运动员们会产生什么影响?
什么样的小说才能改编成大 ip?
如何看待 90 后人大女硕士返乡种果树并用短视频科普农技?做一名农业主播有前途吗?
凤凰卫视台湾站因被民进党当局认定为陆资企业,将于 5 月关站,将产生哪些影响?
网传上海徐汇永康市民向居委打电话求救,当地居委回应「老人是阳性感染者,已送医就诊」,目前进展如何?
世卫组织称 HPV 疫苗单剂接种即可预防宫颈癌 ,这意味着什么?将对疫苗行业带来哪些影响?
俄军出动重型装备警告芬兰不要加入北约,芬兰怎么做才会更安全?
如何看待 iPhone SE 3 在中国销量惨淡,在日本市场卖爆的现象?
《三十而已》为什么顾佳的结局最差?
网传上海虹口区卫健委钱文雄夫人自杀,上海警方回应系谣言,造谣者该承担怎样的责任?
2022 LPL 春季赛复赛后连续两个让二追三,有没有是假赛的可能?
网传科大讯飞给员工颁发的「毕业证」曝光,此举是普通离职信还是暗示裁员?真实情况是怎样的?
康师傅回应老坛酸菜面重新上架,称「涉及插旗产品已全部下架」,你还会信任康师傅这一品牌吗?
如何看待河南顾客在网店购物时,遭网店客服人员辱骂「河南省就是贫民窟」,网店回应「情绪上头,已离职」?
新冠病毒有哪些经过证实大量存在的后遗症?
网传特斯拉 Model Y 侧后方被撞维修费近 20 万,一体式铸造车身售后那么贵为什么还被采用?
上海网信办联合公安部门连续查处多起涉疫谣言,这起到了什么警示作用?
如何看待厦门海关在进境空箱中截获 2000 只活体蟑螂,这种病媒生物有多大危害?
国常会提出「适时运用降准等货币政策工具」,这主要出于哪些考虑?降准将产生哪些影响?
有什么甜文小说推荐?
独董刘姝威表示,柔宇科技产线投片良率达 81.6% ,建议各级政府「拯救柔宇」,哪些信息值得关注?
看了《亲爱的小孩》你有什么感受?
有哪些超级好看的手机壁纸?
大秦铁路天津段两辆货运火车发生碰撞,目前暂无人员伤亡报告,事故原因可能是什么?可能带来哪些影响?
为什么很多游戏里主角是救世主 / 战神,但还是要安排他们去做很多无聊且乏味的任务?
上海疾控回应小区封控已久为何还出现阳性感染者,因数据延时、家庭传播较多,封控期间还需注意哪些防控细节?
上海一方舱医院 6740 人出院,成本轮疫情单日单院最多出院人数,这说明了什么?疫情拐点到来了吗?
2022 LPL 春季胜者组决赛 TES 与 RNG「全华班」对决,你更看好谁?
神舟十三号航天员即将返回地球,中国航天再次取得新突破。环顾世界,如何评价各国航天事业发展?
饶毅评梁建章的抗疫策略,认为不应只看统计学规律,还需考虑突变不确定性,「病毒越流行毒性越弱」是误解吗?
看了 LPL 两场让二追三的半决赛以后,如何评价 LPL 的实力,跟 LCK 与 LEC 比能排到什么位置?
为什么有些人心智成熟的比较晚呢?
为什么大家推荐耳机都不提漫步者,全都是森海塞尔,AKG 之类的?
大家伙儿在游戏《三国杀》里遇到最令你生气的时刻是哪一个?
如何评价椰树集团再发争议招聘广告「入学就有车、有房、有高薪」?
英伟达旗舰 RTX 3080 Ti 跌破首发价,这意味着什么?释放了哪些信号?
vivo X Fold 折叠屏手机用起来是一种什么体验?
中证协等三协会倡议「坚决遏制 NFT 金融化证券化倾向,从严防范非法金融活动风险」,透露出什么信号?
最近疫情,有哪些适合长期保存的蔬菜或水果可以囤在家?
如何看待有媒体称 OPPO 会采用 TSMC(台积电)的 4nm 工艺流片旗下第一枚 AP?有哪些值得关注的信息?
|
#!/bin/bash -e
# Executes ORT's Analyzer to determines the dependencies of projects and their metadata,
# abstracting which package managers or build systems are actually being used.
if [ "$VCS_TYPE" = "git" ]; then
pushd $PROJECT_DIR
if [ "$DISABLE_SHALLOW_CLONE" = "true" ]; then
echo "Unshallowing the cloned project..."
git fetch --unshallow
else
# Fetch tags for deepened commits
tags_list=$(mktemp)
refs_list=$(mktemp)
git ls-remote --tags > $tags_list
git log --pretty=format:"%H" > $refs_list
grep -Ff $refs_list $tags_list | awk '{print $2}' | sed 's/\^{}//' | xargs -r -I tag git fetch origin "tag:tag"
rm $refs_list $tags_list
fi
popd
fi
ORT_ANALYZER_OPTIONS="--package-curations-file $ORT_CONFIG_CURATIONS_FILE"
if [[ -n "$ORT_CONFIG_FILE" ]]; then
ORT_ANALYZER_OPTIONS="$ORT_ANALYZER_OPTIONS --repository-configuration-file $CI_PROJECT_DIR/$ORT_CONFIG_FILE";
fi
if [[ "$ORT_ALLOW_DYNAMIC_VERSIONS" = "true" ]]; then
ORT_OPTIONS="-P ort.analyzer.allowDynamicVersions=true"
fi
if [[ "$VCS_TYPE" = "git-repo" ]]; then
ANALYZER_INPUT_DIR="$PROJECT_DIR"
else
ANALYZER_INPUT_DIR="${PROJECT_DIR}${VCS_PATH:+/$VCS_PATH}"
fi
# Convert the LABELS variable with format "key1=val1, key1=val2" into separate ORT label options (-l).
[[ -z "$LABELS" ]] || LABEL_OPTIONS=$(${CI_PROJECT_DIR}/scripts/labels.sh)
echo "------------------------------------------"
echo "Running ORT analyzer..."
echo "------------------------------------------"
$ORT \
--$ORT_LOG_LEVEL \
--stacktrace \
$ORT_OPTIONS \
analyze \
$ORT_ANALYZER_OPTIONS \
-i $ANALYZER_INPUT_DIR \
-o $ORT_RESULTS_DIR \
-f JSON \
-l GITLAB_PIPELINE_URL="$CI_PIPELINE_URL" \
-l SW_NAME="$SW_NAME" \
-l SW_VERSION="$SW_VERSION" \
-l VCS_TYPE="$VCS_TYPE" \
-l VCS_URL="$VCS_URL" \
-l VCS_REVISION="$VCS_REVISION" \
-l VCS_PATH="$VCS_PATH" \
-l LABELS="$LABELS" \
-l ORT_CONFIG_FILE="$ORT_CONFIG_FILE" \
-l ORT_CONFIG_REPO_SSH_URL="$ORT_CONFIG_REPO_SSH_URL" \
-l ORT_CONFIG_REVISION="$ORT_CONFIG_REVISION" \
-l ALLOW_DYNAMIC_VERSIONS="$ORT_ALLOW_DYNAMIC_VERSIONS" \
-l DISABLE_SHALLOW_CLONE="$DISABLE_SHALLOW_CLONE" \
-l ORT_LOG_LEVEL="$ORT_LOG_LEVEL" \
-l ORT_VERSION="$ORT_VERSION" \
-l UPSTREAM_BRANCH="$UPSTREAM_BRANCH" \
-l UPSTREAM_PROJECT_PATH="$UPSTREAM_PROJECT_PATH" \
-l UPSTREAM_PROJECT_TITLE="$UPSTREAM_PROJECT_TITLE" \
-l UPSTREAM_PROJECT_ID="$UPSTREAM_PROJECT_ID" \
-l UPSTREAM_USER_LOGIN="$UPSTREAM_USER_LOGIN" \
-l UPSTREAM_PROJECT_URL="$UPSTREAM_PROJECT_URL" \
-l UPSTREAM_MERGE_REQUEST_IID="$UPSTREAM_MERGE_REQUEST_IID" \
-l UPSTREAM_PIPELINE_URL="$UPSTREAM_PIPELINE_URL" \
-l CI_PIPELINE_URL="$CI_PIPELINE_URL" \
-l CI_JOB_URL="$CI_JOB_URL" \
$LABEL_OPTIONS
|
use heapless::{String, Vec};
use crate::console::{ack, error, info};
pub const MAX_TOKENS: usize = 4;
pub const MSG_SIZE: usize = 64;
#[derive(PartialEq, Eq)]
pub enum CommandResult<'a> {
Error(&'a str),
Handled,
Result(String<MSG_SIZE>),
InvalidArguments,
NotHandled,
}
pub struct Parser {
resp: String<MSG_SIZE>,
}
impl Parser {
pub fn new() -> Parser {
let resp: String<MSG_SIZE> = String::new();
Self { resp }
}
fn assemble(&mut self, tokens: &Vec<&str, MAX_TOKENS>) -> &str {
self.resp.clear();
for t in tokens.iter().take(tokens.len() - 1) {
let _ = self.resp.push_str(t);
let _ = self.resp.push(' ');
}
let _ = self.resp.push_str(tokens[tokens.len() - 1]);
self.resp.as_str()
}
pub fn handle_result(&mut self, tokens: &Vec<&str, MAX_TOKENS>, result: CommandResult) -> bool {
match result {
CommandResult::Error(msg) => {
self.resp.clear();
let _ = self.resp.push_str(tokens[0]);
let _ = self.resp.push_str(": ");
let _ = self.resp.push_str(msg);
error(self.resp.as_str());
},
CommandResult::Handled => {
ack(tokens[0]);
},
CommandResult::Result(msg) => {
self.resp.clear();
let _ = self.resp.push_str(tokens[0]);
let _ = self.resp.push_str(": ");
let _ = self.resp.push_str(msg.as_str());
info(self.resp.as_str());
},
CommandResult::InvalidArguments => {
self.resp.clear();
let _ = self.resp.push_str(tokens[0]);
let _ = self.resp.push_str(": invalid arguments");
error(self.resp.as_str());
},
CommandResult::NotHandled => {
return false;
},
}
return true;
}
pub fn process(
&mut self,
tokens: &Vec<&str, MAX_TOKENS>
) -> bool {
if !tokens.is_empty() {
if tokens[0] == "echo" {
info(self.assemble(&tokens));
} else if tokens[0] == "panic" {
let x = [0, 1, 2];
let i = x.len() + 1;
let _y = x[i];
} else {
self.resp.clear();
let _ = self.resp.push_str(tokens[0]);
let _ = self.resp.push_str(": unknown command");
error(self.resp.as_str());
return false;
}
ack(tokens[0]);
}
return true;
}
pub fn tokenize<'a>(&self, cmd: &'a String<MSG_SIZE>) -> Option<Vec<&'a str, MAX_TOKENS>> {
let mut tokens: Vec<&str, MAX_TOKENS> = Vec::new();
for token in cmd.split_ascii_whitespace() {
let _ = tokens.push(token);
}
if tokens.is_empty() {
return None;
}
Some(tokens)
}
}
|
---
title: Elasticsearch Service
---
{% capture overview %}
{% endcapture %}
A very basic implementation of an Elasticsearch service. The service captures all requests.
#### Configuration
```
[service.elasticsearch]
type="elasticsearch"
port="tcp/9200"
name="AW2LChf"
cluster_name="elasticsearch"
cluster_uuid="ay20oRi4SHmlOPAyTrPh6A"
```
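To quickly exercise a running instance (illustrative; assumes the default configuration above):

```
curl http://localhost:9200/
```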
|
create table tbl(i int) partition by range(i) (partition p1 values less than(3),partition p2 values less than(6));
insert into tbl values(1),(2),(3),(4),(5);
alter table tbl reorganize partition p1,p2 into(partition p12 values less than(6));
alter table tbl reorganize partition p12 into (partition p11 values less than(2),partition p22 values less than(6));
select * from tbl order by 1;
drop tbl;
create table tbl(i int) partition by range(i) (partition p0 values less than(10),partition p1 values less than(20));
insert into tbl values(5),(15);
alter table tbl promote partition p0;
alter table tbl reorganize partition p1 into(partition p2 values less than(15),partition p3 values less than(20));
select * from tbl order by 1;
drop tbl;
drop tbl__p__p0;
create table tbl(d date) partition by hash(d) partitions 5;
insert into tbl select adddate('2012-2-2',rownum) from db_class limit 10;
create index i on tbl(d) where d>'2012-1-1';
alter table tbl coalesce partition 3;
alter table tbl add partition partitions 2;
select * from tbl order by 1;
alter table tbl remove partitioning;
alter table tbl partition by list(d) (
partition p0 values in ('2012-2-3','2012-2-4','2012-2-5','2012-2-6'),
partition p1 values in ('2012-2-7','2012-2-8'),
partition p2 values in('2012-2-9','2012-2-11','2012-2-10','2012-2-12'));
select * from tbl order by 1;
alter table tbl promote partition p2;
alter table tbl reorganize partition p1 into (
partition p1 values in ('2012-2-7','2012-2-8'),
partition p12 values in(NULL));
select * from tbl order by 1 desc;
drop tbl;
drop tbl__p__p2;
|
import {expect, mockTabris, restore, stub} from '../test';
import ClientMock from './ClientMock';
import NativeObject from '../../src/tabris/NativeObject';
import WidgetCollection from '../../src/tabris/WidgetCollection';
import {types} from '../../src/tabris/property-types';
import {omit} from '../../src/tabris/util';
import Color from '../../src/tabris/Color';
import Image from '../../src/tabris/Image';
import LinearGradient from '../../src/tabris/LinearGradient';
describe('property-types', function() {
// Allow creating instances of NativeObject
class CustomNativeObject extends NativeObject {
get _nativeType() { return 'CustomNativeObject'; }
}
beforeEach(function() {
const client = new ClientMock();
mockTabris(client);
});
afterEach(restore);
describe('ColorValue', function() {
it('encode translates "initial" to `undefined`', function() {
expect(types.ColorValue.encode('initial')).to.equal(undefined);
});
it('encode translates `null` to `undefined`', function() {
expect(types.ColorValue.encode(null)).to.equal(undefined);
});
it('decode translates `null` to `initial`', function() {
expect(types.ColorValue.decode(null)).to.equal('initial');
});
});
describe('shader', function() {
it('encode translates "initial" to `undefined`', function() {
expect(types.shader.encode('initial')).to.equal(undefined);
});
it('encode translates `null` to `undefined`', function() {
expect(types.shader.encode(null)).to.equal(undefined);
});
it('encode throws for invalid values', function() {
expect(() => types.shader.encode(12)).to.throw('12 must be a valid LinearGradientValue or ColorValue');
expect(() => {
types.shader.encode('linear-gradient(to right bottom, blue, red)');
}).to.throw('Invalid direction "right bottom". Corners are not supported.');
});
it('encode converts linear gradient value to a linear gradient shader', function() {
const shader = types.shader.encode('linear-gradient(red -30%, blue)');
expect(shader.type).to.equal('linearGradient');
expect(shader.angle).to.equal(180);
expect(shader.colors[0]).to.deep.equal([Color.red.toArray(), -0.3]);
expect(shader.colors[1]).to.deep.equal([Color.blue.toArray(), null]);
});
it('encode converts image object to image shader', function() {
const shader = types.shader.encode({src: 'foo.png'});
expect(shader.type).to.equal('image');
expect(shader.image).to.deep.equal(['foo.png', null, null, null]);
});
it('encode converts image object with scale to image shader', function() {
const shader = types.shader.encode({src: 'foo.png', scale: 2});
expect(shader.type).to.equal('image');
expect(shader.image).to.deep.equal(['foo.png', null, null, 2]);
});
it('encode converts image object with width and height to image shader', function() {
const shader = types.shader.encode({src: 'foo.png', width: 200, height: 100});
expect(shader.type).to.equal('image');
expect(shader.image).to.deep.equal(['foo.png', 200, 100, null]);
});
it('encode converts image object with width only to image shader', function() {
const shader = types.shader.encode({src: 'foo.png', width: 200});
expect(shader.type).to.equal('image');
expect(shader.image).to.deep.equal(['foo.png', 200, null, null]);
});
it('encode converts image string to image shader', function() {
const shader = types.shader.encode('foo.png');
expect(shader.type).to.equal('image');
expect(shader.image).to.deep.equal(['foo.png', null, null, null]);
});
it('decode converts falsy to "initial"', function() {
expect(types.shader.decode(null)).to.equal('initial');
});
it('decode converts linear gradient shader to a LinearGradient', function() {
const shader = {
type: 'linearGradient',
colors: [[Color.red.toArray(), -0.3], [Color.blue.toArray(), null]],
angle: 180
};
const linearGradient = types.shader.decode(shader);
expect(linearGradient).to.be.instanceof(LinearGradient);
expect(linearGradient.direction).to.equal(180);
expect(linearGradient.colorStops).to.deep.equal([[Color.red, {percent: -30}], Color.blue]);
});
it('decode converts color shader to Color', function() {
const color = types.shader.decode({color: [0, 0, 255, 255], type: 'color'});
expect(color).to.be.instanceof(Color);
expect(color.toString()).to.equal('rgb(0, 0, 255)');
});
it('decode converts image shader to Image', function() {
const image = types.shader.decode({image: ['foo.png', null, null, null], type: 'image'});
expect(image).to.be.instanceof(Image);
expect(image.src).to.equal('foo.png');
});
it('decodes an encoded gradient shader', function() {
const encodedShader = types.shader.encode('linear-gradient(red -30%, blue)');
const decodedShader = types.shader.decode(encodedShader);
expect(decodedShader).to.be.instanceof(LinearGradient);
expect(decodedShader).to.deep.equal({colorStops: [[Color.red, {percent: -30}], Color.blue], direction: 180});
});
it('decodes an encoded color shader', function() {
const encodedShader = types.shader.encode(new Color(0, 1, 2));
const decodedShader = types.shader.decode(encodedShader);
expect(decodedShader).to.be.instanceof(Color);
expect(decodedShader.toString()).to.equal('rgb(0, 1, 2)');
});
it('decodes an encoded image shader', function() {
const encodedShader = types.shader.encode(new Image({src: 'foo'}));
const decodedShader = types.shader.decode(encodedShader);
expect(decodedShader).to.be.instanceof(Image);
expect(decodedShader.src).to.equal('foo');
});
});
describe('font', function() {
it('encode translates "initial" to `undefined`', function() {
expect(types.FontValue.encode('initial')).to.equal(undefined);
});
it('encode translates `null` to `undefined`', function() {
expect(types.FontValue.encode(null)).to.equal(undefined);
});
});
describe('NativeObject', function() {
const encode = types.NativeObject.encode;
const decode = types.NativeObject.decode;
it('translates widgets to ids in properties', function() {
const value = new CustomNativeObject();
expect(encode(value)).to.equal(value.cid);
});
it('translates widget collection to first ids in properties', function() {
const value = new WidgetCollection([new CustomNativeObject()]);
expect(encode(value)).to.equal(value[0].cid);
});
it('does not translate objects with id field to ids', function() {
const value = {id: '23', name: 'bar'};
expect(encode(value)).to.equal(value);
});
it('translates ids to widgets', function() {
const value = new CustomNativeObject();
expect(decode(value.cid)).to.equal(value);
});
});
describe('image', function() {
const encode = types.ImageValue.encode;
const decode = types.ImageValue.decode;
it('succeeds for minimal image value', function() {
stub(console, 'warn');
const result = encode({src: 'foo.png'});
expect(result).to.eql(['foo.png', null, null, null]);
expect(console.warn).not.to.have.been.called;
});
it('succeeds for image with width and height', function() {
stub(console, 'warn');
const result = encode({src: 'foo.png', width: 10, height: 10});
expect(result).to.eql(['foo.png', 10, 10, null]);
expect(console.warn).not.to.have.been.called;
});
it('succeeds for image with scale', function() {
stub(console, 'warn');
const result = encode({src: 'foo.png', scale: 1.4});
expect(result).to.eql(['foo.png', null, null, 1.4]);
expect(console.warn).not.to.have.been.called;
});
it('succeeds for string', function() {
expect(encode('foo.jpg')).to.eql(['foo.jpg', null, null, null]);
});
it('succeeds for string with scale detection via file name', function() {
expect(encode('[email protected]')).to.eql(['[email protected]', null, null, 2]);
expect(encode('[email protected]')).to.eql(['[email protected]', null, null, 1.4]);
expect(encode('[email protected]')).to.eql(['[email protected]', null, null, null]);
expect(encode('foo2x.jpg')).to.eql(['foo2x.jpg', null, null, null]);
});
it('succeeds for object with scale detection via file name', function() {
expect(encode({src: '[email protected]'})).to.eql(['[email protected]', null, null, 2]);
expect(encode({src: '[email protected]'})).to.eql(['[email protected]', null, null, 1.4]);
expect(encode({src: '[email protected]'})).to.eql(['[email protected]', null, null, null]);
expect(encode({src: 'foo2x.jpg'})).to.eql(['foo2x.jpg', null, null, null]);
});
it('overrides scale detection with explicit scale or dimensions', function() {
expect(encode({src: '[email protected]', scale: 1})).to.eql(['[email protected]', null, null, 1]);
expect(encode({src: '[email protected]', width: 10})).to.eql(['[email protected]', 10, null, null]);
expect(encode({src: '[email protected]', height: 10})).to.eql(['[email protected]', null, 10, null]);
});
it('has no scale detection for scale pattern in path', function() {
expect(encode('foo@2x/bar.jpg')).to.eql(['foo@2x/bar.jpg', null, null, null]);
expect(encode('foo@3x/[email protected]')).to.eql(['foo@3x/[email protected]', null, null, 2]);
});
it('succeeds for null', function() {
expect(encode(null)).to.be.null;
});
it('decodes array with scale to Image', function() {
const image = decode(['foo', null, null, 2]);
expect(image).to.be.instanceof(Image);
expect(image.src).to.equal('foo');
expect(image.scale).to.equal(2);
});
it('decodes array with dimensions to Image', function() {
const image = decode(['foo', 100, 200, null]);
expect(image).to.be.instanceof(Image);
expect(image.src).to.equal('foo');
expect(image.width).to.equal(100);
expect(image.height).to.equal(200);
});
it('fails if image value is not an object', function() {
expect(() => {
encode(23);
}).to.throw(Error, '23 is not a valid ImageValue');
});
it('fails if src is undefined', function() {
expect(() => {
encode({});
}).to.throw(Error, '"src" missing');
});
it('fails if src is empty string', function() {
expect(() => {
encode({src: ''});
}).to.throw(Error, '"src" must not be empty');
});
it('fails if src contains invalid ../ segments', function() {
expect(() => {
encode({src: '../test.png'});
}).to.throw(Error, 'Invalid image "src": Path must not start with ".."');
});
it('fails if width/height/scale values are invalid number', function() {
const goodValues = [0, 1, 1 / 3, 0.5, Math.PI];
const badValues = [-1, NaN, 1 / 0, -1 / 0, '1', true, false, {}];
const props = ['width', 'height', 'scale'];
const checkWith = function(prop, value) {
const image = {src: 'foo'};
image[prop] = value;
encode(image);
};
props.forEach((prop) => {
goodValues.forEach((value) => {
expect(() => checkWith(prop, value)).not.to.throw();
});
badValues.forEach((value) => {
expect(() => checkWith(prop, value)).to.throw(Error, `"${prop}" is not a dimension`);
});
});
});
it('warns if scale and width are given', function() {
stub(console, 'warn');
encode.call(class Foo {}, {src: 'foo.png', width: 23, scale: 2});
expect(console.warn).to.have.been.calledWithMatch(
'Foo: image "scale" is ignored when "width" and/or "height" are set to a number'
);
});
it('warns if scale and height are given', function() {
stub(console, 'warn');
encode.call(class Foo {}, {src: 'foo.png', height: 23, scale: 2});
expect(console.warn).to.have.been.calledWithMatch(
'Foo: image "scale" is ignored when "width" and/or "height" are set to a number'
);
});
});
describe('boolean', function() {
const encode = types.boolean.encode;
const decode = types.boolean.decode;
it('passes through true', function() {
expect(encode(true)).to.equal(true);
});
it('passes through false', function() {
expect(encode(false)).to.equal(false);
});
it('translates falsy values', function() {
expect(encode(null)).to.equal(false);
expect(encode('')).to.equal(false);
expect(encode(undefined)).to.equal(false);
expect(encode(0)).to.equal(false);
});
it('translates truthy values', function() {
expect(encode(1)).to.equal(true);
expect(encode({})).to.equal(true);
expect(encode('true')).to.equal(true);
});
it('decodes undefined to false', function() {
expect(decode(undefined)).to.equal(false);
});
});
describe('string', function() {
const encode = types.string.encode;
const decode = types.string.decode;
it('translates null to empty string', function() {
expect(encode(null)).to.equal('');
expect(encode(undefined)).to.equal('');
});
it('translates other types to string', function() {
expect(encode('str')).to.equal('str');
expect(encode(23)).to.equal('23');
expect(encode(false)).to.equal('false');
expect(encode({})).to.equal('[object Object]');
expect(encode([1, 2, 3])).to.equal('1,2,3');
expect(encode({toString() {return 'foo';}})).to.equal('foo');
});
it('decodes undefined to empty string', function() {
expect(decode(undefined)).to.equal('');
});
});
describe('number', function() {
const encode = types.number.encode;
const decode = types.number.decode;
it('fails for non-numbers', function() {
expect(() => encode()).to.throw(Error, 'undefined is not a number');
expect(() => encode(null)).to.throw(Error, 'null is not a number');
expect(() => encode(true)).to.throw(Error, 'true is not a number');
expect(() => encode('')).to.throw(Error, '"" is not a number');
expect(() => encode('23x')).to.throw(Error, '"23x" is not a number');
expect(() => encode({})).to.throw(Error, '{} is not a number');
expect(() => encode([])).to.throw(Error, '[] is not a number');
});
it('fails for invalid numbers', function() {
const values = [NaN, 1 / 0, -1 / 0];
values.forEach((value) => {
expect(() => {
encode(value);
}).to.throw(Error, value + ' is not a valid number');
});
});
it('accepts all valid kinds of numbers', function() {
expect(encode(0)).to.equal(0);
expect(encode(1)).to.equal(1);
expect(encode(-1)).to.equal(-1);
expect(encode(10e10)).to.equal(10e10);
expect(encode(10e-10)).to.equal(10e-10);
});
it('accepts strings', function() {
expect(encode('0')).to.equal(0);
expect(encode('1')).to.equal(1);
expect(encode('-1')).to.equal(-1);
expect(encode('3.14')).to.equal(3.14);
expect(encode('-3.14')).to.equal(-3.14);
expect(encode('.01')).to.equal(0.01);
});
it('decodes undefined to 0', function() {
expect(decode(undefined)).to.equal(0);
});
});
describe('natural', function() {
const encode = types.natural.encode;
const decode = types.natural.decode;
it('fails for non-numbers', function() {
expect(() => encode()).to.throw(Error, 'undefined is not a number');
expect(() => encode(null)).to.throw(Error, 'null is not a number');
expect(() => encode(true)).to.throw(Error, 'true is not a number');
expect(() => encode('')).to.throw(Error, '"" is not a number');
expect(() => encode('23x')).to.throw(Error, '"23x" is not a number');
expect(() => encode({})).to.throw(Error, '{} is not a number');
expect(() => encode([])).to.throw(Error, '[] is not a number');
});
it('fails for invalid numbers', function() {
const values = [NaN, 1 / 0, -1 / 0];
values.forEach((value) => {
expect(() => {
encode(value);
}).to.throw(Error, value + ' is not a valid number');
});
});
it('accepts natural number including zero', function() {
expect(encode(0)).to.equal(0);
expect(encode(1)).to.equal(1);
expect(encode(10e10)).to.equal(10e10);
});
it('normalizes negative values', function() {
expect(encode(-1)).to.equal(0);
expect(encode(-1.5)).to.equal(0);
});
it('rounds given value', function() {
expect(encode(0.4)).to.equal(0);
expect(encode(1.1)).to.equal(1);
expect(encode(1.9)).to.equal(2);
});
it('accepts strings', function() {
expect(encode('0')).to.equal(0);
expect(encode('1')).to.equal(1);
expect(encode('-1')).to.equal(0);
expect(encode('0.7')).to.equal(1);
});
it('decodes undefined to 0', function() {
expect(decode(undefined)).to.equal(0);
});
});
describe('integer', function() {
const encode = types.integer.encode;
const decode = types.integer.decode;
it('fails for non-numbers', function() {
expect(() => encode()).to.throw(Error, 'undefined is not a number');
expect(() => encode(null)).to.throw(Error, 'null is not a number');
expect(() => encode(true)).to.throw(Error, 'true is not a number');
expect(() => encode('')).to.throw(Error, '"" is not a number');
expect(() => encode('23x')).to.throw(Error, '"23x" is not a number');
expect(() => encode({})).to.throw(Error, '{} is not a number');
expect(() => encode([])).to.throw(Error, '[] is not a number');
});
it('fails for invalid numbers', function() {
const values = [NaN, 1 / 0, -1 / 0];
values.forEach((value) => {
expect(() => {
encode(value);
}).to.throw(Error, value + ' is not a valid number');
});
});
it('accepts positive and negative numbers including zero', function() {
expect(encode(-(10e10))).to.equal(-(10e10));
expect(encode(-1)).to.equal(-1);
expect(encode(0)).to.equal(0);
expect(encode(1)).to.equal(1);
expect(encode(10e10)).to.equal(10e10);
});
it('rounds given value', function() {
expect(encode(-1.9)).to.equal(-2);
expect(encode(-1.1)).to.equal(-1);
expect(encode(-0.4)).to.equal(0);
expect(encode(0.4)).to.equal(0);
expect(encode(1.1)).to.equal(1);
expect(encode(1.9)).to.equal(2);
});
it('accepts strings', function() {
expect(encode('0')).to.equal(0);
expect(encode('1')).to.equal(1);
expect(encode('-1')).to.equal(-1);
expect(encode('0.7')).to.equal(1);
});
it('decodes undefined to 0', function() {
expect(decode(undefined)).to.equal(0);
});
});
describe('function', function() {
const encode = types.function.encode;
it('accepts functions', function() {
const fn = function() {};
expect(encode(fn)).to.equal(fn);
});
it('fails for non-functions', function() {
const values = ['', 'foo', 23, null, undefined, true, false, {}, []];
values.forEach((value) => {
expect(() => {
encode(value);
}).to.throw(Error, / is not a function/);
});
});
});
describe('choice', function() {
const encode = types.choice.encode;
it('allows string values given in array', function() {
const accepted = ['1', 'foo', 'bar'];
expect(encode('1', accepted)).to.equal('1');
expect(encode('foo', accepted)).to.equal('foo');
expect(encode('bar', accepted)).to.equal('bar');
});
it('rejects string values not given in array', function() {
const accepted = ['x', 'y', 'z'];
['1', 'foo', 'bar'].forEach((value) => {
expect(() => {
encode(value, accepted);
}).to.throw(Error, 'Accepting "x", "y", "z", given was: "' + value + '"');
});
});
});
describe('nullable', function() {
const encode = types.nullable.encode;
it('allows null', function() {
expect(encode(null)).to.be.null;
});
it('allows null or alternate check', function() {
expect(encode(null, 'natural')).to.be.null;
expect(encode(1.1, 'natural')).to.equal(1);
});
it('rejects alternate check', function() {
expect(() => {
encode(NaN, 'natural');
}).to.throw();
});
});
describe('opacity', function() {
const encode = types.opacity.encode;
it('fails for non-numbers', function() {
expect(() => encode()).to.throw(Error, 'undefined is not a number');
expect(() => encode(null)).to.throw(Error, 'null is not a number');
expect(() => encode(true)).to.throw(Error, 'true is not a number');
expect(() => encode('')).to.throw(Error, '"" is not a number');
expect(() => encode('23x')).to.throw(Error, '"23x" is not a number');
expect(() => encode({})).to.throw(Error, '{} is not a number');
expect(() => encode([])).to.throw(Error, '[] is not a number');
});
it('fails for invalid numbers', function() {
const values = [NaN, 1 / 0, -1 / 0];
values.forEach((value) => {
expect(() => {
encode(value);
}).to.throw(Error, value + ' is not a valid number');
});
});
it('clamps out-of-bounds numbers', function() {
expect(encode(-1)).to.equal(0);
expect(encode(-0.1)).to.equal(0);
expect(encode(1.1)).to.equal(1);
expect(encode(1e10)).to.equal(1);
});
it('accepts strings', function() {
expect(encode('0')).to.equal(0);
expect(encode('0.1')).to.equal(0.1);
expect(encode('1')).to.equal(1);
});
    it('accepts numbers between zero and one, inclusive', function() {
expect(encode(0)).to.equal(0);
expect(encode(0.5)).to.equal(0.5);
expect(encode(1)).to.equal(1);
});
});
describe('transform', function() {
const encode = types.transform.encode;
const defaultValue = {
rotation: 0,
scaleX: 1,
scaleY: 1,
translationX: 0,
translationY: 0,
translationZ: 0
};
const customValue = {
rotation: 1.2,
scaleX: 2,
scaleY: 0.5,
translationX: -40,
translationY: +40,
translationZ: +20
};
it('accepts complete, valid values', function() {
expect(encode(defaultValue)).to.eql(defaultValue);
expect(encode(customValue)).to.eql(customValue);
});
it('auto-completes values', function() {
const value = omit(customValue, ['scaleX', 'translationY']);
const expected = {
rotation: 1.2,
scaleX: 1,
scaleY: 0.5,
translationX: -40,
translationY: 0,
translationZ: +20
};
expect(encode(value)).to.eql(expected);
expect(encode({})).to.eql(defaultValue);
});
it('fails for invalid numbers', function() {
[
{rotation: null},
{scaleX: undefined},
{scaleY: NaN},
{translationX: 1 / 0},
{translationY: -1 / 0},
{translationZ: 1 / 0}
].forEach((value) => {
expect(() => {
encode(value);
}).to.throw();
});
});
it('fails for unknown keys', function() {
expect(() => {
encode({foo: 1});
}).to.throw(Error, '{"foo":1} is not a valid transformation containing key "foo"');
});
});
describe('array', function() {
const encode = types.array.encode;
it('passes any array', function() {
expect(encode([1, 'a', true])).to.eql([1, 'a', true]);
});
it('converts null to empty array', function() {
expect(encode(null)).to.eql([]);
});
it('converts undefined to empty array', function() {
expect(encode(undefined)).to.eql([]);
});
it('does not copy array', function() {
const input = [1, 2, 3];
expect(encode(input)).to.equal(input);
});
it('fails for non-arrays', function() {
const values = [0, 1, '', 'foo', false, true, {}, {length: 0}];
values.forEach((value) => {
expect(() => {
encode(value);
}).to.throw(Error, / is not an Array/);
});
});
it('performs optional item checks', function() {
expect(encode(['foo', 1, true], 'string')).to.eql(['foo', '1', 'true']);
expect(() => encode(['foo'], 'integer')).to.throw(Error, '"foo" is not a number');
});
});
describe('boxDimensions encode', function() {
const encode = types.boxDimensions.encode;
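    // The shorthand mirrors CSS margin/padding order: one value applies to all sides,
    // two expand to (top/bottom, left/right), three to (top, left/right, bottom),
    // and four to (top, right, bottom, left).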
it('passes complete number objects', function() {
expect(encode({left: 1, right: 2, top: 3, bottom: 4})).to.deep.equal({left: 1, right: 2, top: 3, bottom: 4});
});
it('normalizes object', function() {
expect(encode({left: 1, right: '2px', top: '3'})).to.deep.equal({left: 1, right: 2, top: 3, bottom: 0});
});
it('converts numbers to objects', function() {
expect(encode(4)).to.deep.equal({left: 4, right: 4, top: 4, bottom: 4});
});
it('converts array of number to objects', function() {
expect(encode([1, 2, 3, 4])).to.deep.equal({left: 4, right: 2, top: 1, bottom: 3});
expect(encode([1, 2, 3])).to.deep.equal({left: 2, right: 2, top: 1, bottom: 3});
expect(encode([1, 2])).to.deep.equal({left: 2, right: 2, top: 1, bottom: 1});
expect(encode([1])).to.deep.equal({left: 1, right: 1, top: 1, bottom: 1});
expect(encode([null])).to.deep.equal({left: 0, right: 0, top: 0, bottom: 0});
});
it('converts array of string to objects', function() {
expect(encode(['1', '2px', 3, 4])).to.deep.equal({left: 4, right: 2, top: 1, bottom: 3});
});
it('converts space separated strings to objects', function() {
expect(encode('1 2 3 4')).to.deep.equal({left: 4, right: 2, top: 1, bottom: 3});
expect(encode('1 2 3 ')).to.deep.equal({left: 2, right: 2, top: 1, bottom: 3});
expect(encode(' 1 2')).to.deep.equal({left: 2, right: 2, top: 1, bottom: 1});
expect(encode(' 1 ')).to.deep.equal({left: 1, right: 1, top: 1, bottom: 1});
});
it('converts space separated strings with unit to objects', function() {
expect(encode('1px 2px 3px 4px')).to.deep.equal({left: 4, right: 2, top: 1, bottom: 3});
expect(encode('1px')).to.deep.equal({left: 1, right: 1, top: 1, bottom: 1});
});
it('converts null to 0', function() {
expect(encode(null)).to.deep.equal({left: 0, right: 0, top: 0, bottom: 0});
});
it('fails for invalid types', function() {
expect(() => encode('foo')).to.throw(Error, '"foo" is not a valid BoxDimension value');
expect(() => encode(false)).to.throw(Error, 'false is not a valid BoxDimension value');
expect(() => encode(true)).to.throw(Error, 'true is not a valid BoxDimension value');
expect(() => encode([])).to.throw(Error, '[] is not a valid BoxDimension value');
expect(() => encode([1, 2, 3, 4, 5])).to.throw(Error, '[1, 2, 3, 4, 5] is not a valid BoxDimension value');
expect(() => encode(['foo'])).to.throw(Error, '["foo"] is not a valid BoxDimension value');
});
});
describe('bounds', function() {
const decode = types.bounds.decode;
it('decodes array', function() {
expect(decode([1, 2, 3, 4])).to.deep.equal({left: 1, top: 2, width: 3, height: 4});
});
it('decodes undefined to default values', function() {
expect(decode(undefined)).to.deep.equal({left: 0, top: 0, width: 0, height: 0});
});
});
});
|
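-- Demo schema with seed data: per-ISBN book stock, a small book catalog, and a
-- user account balance (the shape suggests a simple purchase/transaction example).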
DROP TABLE IF EXISTS `T_BOOK_STOCK`;
CREATE TABLE `T_BOOK_STOCK` (
`ISBN` varchar(255) COLLATE utf8_bin NOT NULL,
`STOCK` int(11) DEFAULT NULL,
PRIMARY KEY (`ISBN`)
);
INSERT INTO T_BOOK_STOCK(ISBN, STOCK) VALUES('1001', 10);
INSERT INTO T_BOOK_STOCK(ISBN, STOCK) VALUES('1002', 10);
DROP TABLE IF EXISTS `T_BOOK`;
CREATE TABLE `T_BOOK` (
`ISBN` varchar(255) COLLATE utf8_bin NOT NULL,
`BOOK_NAME` varchar(255) COLLATE utf8_bin DEFAULT NULL,
`PRICE` float DEFAULT NULL,
PRIMARY KEY (`ISBN`)
);
INSERT INTO T_BOOK(ISBN, BOOK_NAME, PRICE) VALUES('1001', 'Java', 100);
INSERT INTO T_BOOK(ISBN, BOOK_NAME, PRICE) VALUES('1002', 'Oracle', 70);
DROP TABLE IF EXISTS `T_ACCOUNT`;
CREATE TABLE `T_ACCOUNT` (
`USERNAME` varchar(255) COLLATE utf8_bin NOT NULL,
`BALANCE` int(11) DEFAULT NULL,
PRIMARY KEY (`USERNAME`)
);
INSERT INTO T_ACCOUNT(USERNAME, BALANCE) VALUES('AA', 300);
|
import pytest
from db.redis.heartbeat import RedisHeartBeat
from tests.base.case import BaseTest
@pytest.mark.redis_mark
class TestRedisHeartBeat(BaseTest):
def test_redis_heartbeat_raises_for_wrong_values(self):
with self.assertRaises(ValueError):
RedisHeartBeat(experiment=1, job=1)
with self.assertRaises(ValueError):
RedisHeartBeat(job=1, build=1)
with self.assertRaises(ValueError):
RedisHeartBeat(experiment=1, job=1, build=1)
with self.assertRaises(ValueError):
RedisHeartBeat()
def test_redis_heartbeat_experiment(self):
heartbeat = RedisHeartBeat(experiment=1)
self.assertEqual(heartbeat.redis_key, RedisHeartBeat.KEY_EXPERIMENT.format(1))
self.assertEqual(heartbeat.is_alive(), False)
self.assertEqual(RedisHeartBeat.experiment_is_alive(1), False)
heartbeat.ping()
self.assertEqual(heartbeat.is_alive(), True)
self.assertEqual(RedisHeartBeat.experiment_is_alive(1), True)
heartbeat.clear()
self.assertEqual(heartbeat.is_alive(), False)
self.assertEqual(RedisHeartBeat.experiment_is_alive(1), False)
RedisHeartBeat.experiment_ping(1)
self.assertEqual(heartbeat.is_alive(), True)
self.assertEqual(RedisHeartBeat.experiment_is_alive(1), True)
def test_redis_heartbeat_job(self):
heartbeat = RedisHeartBeat(job=1)
self.assertEqual(heartbeat.redis_key, RedisHeartBeat.KEY_JOB.format(1))
self.assertEqual(heartbeat.is_alive(), False)
self.assertEqual(RedisHeartBeat.job_is_alive(1), False)
heartbeat.ping()
self.assertEqual(heartbeat.is_alive(), True)
self.assertEqual(RedisHeartBeat.job_is_alive(1), True)
heartbeat.clear()
self.assertEqual(heartbeat.is_alive(), False)
self.assertEqual(RedisHeartBeat.job_is_alive(1), False)
RedisHeartBeat.job_ping(1)
self.assertEqual(heartbeat.is_alive(), True)
self.assertEqual(RedisHeartBeat.job_is_alive(1), True)
def test_redis_heartbeat_build(self):
heartbeat = RedisHeartBeat(build=1)
self.assertEqual(heartbeat.redis_key, RedisHeartBeat.KEY_BUILD.format(1))
self.assertEqual(heartbeat.is_alive(), False)
self.assertEqual(RedisHeartBeat.build_is_alive(1), False)
heartbeat.ping()
self.assertEqual(heartbeat.is_alive(), True)
self.assertEqual(RedisHeartBeat.build_is_alive(1), True)
heartbeat.clear()
self.assertEqual(heartbeat.is_alive(), False)
self.assertEqual(RedisHeartBeat.build_is_alive(1), False)
RedisHeartBeat.build_ping(1)
self.assertEqual(heartbeat.is_alive(), True)
self.assertEqual(RedisHeartBeat.build_is_alive(1), True)
|
/*
* Copyright (c) 2014, Victor Nazarov <[email protected]>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.github.sviperll.adt4j.model;
import com.github.sviperll.adt4j.model.util.Types;
import com.github.sviperll.adt4j.model.util.VariableNameSource;
import com.helger.jcodemodel.AbstractJType;
import com.helger.jcodemodel.IJExpression;
import com.helger.jcodemodel.JBlock;
import com.helger.jcodemodel.JConditional;
import com.helger.jcodemodel.JExpr;
import com.helger.jcodemodel.JForLoop;
import com.helger.jcodemodel.JInvocation;
import com.helger.jcodemodel.JOp;
import com.helger.jcodemodel.JVar;
/**
*
* @author Victor Nazarov <[email protected]>
*/
class HashCodeMethod {
private final Types types;
private final int hashCodeBase;
private final JBlock methodBody;
private final VariableNameSource methodNameSource;
HashCodeMethod(Types types, int hashCodeBase, JBlock methodBody, VariableNameSource methodNameSource) {
this.types = types;
this.hashCodeBase = hashCodeBase;
this.methodBody = methodBody;
this.methodNameSource = methodNameSource;
}
Body createBody(int tag) {
JVar result = methodBody.decl(types._int, methodNameSource.get("result"), JExpr.lit(tag));
return new Body(result, methodBody, methodNameSource);
}
class Body {
private final JBlock body;
private final VariableNameSource nameSource;
private final JVar result;
private Body(JVar result, JBlock body, VariableNameSource nameSource) {
this.result = result;
this.body = body;
this.nameSource = nameSource;
}
void appendNullableValue(AbstractJType type, IJExpression value) {
if (!type.isReference())
throw new AssertionError("appendNullableValue called for non-reference type");
else {
JConditional _if = body._if(value.eq(JExpr._null()));
Body thenBody = new Body(result, _if._then(), nameSource);
thenBody.appendNotNullValue(types._int, JExpr.lit(0));
Body elseBody = new Body(result, _if._else(), nameSource);
elseBody.appendNotNullValue(type, value);
}
}
void appendNotNullValue(AbstractJType type, IJExpression value) {
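            // Follows the classic hash-combining recipe: result = result * hashCodeBase + c,
            // deriving c per type -- arrays element-wise, double via doubleToLongBits,
            // float via floatToIntBits, long folded into an int, objects via hashCode().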
if (type.isArray()) {
VariableNameSource localNames = nameSource.forBlock();
JForLoop _for = body._for();
JVar i = _for.init(types._int, localNames.get("i"), JExpr.lit(0));
_for.test(i.lt(value.ref("length")));
_for.update(i.incr());
Body forBody = new Body(result, _for.body(), localNames);
if (type.elementType().isReference())
forBody.appendNullableValue(type.elementType(), value.component(i));
else
forBody.appendNotNullValue(type.elementType(), value.component(i));
} else if (!type.isPrimitive()) {
appendNotNullValue(types._int, value.invoke("hashCode"));
} else if (type.name().equals("double")) {
JInvocation invocation = types._Double.staticInvoke("doubleToLongBits");
invocation.arg(value);
appendNotNullValue(types._long, invocation);
} else if (type.name().equals("float")) {
JInvocation invocation = types._Float.staticInvoke("floatToIntBits");
invocation.arg(value);
appendNotNullValue(types._int, invocation);
} else if (type.name().equals("boolean")) {
appendNotNullValue(types._int, JOp.cond(value, JExpr.lit(0), JExpr.lit(1)));
} else if (type.name().equals("long")) {
appendNotNullValue(types._int, JExpr.cast(types._int, value.xor(value.shrz(JExpr.lit(32)))));
} else {
body.assign(result, result.mul(JExpr.lit(hashCodeBase)).plus(value));
}
}
IJExpression result() {
return result;
}
}
}
|
mkdir data
cd data
# Linux/macOS: resumable download of the SVHN cropped-digit sets
wget -c http://ufldl.stanford.edu/housenumbers/train_32x32.mat http://ufldl.stanford.edu/housenumbers/test_32x32.mat
# Windows PowerShell equivalent (PowerShell aliases wget to Invoke-WebRequest, hence -OutFile):
wget http://ufldl.stanford.edu/housenumbers/train_32x32.mat -OutFile train_32x32.mat
wget http://ufldl.stanford.edu/housenumbers/test_32x32.mat -OutFile test_32x32.mat |
<?php
namespace inklabs\kommerce\EntityDTO\Builder;
use inklabs\kommerce\Entity\Price;
use inklabs\kommerce\EntityDTO\PriceDTO;
class PriceDTOBuilder implements DTOBuilderInterface
{
/** @var Price */
protected $entity;
/** @var PriceDTO */
protected $entityDTO;
/** @var DTOBuilderFactoryInterface */
protected $dtoBuilderFactory;
public function __construct(Price $price, DTOBuilderFactoryInterface $dtoBuilderFactory)
{
$this->entity = $price;
$this->dtoBuilderFactory = $dtoBuilderFactory;
$this->initializePriceDTO();
$this->entityDTO->origUnitPrice = $this->entity->origUnitPrice;
$this->entityDTO->unitPrice = $this->entity->unitPrice;
$this->entityDTO->origQuantityPrice = $this->entity->origQuantityPrice;
$this->entityDTO->quantityPrice = $this->entity->quantityPrice;
}
protected function initializePriceDTO()
{
$this->entityDTO = new PriceDTO;
}
/**
* @return static
*/
public function withCatalogPromotions()
{
$catalogPromotions = $this->entity->getCatalogPromotions();
if ($catalogPromotions !== null) {
foreach ($catalogPromotions as $catalogPromotion) {
$this->entityDTO->catalogPromotions[] = $this->dtoBuilderFactory
->getCatalogPromotionDTOBuilder($catalogPromotion)
->build();
}
}
return $this;
}
/**
* @return static
*/
public function withProductQuantityDiscounts()
{
$productQuantityDiscounts = $this->entity->getProductQuantityDiscounts();
if ($productQuantityDiscounts !== null) {
foreach ($productQuantityDiscounts as $productQuantityDiscount) {
$this->entityDTO->productQuantityDiscounts[] = $this->dtoBuilderFactory
->getProductQuantityDiscountDTOBuilder($productQuantityDiscount)
->build();
}
}
return $this;
}
/**
* @return static
*/
public function withAllData()
{
return $this
->withCatalogPromotions()
->withProductQuantityDiscounts();
}
protected function preBuild()
{
}
public function build()
{
$this->preBuild();
unset($this->entity);
return $this->entityDTO;
}
}
|
from __future__ import annotations
import collections
import dataclasses
import os
import pathlib
import re
from typing import MutableMapping, Iterator
@dataclasses.dataclass(eq=True, frozen=True)
class Coord:
x: int
y: int
def enumerate_simple(self, other: Coord) -> Iterator[Coord]:
x0 = min(self.x, other.x)
x1 = max(self.x, other.x)
y0 = min(self.y, other.y)
y1 = max(self.y, other.y)
if x0 == x1:
for y in range(y0, y1 + 1):
yield Coord(x0, y)
elif y0 == y1:
for x in range(x0, x1 + 1):
yield Coord(x, y0)
else:
# For now, only consider horizontal and vertical lines
return
def enumerate_with_diagonal(self, other: Coord) -> Iterator[Coord]:
x0 = min(self.x, other.x)
x1 = max(self.x, other.x)
y0 = min(self.y, other.y)
y1 = max(self.y, other.y)
if x0 == x1:
for y in range(y0, y1 + 1):
yield Coord(x0, y)
elif y0 == y1:
for x in range(x0, x1 + 1):
yield Coord(x, y0)
elif x1 - x0 == y1 - y0:
# ugly, probably can be simplified
dx = (other.x - self.x) // (x1 - x0)
dy = (other.y - self.y) // (y1 - y0)
for i in range(x1 - x0 + 1):
yield Coord(self.x + i * dx, self.y + i * dy)
else:
raise ValueError
Input = list[tuple[Coord, Coord]]
Map = MutableMapping[Coord, int]
def input_from_file(path: os.PathLike) -> Input:
with open(path) as fh:
return [parse_line(line) for line in fh]
def parse_line(line: str) -> tuple[Coord, Coord]:
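    # Input lines have the form "x0,y0 -> x1,y1", e.g. "0,9 -> 5,9".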
m = re.match(r"(\d+),(\d+) -> (\d+),(\d+)", line)
assert m is not None
x0, y0, x1, y1 = map(int, m.groups())
return Coord(x0, y0), Coord(x1, y1)
def solve(inputs: Input) -> int:
vent_map: Map = collections.defaultdict(int)
for p0, p1 in inputs:
for p in p0.enumerate_simple(p1):
vent_map[p] += 1
return sum(v > 1 for v in vent_map.values())
def solve_second(inputs: Input) -> int:
vent_map: Map = collections.defaultdict(int)
for p0, p1 in inputs:
for p in p0.enumerate_with_diagonal(p1):
vent_map[p] += 1
return sum(v > 1 for v in vent_map.values())
if __name__ == "__main__":
assert solve(input_from_file(pathlib.Path("ex.txt"))) == 5
print(solve(input_from_file(pathlib.Path("input.txt"))))
assert solve_second(input_from_file(pathlib.Path("ex.txt"))) == 12
print(solve_second(input_from_file(pathlib.Path("input.txt"))))
|
package com.vanniktech.android.apk.size
import java.text.DecimalFormat
import java.text.DecimalFormatSymbols
import java.util.Locale
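// Uses a fixed US locale so output is stable across systems;
// e.g. convertBytesToMegaBytes(1_500_000) == "1.5" (illustrative example, not from the source).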
private const val BYTES_TO_KILOBYTES = 1000.toDouble()
private const val BYTES_TO_MEGABYTES = 1000000.toDouble()
object ApkSizeTools {
    fun convertBytesToMegaBytes(bytes: Long): String = getDecimalFormat().format(bytes / BYTES_TO_MEGABYTES)
fun convertBytesToKiloBytes(bytes: Long): String = getDecimalFormat().format(bytes / BYTES_TO_KILOBYTES)
private fun getDecimalFormat(): DecimalFormat = DecimalFormat("#.##", DecimalFormatSymbols(Locale.US))
}
|
class CreateBotCampaigns < ActiveRecord::Migration[5.1]
def change
create_table :bot_campaigns do |t|
t.belongs_to :bot, null: false, index: true, foreign_key: true
t.belongs_to :inbox, null: false, index: true, foreign_key: true
t.belongs_to :campaign, null: false, index: true, foreign_key: true
t.timestamps
end
add_index :bot_campaigns, [:bot_id, :campaign_id], unique: true
end
end
|
-------------------------------------------------------------------------------
-- |
-- Module : Absol.Metaverify
-- Description : Functions for the verification of metaspec semantics.
-- Copyright : (c) Ara Adkins (2017)
-- License : See LICENSE file
--
-- Maintainer : Ara Adkins
-- Stability : experimental
-- Portability : GHC
--
-- This file contains functions for performing the semantic verification process
-- on the AST of a metaspec file.
-- To an extent it mirrors the structure of the parser.
--
-------------------------------------------------------------------------------
module Absol.Metaverify
(
verifyLanguage
) where
import Absol.Metaparse.Grammar
import Absol.Metaverify.Collate
import Absol.Metaverify.Diagnostics
import Absol.Metaverify.RuleTag
import Absol.Metaverify.State
import Absol.Utilities (countOccurrences)
import Data.Either (rights)
import qualified Data.List as L (delete, find, nub)
import qualified Data.Map as M
import Data.Maybe (fromJust, isJust)
-- | A type for storing the non-terminals defined in a syntax expression.
type NTCountMap = M.Map NonTerminal Integer
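-- For example, a production body that uses the non-terminal <digit> twice is
-- recorded as the entry (digit, 2); index checks such as digit[1] are later
-- validated against these counts (see 'checkNT').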
-- | Verifies the input language.
--
-- If the language can be proven complete, this returns True. In the case where
-- that property cannot be shown, an error string is returned, describing the
-- nature of the error.
--
-- It will also alert the user to any unused productions.
verifyLanguage :: Metaspec -> (Bool, String)
verifyLanguage x = case runState runVerification (collateASTData x) of
(True, VerifierState (tag, _) _ _ _) ->
(True, "LANGUAGE: " ++ prettyPrintRuleTag tag)
(False, VerifierState (tag, _) prod _ _) ->
(False,
"LANGUAGE: " ++ prettyPrintRuleTag tag ++ "\n\n" ++
printLanguageDiagnostics prod
)
-- | Runs the verification process on the language rules.
--
-- It begins at the start rule and recurses through the productions of the
-- language.
runVerification :: VState Bool
runVerification = do
(_, initRule) <- gets startRule
startRuleResult <- verifyRule $ return initRule
modify (updateStartRuleTag startRuleResult)
case startRuleResult of
Terminates -> return True
_ -> return False
-- | Verifies a language rule.
--
-- It will trace the current verification path in case of error.
verifyRule :: VState LanguageRuleBody -> VState RuleTag
verifyRule rule = do
(LanguageRuleBody expr) <- rule
verifySyntaxExpr $ return expr
-- | Verifies a syntax expression.
verifySyntaxExpr :: VState SyntaxExpression -> VState RuleTag
verifySyntaxExpr expr = do
(SyntaxExpression alternatives) <- expr
let result = (verifyAlternative . return) <$> alternatives
combineTerminationResults result
-- | Verifies a syntax alternative.
--
-- Each syntax alternative can have its semantics verified independently, so
-- it is simple to verify these in one go.
verifyAlternative :: VState SyntaxAlternative -> VState RuleTag
verifyAlternative alt = do
alternative <- alt
if hasSemantics alternative then
verifyDefinedSemantics alt
else
verifySubSemantics alt
-- | Verifies a syntax alternative where the semantics are defined by hand.
verifyDefinedSemantics :: VState SyntaxAlternative -> VState RuleTag
verifyDefinedSemantics alt = do
(SyntaxAlternative syntax semantics) <- alt
let (LanguageRuleSemantics rule) = fromJust semantics
let ntsInSyntax = getNTList syntax
sequence_ $ (markAsTouched . return) <$> M.keys ntsInSyntax
case rule of
x@EnvironmentInputRule{} ->
verifyEnvironmentInputRule $ return (x, ntsInSyntax)
(EnvironmentAccessRuleProxy ear) ->
verifyEnvironmentAccessRule $ return (ear, ntsInSyntax)
(SpecialSyntaxRuleProxy ssr) ->
verifySpecialSyntaxRule $ return (ssr, ntsInSyntax)
(SemanticEvaluationRuleList xs) ->
verifySemanticEvaluationRuleList $ return (xs, ntsInSyntax)
-- | Marks a given non-terminal as having been visited but not processed.
markAsTouched :: VState NonTerminal -> VState ()
markAsTouched nt = do
nonTerminal <- nt
modify (updateRuleTag Touched nonTerminal)
return ()
-- | Gets a list of NonTerminals and their counts from a syntactic expression.
getNTList :: [SyntaxTerm] -> NTCountMap
getNTList terms = toCountMap $ concat $ ntsInTerm <$> terms
where
ntsInTerm :: SyntaxTerm -> [NonTerminal]
ntsInTerm (SyntaxTerm (SyntaxFactor _ primary) _) = ntsInPrimary primary
ntsInPrimary :: SyntaxPrimary -> [NonTerminal]
ntsInPrimary (SyntaxSpecial _) = []
ntsInPrimary (TerminalProxy _) = []
ntsInPrimary (NonTerminalProxy nt) = [nt]
ntsInPrimary (SyntaxOptional expr) = ntsInExpr expr
ntsInPrimary (SyntaxRepeated expr) = ntsInExpr expr
ntsInPrimary (SyntaxGrouped expr) = ntsInExpr expr
ntsInExpr :: SyntaxExpression -> [NonTerminal]
ntsInExpr (SyntaxExpression alts) = concat $ ntsInAlternative <$> alts
ntsInAlternative :: SyntaxAlternative -> [NonTerminal]
ntsInAlternative (SyntaxAlternative altTerms _) =
concat $ ntsInTerm <$> altTerms
toCountMap :: [NonTerminal] -> NTCountMap
toCountMap nts =
M.fromList [ (k, countOccurrences k nts) | k <- L.nub nts ]
-- | Verifies semantics taking the form of an environment input rule.
--
-- Environment stores will always terminate, so this function only needs to
-- verify that the operands exist and that they themselves terminate.
verifyEnvironmentInputRule
:: VState (SemanticRule, NTCountMap)
-> VState RuleTag
verifyEnvironmentInputRule input = do
(rule, nts) <- input
case rule of
(EnvironmentInputRule _ accessBlock accessList) -> do
let accessBlocks = accessBlock:accessList
ntPairs = extractSyntaxAccessBlock <$> accessBlocks
result = rights $ checkNT nts <$> ntPairs
subResults <- sequence
$ (\(x,_) -> verifyNonTerminal $ return x) <$> ntPairs
let subResult = foldl tagPlus Terminates subResults
if null result then
return $ Terminates `tagPlus` subResult
else
return $
(DoesNotTerminate $
(resultToErr NonExistentSubterms) <$> result)
`tagPlus` subResult
_ -> return Terminates
-- | Extracts syntax access blocks into a suitable form for processing.
extractSyntaxAccessBlock :: SyntaxAccessBlock -> (NonTerminal, Integer)
extractSyntaxAccessBlock (SyntaxAccessBlock nt (SyntaxAccessor i)) = (nt, i)
-- | Verifies semantics taking the form of an environment access rule.
--
-- Access rules depend only on the thing having been stored, and have well-
-- defined semantics in either case. They have already been verified to rely
-- on terminals that exist, and so the checking is not performed.
--
-- Environment accesses have a well-defined error state in cases where the
-- element does not exist.
verifyEnvironmentAccessRule
:: VState (EnvironmentAccessRule, NTCountMap)
-> VState RuleTag
verifyEnvironmentAccessRule _ = return Terminates
-- | Verifies semantics taking the form of a special syntax rule.
--
-- Special Syntax Rules themselves are guaranteed to terminate, so this function
-- just needs to check if the operands exist, and that they themselves will
-- terminate.
verifySpecialSyntaxRule
:: VState (SpecialSyntaxRule, NTCountMap)
-> VState RuleTag
verifySpecialSyntaxRule input = do
(SpecialSyntaxRule _ _ accessList, nts) <- input
let ntList = fromJust <$>
filter isJust (getNTsFromAccessBlockOrRule <$> accessList)
result = rights $ checkNT nts <$> ntList
subResults <- sequence $ (\(x,_) -> verifyNonTerminal $ return x) <$> ntList
let subResult = foldl tagPlus Terminates subResults
if null result then
return $ Terminates `tagPlus` subResult
else
return $ (DoesNotTerminate $
resultToErr NonExistentSubterms <$> result) `tagPlus` subResult
-- | Gets the non-terminals from syntax access blocks or env access rules.
getNTsFromAccessBlockOrRule :: AccessBlockOrRule -> Maybe (NonTerminal, Integer)
getNTsFromAccessBlockOrRule (Left (SyntaxAccessBlock nt (SyntaxAccessor i))) =
Just (nt, i)
getNTsFromAccessBlockOrRule (Right _) = Nothing
-- | Verifies a list of semantic evaluation rules
verifySemanticEvaluationRuleList
:: VState (SemanticEvaluationRuleList, NTCountMap)
-> VState RuleTag
verifySemanticEvaluationRuleList input = do
args@(rules, _) <- input
guardsComplete <- verifyGuards $ return rules
rulesComplete <- verifySemanticRules $ return args
let tests = [guardsComplete, rulesComplete] :: [RuleTag]
return $ foldl tagPlus Terminates tests
-- | Verifies that the semantic rules meet their requirements.
verifySemanticRules
:: VState (SemanticEvaluationRuleList, NTCountMap)
-> VState RuleTag
verifySemanticRules input = do
args@(rules, _) <- input
satisfiesEvaluationCriterion <- verifyEvaluationCriterion $ return rules
satisfiesSemanticForm <- verifySemanticForm $ return args
let tests :: [RuleTag]
tests = [satisfiesEvaluationCriterion, satisfiesSemanticForm]
return $ foldl tagPlus Terminates tests
-- | Checks if the evaluation rules satisfy their restriction.
--
-- The output variable must be on the left of the leftmost evaluation rule. This
-- is the only location in which it may occur. Variables from sub-evaluations
-- may only appear on the RHS of an assignment.
--
-- As it is enforced by the parser, this function can rely on having at least
-- one semantic evaluation.
verifyEvaluationCriterion
:: VState SemanticEvaluationRuleList
-> VState RuleTag
verifyEvaluationCriterion list = do
rules <- list
let rulePairs = getOutputRulePair <$> rules
opVars = getOperationVars <$> rulePairs
evalVars = getEvaluationVars <$> rules
results = checkVariableEvalCriteria <$> zip opVars evalVars
allVarsInOrder = concat $ (getVarsInOrderForEvals . snd) <$> rulePairs
varsOrdered = and $ checkVarDefinitionOrdering <$> allVarsInOrder
if (and results) && varsOrdered then
return Terminates
else
return $ DoesNotTerminate
[(IncorrectEvaluationForm, [], "Malformed semantic operation(s).")]
where
getOutputRulePair
:: SemanticEvaluationRule
-> (SemanticIdentifier, [SemanticOperationAssignment])
getOutputRulePair
(SemanticEvaluationRule _ ident (SemanticOperationList evals) _ _) =
(ident, evals)
-- | Gets the list of variables defined before and after a temporary.
--
-- The output is [before], temporary, [after].
getVarsInOrderForEvals
:: [SemanticOperationAssignment]
-> [(SemanticIdentifier, [SemanticIdentifier])]
getVarsInOrderForEvals [] = []
getVarsInOrderForEvals ((SemanticOperationAssignment x op):xs) =
(x, getEvalVar op ++ (concat $ evalVars <$> xs)) : getVarsInOrderForEvals xs
-- | Ensures that for input [before], temporary, [after] that temporary is not
-- a member of after.
--
-- This ensures the final criteria to verify the semantic evaluations.
checkVarDefinitionOrdering
:: (SemanticIdentifier, [SemanticIdentifier])
-> Bool
checkVarDefinitionOrdering (semId, ids) = semId `notElem` ids
-- | Checks that the usage of variables in the semantic operation is correct.
--
-- This means that it has to obey the evaluation rules.
checkVariableEvalCriteria
:: (
(SemanticIdentifier, [SemanticIdentifier], [SemanticIdentifier]),
[SemanticIdentifier]
)
-> Bool
checkVariableEvalCriteria ((output, temps, vars), evalVars) = let
evalsNotInTemps = and $ (`notElem` temps) <$> evalVars
varsInTempOrEval = and $ (`elem` (temps ++ evalVars)) <$> vars
outNotInVars = output `notElem` vars
outNotInTemps = output `notElem` temps
outNotInEval = output `notElem` evalVars
in
evalsNotInTemps && outNotInVars && outNotInTemps
&& outNotInEval && varsInTempOrEval
-- | Separates the variables used in the evaluations into three categories.
--
-- In order, these are the output variable, any temporary that is assigned to,
-- and any variables used as part of the evaluation operations. The output
-- variable is not a temporary, and hence does not appear in the first list.
getOperationVars
:: (SemanticIdentifier, [SemanticOperationAssignment])
-> (SemanticIdentifier, [SemanticIdentifier], [SemanticIdentifier])
getOperationVars (ident, opAssigns) =
(ident, L.delete ident (temps <$> opAssigns),
concat (evalVars <$> opAssigns)
)
where
temps :: SemanticOperationAssignment -> SemanticIdentifier
temps (SemanticOperationAssignment identifier _) = identifier
-- | Extracts the variables on the RHS of a semantic evaluation assignment.
evalVars :: SemanticOperationAssignment -> [SemanticIdentifier]
evalVars (SemanticOperationAssignment _ op) = getEvalVar op
-- | Extracts the variables on the RHS of a semantic evaluation.
getEvalVar :: SemanticOperation -> [SemanticIdentifier]
getEvalVar (Variable identifier) = [identifier]
getEvalVar (VariableAccess identifier _) = [identifier]
getEvalVar (Constant _) = []
getEvalVar (Parentheses op) = getEvalVar op
getEvalVar (PrefixExpr _ op) = getEvalVar op
getEvalVar (PostfixExpr _ op) = getEvalVar op
getEvalVar (InfixExpr _ op1 op2) = getEvalVar op1 ++ getEvalVar op2
-- | Gets the variables defined by the sub-evaluations.
getEvaluationVars :: SemanticEvaluationRule -> [SemanticIdentifier]
getEvaluationVars (SemanticEvaluationRule _ _ _ _ evals) = getVar <$> evals
where
getVar (SemanticEvaluation _ ident _) = ident
-- | Checks the semantic form of the semantic evaluation rules.
--
-- This checks the subterm criteria, and also the evaluation list form.
verifySemanticForm
:: VState (SemanticEvaluationRuleList, NTCountMap)
-> VState RuleTag
verifySemanticForm input = do
(rules, nts) <- input
let ntIndexPairs = getNTsFromSubEvaluations <$> rules
subResults <- sequence $ (\(x,_) -> (verifyNonTerminal . return) x) <$>
concat ntIndexPairs
let result = rights $ concat $ fmap (checkNT nts) <$> ntIndexPairs
subResult = foldl tagPlus Terminates subResults
if null result then
return $ Terminates `tagPlus` subResult
else
return $
(DoesNotTerminate $ (resultToErr NonExistentSubterms) <$> result)
`tagPlus` subResult
-- | Converts an error string into a non-termination result with given type.
resultToErr
:: NonTerminationType
-> String
-> (NonTerminationType, [NonTerminal], String)
resultToErr ntType str = (ntType, [], str)
-- | Checks if a non-terminal is defined in the syntax properly.
checkNT :: NTCountMap -> (NonTerminal, Integer) -> Either Bool String
checkNT nts (nt, ix) =
if (nt `elem` M.keys nts) && (ix < M.findWithDefault 0 nt nts) then
Left True
else
Right $ "Non-terminal " ++ show nt ++ " with index " ++ show ix
++ " is not defined in this production."
-- | Gets the non-terminals and their indices used in the sub-evaluations.
getNTsFromSubEvaluations :: SemanticEvaluationRule -> [(NonTerminal, Integer)]
getNTsFromSubEvaluations (SemanticEvaluationRule _ _ _ _ evals) =
concat $ getItems <$> evals
where
getItems (SemanticEvaluation _ _ evalBlock) =
extractEval evalBlock
extractEval (Left (SyntaxAccessBlock nt (SyntaxAccessor ix))) =
[(nt, ix)]
extractEval (Right (SpecialSyntaxRule _ _ args)) =
concat $ extractFromArg <$> args
extractFromArg (Left (SyntaxAccessBlock nt (SyntaxAccessor ix))) =
[(nt, ix)]
extractFromArg (Right _) = [] -- These exist, as checked by the parser.
-- | Checks that the guards are complete across semantic evaluation rules.
--
-- It also checks that the guards only refer to variables defined as part of the
-- sub-evaluations.
-- TODO Complete guard verification.
verifyGuards :: VState SemanticEvaluationRuleList -> VState RuleTag
verifyGuards input = do
guardVariablesComplete <- verifyGuardSubtermVariables input
guardsCompleteOverDomain <- verifyGuardsComplete input
let tests = [guardVariablesComplete, guardsCompleteOverDomain] :: [RuleTag]
return $ foldl tagPlus Terminates tests
-- | Verifies that the patterns in the guards are complete over the domain.
--
-- As this problem is intractable in general, the check is approximated by
-- requiring a catch-all (unguarded) clause among the rules.
verifyGuardsComplete :: VState SemanticEvaluationRuleList -> VState RuleTag
verifyGuardsComplete input = do
rules <- input
let guards = extractGuards <$> rules
containsCatchallGuard = L.find null guards
case containsCatchallGuard of
Nothing -> return $ DoesNotTerminate [
(
IncompleteGuards,
[],
"Guards must contain a catch-all clause.")
]
Just _ -> return Terminates
-- | Checks that the pattern guards rely only on appropriate variables.
--
-- Such variables should only be defined in the subterm evaluations of the
-- semantic rules.
verifyGuardSubtermVariables
:: VState SemanticEvaluationRuleList
-> VState RuleTag
verifyGuardSubtermVariables input = do
rules <- input
let guards = extractGuards <$> rules :: [[SemanticRestriction]]
guardVars = (L.nub . concatMap extractGuardVars) <$> guards
evalVars <- extractSubtermVariables $ return rules
let groups = zip guardVars evalVars
result = and $ checkExists <$> groups
if result then
return Terminates
else
return $ DoesNotTerminate [
(
IncompleteGuards,
[],
"Guard refers to variables not defined in sub-evaluations."
)
]
where
extractGuardVars :: SemanticRestriction -> [SemanticIdentifier]
extractGuardVars (SemVariable semId) = [semId]
extractGuardVars (SemConstant _) = []
extractGuardVars (SemInfixExpr _ l r) =
extractGuardVars l ++ extractGuardVars r
checkExists :: (Eq a) => ([a], [a]) -> Bool
checkExists ([], _) = True
checkExists (x:xs, ys) = (x `elem` ys) && checkExists (xs, ys)
-- | Extracts the guard patterns from an evaluation rule.
extractGuards :: SemanticEvaluationRule -> [SemanticRestriction]
extractGuards
(SemanticEvaluationRule _ _ _ (SemanticRestrictionList guards) _) = guards
-- | Extracts the target variables from the subterm evaluations.
extractSubtermVariables
:: VState SemanticEvaluationRuleList
-> VState [[SemanticIdentifier]]
extractSubtermVariables input = do
ruleList <- input
let evaluations = extractEvaluations <$> ruleList
return $ fmap extractEvalVars <$> evaluations
where
extractEvalVars (SemanticEvaluation _ var _) = var
extractEvaluations (SemanticEvaluationRule _ _ _ _ evals) = evals
-- | Verifies a syntax alternative where the semantics are composed indirectly.
--
-- In general, the inference is restricted to a single instance of a terminal
-- or non-terminal symbol.
verifySubSemantics :: VState SyntaxAlternative -> VState RuleTag
verifySubSemantics alt = do
(SyntaxAlternative terms _) <- alt
if length terms > 1 then
return $ DoesNotTerminate [
(UnableToInfer, [], "Cannot infer semantics for rule.")
]
else do
let result = (verifySyntaxTerm . return) <$> terms
combineTerminationResults result
-- | Verifies a syntax term.
--
-- Exceptions are treated as purely syntactic, and hence are not verified
-- themselves.
verifySyntaxTerm :: VState SyntaxTerm -> VState RuleTag
verifySyntaxTerm term = do
(SyntaxTerm factor _) <- term
verifySyntaxFactor $ return factor
-- | Verifies a syntactic factor.
--
-- Repetition is purely a syntactic operation and is ignored here.
verifySyntaxFactor :: VState SyntaxFactor -> VState RuleTag
verifySyntaxFactor factor = do
(SyntaxFactor repeat primary) <- factor
case repeat of
(Just _) -> return $ DoesNotTerminate [
(UnableToInfer,
[], "Cannot infer semantics for rule with repetition.")
]
Nothing -> verifySyntaxPrimary $ return primary
-- | Verifies a syntax primary.
verifySyntaxPrimary :: VState SyntaxPrimary -> VState RuleTag
verifySyntaxPrimary primary = do
syntaxPrimary <- primary
case syntaxPrimary of
(SyntaxSpecial _) -> fail "Cannot verify special syntax."
(TerminalProxy _) -> return Terminates
(NonTerminalProxy nonTerminal) -> verifyNonTerminal $ return nonTerminal
_ -> return $
DoesNotTerminate [
(UnableToInfer, [], "Cannot infer semantics for rule.")
]
-- | Verifies a given non-terminal.
--
-- Uses the 'Touched' value constructor and stack value checks to ensure that
-- any mutually-recursive productions can be verified correctly.
verifyNonTerminal :: VState NonTerminal -> VState RuleTag
verifyNonTerminal nt = do
nonTerminal <- nt
prodMap <- gets productions
truths <- gets truths
let ntRule = M.lookup nonTerminal prodMap
if nonTerminal `elem` truths then do
return Terminates
else do
-- Production stack frame
modify (pushProductionFrame nonTerminal)
prodTrace <- gets productionTrace
termResult <- case ntRule of
Nothing -> checkTruthsForTermination nt
Just (tag, body) -> do
case tag of
Terminates -> return tag
(DoesNotTerminate _) -> return tag
-- Only process if there is no termination tag assigned
_ -> do
modify (updateRuleTag Touched nonTerminal)
if nonTerminal `elem` tail prodTrace then
return tag
else do
ntTag <- verifyRule $ return body
modify (updateRuleTag ntTag nonTerminal)
return ntTag
modify popProductionFrame
case termResult of
(DoesNotTerminate xs) ->
return $ DoesNotTerminate $ addTrace nonTerminal <$> xs
other -> return other
where
addTrace nonTerm (termKind, failTrace, msg) =
(termKind, nonTerm : failTrace, msg)
-- | Checks if a given non-terminal terminates in the truths block.
--
-- These truths are the trivial base-cases for the language semantics, and hence
-- are taken as given by the proof engine.
checkTruthsForTermination :: VState NonTerminal -> VState RuleTag
checkTruthsForTermination nt = do
nonTerminal <- nt
semanticTruths <- gets truths
if nonTerminal `elem` semanticTruths then
return Terminates
else
return $ DoesNotTerminate
[
(Incomplete, [],
"No ground truth for " ++ show nonTerminal ++ " and no\
\ corresponding rule defined.")
]
-- | Checks if a given language rule has explicitly defined semantics.
hasSemantics :: SyntaxAlternative -> Bool
hasSemantics (SyntaxAlternative _ lrs) = case lrs of
Just _ -> True
Nothing -> False
-- | Combines a set of subterm termination values into a result value for the
-- term.
combineTerminationResults
:: [VState RuleTag]
-> VState RuleTag
combineTerminationResults results = do
items <- sequence results
return $ foldl tagPlus Terminates items
|
using System.Collections.Generic;
using System.Threading.Tasks;
namespace CovidDataLake.ContentIndexer.Extraction.TableWrappers
{
public interface IFileTableWrapper
{
string Filename { get; set; }
Task<IEnumerable<KeyValuePair<string, IAsyncEnumerable<string>>>> GetColumns();
}
}
|
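# Force-remove a stuck StatefulSet and its pods: clear the finalizers first, then
# force-delete with zero grace period (destructive; use only as a last resort).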
kubectl patch sts datanode -p '{"metadata":{"finalizers":null}}'
kubectl delete sts datanode --grace-period=0 --force
kubectl patch pod datanode-0 -p '{"metadata":{"finalizers":null}}'
kubectl patch pod datanode-1 -p '{"metadata":{"finalizers":null}}'
kubectl patch pod datanode-2 -p '{"metadata":{"finalizers":null}}'
kubectl delete pods datanode-0 datanode-1 datanode-2 --grace-period=0 --force
|
package cpup.mc.lib.util
import net.minecraft.nbt.{NBTBase, NBTTagCompound, NBTTagList}
import scala.collection.mutable.ListBuffer
import net.minecraftforge.common.util.Constants.NBT
object NBTUtil {
def readList(listNBT: NBTTagList) = {
val list = new ListBuffer[NBTTagCompound]
for(i <- 0 until listNBT.tagCount) {
list += listNBT.getCompoundTagAt(i)
}
list.toList
}
def writeList(list: Seq[NBTBase]) = {
val listNBT = new NBTTagList
list.foreach(listNBT.appendTag)
listNBT
}
def compound(nbt: NBTTagCompound, key: String) = {
if(nbt.hasKey(key, NBT.TAG_COMPOUND)) {
nbt.getCompoundTag(key)
} else {
val c = new NBTTagCompound
nbt.setTag(key, c)
c
}
}
} |
package com.piggymetrics.account;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.commons.security.AccessTokenContextRelay;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.annotation.Bean;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableOAuth2Client;
@SpringBootApplication
@EnableDiscoveryClient
@EnableOAuth2Client
@EnableFeignClients
@EnableGlobalMethodSecurity(prePostEnabled = true)
public class AccountApplication {
@Bean
public AccessTokenContextRelay accessTokenContextRelayBean() {
return new AccessTokenContextRelay(null);
}
public static void main(String[] args) {
SpringApplication.run(AccountApplication.class, args);
}
}
|
using System;
using System.Collections.Generic;
namespace csbcgf
{
public abstract class TargetlessSpellCardComponent : CardComponent, ITargetlessSpellCardComponent
{
public TargetlessSpellCardComponent(int mana) : base(mana)
{
}
protected TargetlessSpellCardComponent(ManaCostStat manaCostStat,
List<IReaction> reactions)
: base(manaCostStat, reactions)
{
}
public abstract void Cast(IGame game);
}
}
|
/* eslint-disable react-hooks/exhaustive-deps */
import { useEffect, useState } from 'react';
import Prismic from '@prismicio/client';
import { toast } from 'react-toastify';
import { getPrismicClient } from '../../services/prismic';
import { ProjectsCard } from '../../components/reusable/ProjectsCard';
import { LoadingScreen } from '../../components/LoadingScreen';
import { ProjetosContainer } from './styles';
type getProjectsApiPrismicProps = {
slug: string;
title: string;
type: string;
thumbnail: string;
}
export function Projetos() {
const [projects, setProjects] = useState<getProjectsApiPrismicProps[]>([]);
const [loading, setLoading] = useState(true);
const prismic = getPrismicClient();
useEffect(() => {
async function getProjectsApiPrismic() {
try {
setLoading(true);
const projectResponse = await prismic.query(
[Prismic.Predicates.at('document.type', 'projeto')],
{ orderings: '[document.first_publication_date desc]' },
);
const projectFormated = projectResponse.results.map(project => ({
slug: project.uid as string,
title: project.data.title as string,
type: project.data.type as string,
thumbnail: project.data.thumbnail.url as string,
}));
setProjects(projectFormated);
setLoading(false);
}
      catch (err) {
        toast.warn('Could not load the information. Please try again.');
        // Also clear the loading flag on failure so the spinner doesn't hang forever.
        setLoading(false);
      }
}
getProjectsApiPrismic();
}, []);
return (
<ProjetosContainer>
{loading && (
<div className='carrying'>
          <LoadingScreen /> <p>Loading...</p>
</div>
)}
<div>
{projects.map((project, key) => (
<ProjectsCard
key={key}
title={project.title}
type={project.type}
slug={project.slug}
imgUrl={project.thumbnail}
/>
))}
</div>
</ProjetosContainer>
);
} |
/**
* @license Copyright (c) 2003-2022, CKSource - Holding sp. z o.o. All rights reserved.
* For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
/* globals document */
// Sometimes the request to external resources (like `badge.fury.io` or `emojics.com`) fails for unknown reasons,
// so ignore all navigation timeouts for framework integration docs.
const metaElement = document.createElement( 'meta' );
metaElement.name = 'x-cke-crawler-ignore-patterns';
metaElement.content = JSON.stringify( {
'navigation-error': 'timeout'
} );
document.head.appendChild( metaElement );
|
<?php
declare(strict_types=1);
/**
* Highlighter
*
* Copyright (C) 2016, Some right reserved.
*
* @author Kacper "Kadet" Donat <[email protected]>
*
* Contact with author:
* Xmpp: [email protected]
* E-mail: [email protected]
*
* From Kadet with love.
*/
namespace Kadet\Highlighter\bin\Commands\Benchmark;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Helper\Table;
use Symfony\Component\Console\Helper\TableCell;
use Symfony\Component\Console\Helper\TableSeparator;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;
class AnalyzeCommand extends Command
{
protected function execute(InputInterface $input, OutputInterface $output)
{
/**
* @noinspection PhpComposerExtensionStubsInspection
* Adding ext-json as dev-only dependency still doesn't
* prevent this issue so we have to ignore this inspection
*/
$json = json_decode(file_get_contents($input->getArgument('input')[0]), true);
$output->writeln(sprintf(
"Date: <info>%s</info> Formatter: <info>%s</info>, Comment: <info>%s</info>",
date('d.m.Y H:i:s', $json['timestamp']),
$json['formatter'],
isset($json['comment']) ? $json['comment'] : 'none'
));
$table = new Table($output);
$suffix = $input->getOption('relative') ? 'bytes/s' : 'ms';
$table->addRow(['set', "min [$suffix]", "avg [$suffix]", "max [$suffix]", "std dev [$suffix]"]);
$summary = [];
foreach ($json['results'] as $file => $data) {
$this->separator($file, $table);
foreach ($data['times'] as $set => $times) {
$result = array_map(function ($time) use ($data, $input) {
return $input->getOption('relative') ? $data['size'] / $time : $time * 1000;
}, $times);
$this->entry($result, $set, $table);
$summary[$set][] = array_sum($result) / count($result);
}
if (!isset($data['memory'])) {
continue;
}
foreach ($data['memory'] as $set => $memory) {
$result = array_map(function ($memory) use ($data, $input) {
$bytes = $input->getOption('relative') ? $memory / $data['size'] : $memory;
return $this->formatBytes($bytes);
}, $memory);
$this->entry($result, $set, $table);
}
}
if (!$input->hasParameterOption('--summary')) {
$table->render();
}
$summary = array_filter($summary, function ($key) use ($input) {
return fnmatch($input->getOption('summary') ?: '*', $key);
}, ARRAY_FILTER_USE_KEY);
$max = max(array_map('strlen', array_keys($summary)));
foreach ($summary as $name => $set) {
$output->writeln(sprintf(
"<comment>%s</comment> %s %s",
str_pad($name, $max, ' ', STR_PAD_LEFT),
$this->format($input->getOption('relative') ? array_sum($set) / count($set) : array_sum($set)),
$suffix
));
}
}
protected function separator($file, Table $table)
{
$table->addRows([
new TableSeparator(),
[new TableCell($file, ['colspan' => 5])],
new TableSeparator(),
]);
}
protected function entry($result, $set, Table $table)
{
$min = min($result);
        $avg = $this->average($result);
$max = max($result);
$dev = $this->stddev($result);
$table->addRow([
$set,
$this->format($min),
$this->format($avg),
$this->format($max),
sprintf("%s (%d%%)", $this->format($dev), $dev / $avg * 100),
]);
}
private function format($number)
{
return is_numeric($number) ? number_format((float) $number, 2) : $number;
}
    private function average(array $result)
{
return array_sum($result) / count($result);
}
private function stddev($result)
{
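        // Population standard deviation: the square root of the mean squared deviation from the mean.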
$mean = array_sum($result) / count($result);
return sqrt(array_sum(array_map(function ($result) use ($mean) {
return pow((float) $result - $mean, 2);
}, $result)) / count($result));
}
private function formatBytes($bytes)
{
$units = array('B', 'KB', 'MB', 'GB', 'TB');
$bytes = max($bytes, 0);
$pow = floor(($bytes ? log($bytes) : 0) / log(1024));
$pow = min($pow, count($units) - 1);
$bytes /= (1 << (10 * $pow));
return $this->format($bytes);
}
protected function configure()
{
$this
->setName('benchmark:analyze')
            ->setDescription('Analyzes KeyLighter benchmark results')
->addArgument('input', InputArgument::IS_ARRAY | InputArgument::REQUIRED, 'Input JSON file(s)')
->addOption('relative', 'r', InputOption::VALUE_NONE, 'Show relative times?')
->addOption('summary', 'u', InputOption::VALUE_OPTIONAL, 'Show summary times?', '*');
}
}
|
"""
Sally averages 5 strokes a hole when she plays golf. One day, she
took 40 strokes to complete the first nine holes. Her partner conjectured
that she would probably regress to the mean and take 50 strokes to complete
the next nine holes. Do you agree with her partner?
"""
"""
No -
Regression to the mean would imply that the likelihood of the next 5 strokes
in each set of 50 strokes is more likely to be a hole, not necessarily that 50
strokes would more likely result in 10 holes.
"""
|
# FormSelect
We use [Ember Power Select](https://github.com/cibernox/ember-power-select/)
under the hood to deliver the best select experience. Please refer to their
documentation for further customization.
In the example below, we have a label, hint, error validation, search and `allowClear`.
## API
<ArgsTable @of="FormSelect" />
|
require 'spec_helper'
describe 'toughen::cron' do
context 'with default params' do
it { should contain_file('/etc/cron.allow').with_content(/^root$/) }
end
context 'with additional users' do
let :params do { :allow_users => ['root','test'] } end
it { should contain_file('/etc/cron.allow').with_content(/^test$/) }
end
end
|
import React from 'react'
import constant from 'lodash/constant'
export default function Constant(props) {
return props.children(constant(props.value))
}
Constant.defaultProps = {
children: value => value
} |
mod message;
mod message_header;
pub mod payload;
pub use self::message::{Message, to_raw_message};
pub use self::message_header::MessageHeader;
pub use self::payload::Payload;
|
package com.coderefer.pawz.views
import androidx.compose.material.MaterialTheme
import androidx.compose.material.Surface
import androidx.compose.material.Text
import androidx.compose.runtime.Composable
// Start building your app here!
@Composable
fun PawzMain(toggleTheme: ()->Unit) {
Surface(color = MaterialTheme.colors.background) {
Text(text = "Ready... Set... GO!")
}
} |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from skimage.color import label2rgb
from skimage.color import rgb2hsv
from skimage.filters import threshold_otsu
from skimage.morphology import closing
from skimage.morphology import dilation
from skimage.morphology import disk
from skimage.morphology import erosion
from skimage.morphology import opening
from skimage.measure import label, regionprops
from skimage.segmentation import clear_border
def get_channel_hsv(hsv_image, channel="saturation"):
"""Get only particular channel values from hsv image
Parameters
----------
    hsv_image: np.uint8 image
Input hsv image
channel: string
'hue'/'saturation'/'value'
"""
    assert channel in ["hue", "saturation", "value"], "Unknown channel specified"
if channel == "hue":
return hsv_image[:, :, 0]
if channel == "saturation":
return hsv_image[:, :, 1]
if channel == "value":
return hsv_image[:, :, 2]
def _get_kernel(kernel_size, use_disk=True):
if not use_disk:
kernel = np.ones((kernel_size, kernel_size), np.uint8)
else:
kernel = disk(kernel_size)
return kernel
def imerode(image, kernel_size=5, use_disk=True):
"""Erode an image.
Parameters
----------
image: array_like
np.uint8 binary thresholded image
kernel_size: int
Integer
use_disk: bool
Use disk instead of a NxN kernel
Returns
-------
eroded: array_like
np.uint8 eroded image
"""
kernel = _get_kernel(kernel_size, use_disk)
eroded = erosion(image, kernel)
return eroded
def imdilate(image, kernel_size=5, use_disk=True):
"""Dilate an image
Parameters
----------
image: array_like
np.uint8 binary thresholded image
kernel_size: int
Integer
Returns
-------
dilated: array_like
np.uint8 eroded image
"""
kernel = _get_kernel(kernel_size, use_disk)
dilated = dilation(image, kernel)
return dilated
def imopening(image, kernel_size=5, use_disk=True):
"""Open an image.
Parameters
----------
image: array_like
np.uint8 binary thresholded image
kernel_size: int
Integer
Returns
-------
opened: array_like
np.uint8 opened
"""
kernel = _get_kernel(kernel_size, use_disk)
opened = opening(image, kernel)
return opened
def imclosing(image, kernel_size=5, use_disk=True):
"""Close an image.
Parameters
----------
image: array_like
np.uint8 binary thresholded image
kernel_size: int
Integer
Returns
-------
closed: array_like
np.uint8 opened
"""
kernel = _get_kernel(kernel_size, use_disk)
closed = closing(image, kernel)
return closed
def open_close(image, open_kernel_size=5, close_kernel_size=5, use_disk=True):
"""Open followed by closing an image.
Parameters
----------
image: array_like
np.uint8 binary thresholded image
open_kernel_size: int
Integer
close_kernel_size: int
Integer
Returns
-------
closed: array_like
np.uint8 opened-closed
"""
opened = imopening(image, open_kernel_size, use_disk)
closed = imclosing(opened, close_kernel_size, use_disk)
return closed
def close_open(image, open_kernel_size=5, close_kernel_size=5, use_disk=True):
"""Close followed by opening an image.
Parameters
----------
image: array_like
np.uint8 binary thresholded image
open_kernel_size: int
Integer
close_kernel_size: int
Integer
Returns
-------
closed: array_like
np.uint8 opened-closed
"""
closed = imclosing(image, close_kernel_size, use_disk)
opened = imopening(closed, open_kernel_size, use_disk)
return opened
def otsu_thresholding(
rgb_image,
channel="saturation",
open_kernel_size=5,
close_kernel_size=5,
use_disk=True,
):
"""Perform OTSU thresholding followed by closing-then-opening
rgb_image: np.uint8
Input RGB image
channel: string
Channel on which to perform thresholding
open_kernel_size: int
Size of opening kernel
close_kernel_size: int
Size of closing kernel
use_disk: bool
Should use disk instead of a square
"""
hsv_image = rgb2hsv(rgb_image)
hsv_image = np.array(hsv_image)
hsv_ch = get_channel_hsv(hsv_image, channel)
otsu = threshold_otsu(hsv_ch)
thresholded = hsv_ch > otsu
close_then_open = close_open(
thresholded, open_kernel_size, close_kernel_size, use_disk
)
assert close_then_open.dtype == bool, "Mask not boolean"
return close_then_open
def contours_and_bounding_boxes(bw_image, rgb_image):
"""Extract contours and bounding_boxes.
Parameters
----------
bw_image: np.uint8
Input thresholded image
rgb_image: np.uint8
Input rgb image
Returns
-------
    image_label_overlay: array_like
        Label overlay image
    bounding_boxes: list
        List of ((min_col, min_row), width, height) tuples
"""
cleared = clear_border(bw_image)
label_image = label(cleared)
image_label_overlay = label2rgb(label_image, image=rgb_image)
bounding_boxes = []
for region in regionprops(label_image):
minr, minc, maxr, maxc = region.bbox
box = ((minc, minr), maxc - minc, maxr - minr)
bounding_boxes.append(box)
return image_label_overlay, bounding_boxes
def plot_contours(bw_image, rgb_image, ax=None):
"""Plot contours over a otsu thresholded binary image.
Parameters
----------
bw_image: np.uint8
Input
"""
image_label_overlay, bounding_boxes = contours_and_bounding_boxes(
bw_image, rgb_image
)
if not ax:
fig, ax = plt.subplots(figsize=(10, 6))
else:
fig = ax.get_figure()
ax.imshow(rgb_image)
for xy, width, height in bounding_boxes:
rect = mpatches.Rectangle(
xy, width, height, fill=False, edgecolor="red", linewidth=2
)
ax.add_patch(rect)
ax.set_axis_off()
fig.tight_layout()
return ax, bounding_boxes
|
<?php
/**
* Created by IntelliJ IDEA.
* User: gabrielgagno
* Date: 5/16/16
* Time: 1:52 PM
*/
return array(
'pdo_drivers' => array(
'mysql' => 'pdo_mysql'
)
); |
import { isString } from './utils';
import type { Template, TemplateMessage, TemplatePlural } from './types';
import { TemplateType } from './types';
const VAR_KEY = 'var';
const WRAPPER = '%%%';
const TYPES = 'num|number|plural|date';
const PLURAL_RULES = '=0|zero|one|two|few|many|other';
const PLURAL_ALL_REGEXP = new RegExp(`((${PLURAL_RULES})\\s{)`, 'g');
const PLURAL_ITEM_REGEXP = new RegExp(`^(${PLURAL_RULES})\\s{(.+?)}$`);
const TAG_ALL_REGEXP = /(<(\w+?)>(.+?)<\/\w+?>|<(\w+?)(| )\/>)/g;
const TAG_ITEM_REGEXP = /^<(\w+?)(?:(?:| )\/>|>(.+?)<\/\w+?>)$/;
const TPL_ALL_REGEXP = new RegExp(`{(\\w+?)(, (${TYPES})(, ((${PLURAL_RULES}) {.+?}+|\\w+?)|)|)}`, 'g');
const TPL_ITEM_REGEXP = new RegExp(`^{(\\w+?)(?:, (${TYPES})(?:, ((?:${PLURAL_RULES}) {.+?}+|\\w+?)|)|)}$`);
function getFormatType(source?: string): TemplateType | undefined {
if (source === 'num' || source === 'number') return TemplateType.number;
if (source === 'plural') return TemplateType.plural;
if (source === 'date') return TemplateType.date;
return undefined;
}
function splitMessage(
message: string,
matches: RegExpMatchArray,
mapFunc: (match: string, val: string) => string | Template,
): TemplateMessage {
return matches
.reduce((acc, match, idx) => acc.replace(match, `${WRAPPER}${VAR_KEY}${idx}${WRAPPER}`), message)
.split(WRAPPER)
.filter(val => val !== '')
.map((val: string) => {
const m = val.match(new RegExp(`^${VAR_KEY}(\\d+?)$`));
      return m && m[1] ? mapFunc(matches[Number(m[1])], val) : val;
});
}
function trimArray(arr: Template): Template {
return arr.reduceRight((acc: unknown[], item) => (
(!item && !acc.length) ? acc : [item, ...acc]
), []) as Template;
}
function isRuleMatch(match: RegExpMatchArray | null): match is [string, Intl.LDMLPluralRule, string] {
return match !== null && match.length === 3;
}
function getTemplate(match: RegExpMatchArray | null): (false | [string, string?, string?]) {
return match !== null && [...match].filter(Boolean).slice(1) as [string, string?, string?];
}
function parsePlural(value: string): Readonly<TemplatePlural> {
return value
.replace(PLURAL_ALL_REGEXP, '\n$1')
.split('\n')
.map(val => val.trim().match(PLURAL_ITEM_REGEXP))
.filter(isRuleMatch)
.reduce<TemplatePlural>((acc, item) => {
acc[item[1]] = parser(item[2]);
return acc;
}, {});
}
function parseValue(type?: TemplateType, value?: string): string | Readonly<TemplatePlural> | undefined {
return type === TemplateType.plural && value
? parsePlural(value)
: value;
}
export function parser(message: string): TemplateMessage {
const result: TemplateMessage = [message]
.reduce<(string | Template)[]>((acc, value) => {
const matches = value.match(TAG_ALL_REGEXP);
acc.push(
...(matches
? splitMessage(message, matches, (match: string, val: string) => {
const tpl = getTemplate(match.match(TAG_ITEM_REGEXP));
return tpl ? trimArray([tpl[0], TemplateType.tag, tpl[1] && parser(tpl[1])]) : val;
})
: [value]),
);
return acc;
}, [])
.reduce<(string | Template)[]>((acc, value) => {
const matches = isString(value) && value.match(TPL_ALL_REGEXP);
acc.push(
...(matches
? splitMessage(value as string, matches, (match: string, val: string) => {
const tpl = getTemplate(match.match(TPL_ITEM_REGEXP));
if (!tpl) return val;
const type = getFormatType(tpl[1]);
return trimArray([tpl[0], type, parseValue(type, tpl[2])]);
})
: [value]),
);
return acc;
}, []);
return (result.length === 1 && typeof result[0] === 'string') ? result[0] : result;
}
|
package gocoder
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strings"
"github.com/orcaman/concurrent-map"
)
type GoPackageOption func(*GoPackage) error
type GoPackage struct {
options *Options
pkgPath string
pkgDir string
goFiles cmap.ConcurrentMap // map[string]*GoFile
files []string
inGoRoot bool
}
func NewGoPackage(pkgPath string, options ...Option) (goPackage *GoPackage, err error) {
pkg := &GoPackage{
pkgPath: pkgPath,
options: &Options{},
goFiles: cmap.New(),
}
if err = pkg.options.init(options...); err != nil {
return
}
if len(pkg.options.GoPath) == 0 {
pkg.options.Fallback(OptionGoPath(os.Getenv("GOPATH")))
}
if len(pkg.options.GoRoot) == 0 {
goroot := ""
goroot, err = execCommand("go", "env", "GOROOT")
if err != nil {
return
}
pkg.options.Fallback(OptionGoRoot(goroot))
}
fiInGoPath, _ := os.Stat(filepath.Join(pkg.options.GoPath, "/src/", pkg.pkgPath))
fiInGoRoot, _ := os.Stat(filepath.Join(pkg.options.GoRoot, "/src/", pkg.pkgPath))
if fiInGoPath != nil {
pkg.pkgDir = filepath.Join(pkg.options.GoPath, "/src/", pkg.pkgPath)
} else if fiInGoRoot != nil {
pkg.pkgDir = filepath.Join(pkg.options.GoRoot, "/src/", pkg.pkgPath)
pkg.inGoRoot = true
} else {
		return nil, fmt.Errorf("package %s does not exist in GOPATH or GOROOT", pkgPath)
}
fi, err := os.Stat(pkg.pkgDir)
if err != nil {
return
}
if !fi.IsDir() {
err = fmt.Errorf("package path of %s is not a dir", pkg.pkgDir)
return
}
if err = pkg.load(); err != nil {
return
}
if err = pkg.checkCircularImport(); err != nil {
return
}
goPackage = pkg
return
}
func (p *GoPackage) Name() string {
return filepath.Base(p.pkgPath)
}
func (p *GoPackage) InGoRoot() bool {
return p.inGoRoot
}
func (p *GoPackage) Options() Options {
return *p.options
}
func (p *GoPackage) Path() string {
return p.pkgPath
}
func (p *GoPackage) PackageDir() string {
return p.pkgDir
}
func (p *GoPackage) checkCircularImport() (err error) {
return
}
func (p *GoPackage) NumFile() int {
return len(p.files)
}
func (p *GoPackage) File(i int) *GoFile {
filename := p.files[i]
gf, exist := p.goFiles.Get(filename)
if exist {
return gf.(*GoFile)
}
opts := p.options.Copy()
opts = append(opts, OptionGoPackage(p))
gofile, err := NewGoFile(filename, opts...)
if err != nil {
panic(err)
}
if !p.goFiles.SetIfAbsent(filename, gofile) {
gf, _ := p.goFiles.Get(filename)
return gf.(*GoFile)
}
return gofile
}
func (p *GoPackage) NumFuncs() int {
num := 0
for i := 0; i < p.NumFile(); i++ {
num += p.File(i).NumFuncs()
}
return num
}
func (p *GoPackage) Func(funcIndex int) *GoFunc {
for i := 0; i < p.NumFile(); i++ {
max := p.File(i).NumFuncs()
if funcIndex >= max {
funcIndex -= max
continue
}
return p.File(i).Func(funcIndex)
}
return nil
}
func (p *GoPackage) NumTypes() int {
num := 0
for i := 0; i < p.NumFile(); i++ {
num += p.File(i).NumTypes()
}
return num
}
func (p *GoPackage) Type(typeIndex int) *GoExpr {
for i := 0; i < p.NumFile(); i++ {
max := p.File(i).NumTypes()
if typeIndex >= max {
typeIndex -= max
continue
}
return p.File(i).Type(typeIndex)
}
return nil
}
func (p *GoPackage) NumVars() int {
num := 0
for i := 0; i < p.NumFile(); i++ {
num += p.File(i).NumVars()
}
return num
}
func (p *GoPackage) Var(varIndex int) *GoExpr {
for i := 0; i < p.NumFile(); i++ {
max := p.File(i).NumVars()
if varIndex >= max {
varIndex -= max
continue
}
return p.File(i).Var(varIndex)
}
return nil
}
func (p *GoPackage) FindType(typeName string) (goType *GoExpr, exist bool) {
for i := 0; i < p.NumFile(); i++ {
goType, exist = p.File(i).FindType(typeName)
if exist {
return
}
}
return
}
func (p *GoPackage) FindFunc(funcName string) (fn *GoFunc, exist bool) {
for i := 0; i < p.NumFuncs(); i++ {
if p.Func(i).Name() == funcName {
return p.Func(i), true
}
}
return nil, false
}
func (p *GoPackage) load() error {
files, err := ioutil.ReadDir(p.pkgDir)
if err != nil {
return err
}
for i := 0; i < len(files); i++ {
if files[i].IsDir() {
continue
}
if strings.HasPrefix(files[i].Name(), ".") {
continue
}
if strings.HasSuffix(files[i].Name(), "_test.go") {
continue
}
if filepath.Ext(files[i].Name()) != ".go" {
continue
}
p.files = append(p.files, filepath.Join(p.pkgDir, files[i].Name()))
}
return nil
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using TEP.Application.Common.Interfaces;
using TEP.Application.Common.Models;
using TEP.Infra.AuthProvider.Exceptions;
using TEP.Shared;
namespace TEP.Infra.AuthProvider
{
public class IdentityService : IIdentityService
{
public Task<ServiceResponse<string>> CreateUserAsync(string userName, string password)
{
throw new NotImplementedException();
}
public Task<ServiceResponse<bool>> DeleteUserAsync(string userId)
{
throw new NotImplementedException();
}
        // Hard-coded in-memory user store shared by the lookup methods below.
        private static readonly List<ApplicationUser> Users = new List<ApplicationUser>
        {
            new ApplicationUser { Id = 1, Username = "rico", Password = "r1c0", Role = UserRoles.Manager },
            new ApplicationUser { Id = 2, Username = "tom", Password = "mot", Role = UserRoles.Admin },
            new ApplicationUser { Id = 3, Username = "joao", Password = "jonh", Role = UserRoles.Operator }
        };
        public Task<ServiceResponse<ApplicationUser>> GetUserAsync(string userId)
        {
            var id = Int32.Parse(userId);
            return Task.FromResult(new ServiceResponse<ApplicationUser>()
            {
                Data = Users.FirstOrDefault(x => x.Id == id),
                Success = true
            });
        }
        public Task<ServiceResponse<string>> GetUserNameAsync(string userId)
        {
            var id = Int32.Parse(userId);
            return Task.FromResult(new ServiceResponse<string>()
            {
                Data = Users.FirstOrDefault(x => x.Id == id)?.Username,
                Success = true
            });
        }
        public Task<ServiceResponse<ApplicationUser>> ValidateLoginAsync(string userName, string password)
        {
            var user = Users.FirstOrDefault(x => x.Username == userName && x.Password == password);
            if (user == null)
            {
                throw new InvalidUserException("Invalid Username and/or password.");
            }
            return Task.FromResult(new ServiceResponse<ApplicationUser>()
            {
                Data = user,
                Success = true
            });
        }
}
}
|
package ca.ualberta.cs.models;
/**
*
* @author wyatt
*
* Stores the last list to be selected
*
* @param <T>
*/
abstract public class SelectedPostModelList<T extends PostModel> {
protected PostModelList<T> theList = null;
protected void setSelectedPostModelList(PostModelList<T> theList) {
this.theList = theList;
}
protected PostModelList<T> getSelectedPostModelList() {
return this.theList;
}
}
|
module SynapseClient
class QuestionSet
attr_accessor :id, :questions
def initialize(opts={})
@id = opts.id
@questions = opts.questions.map{|q| Question.new(q)}
end
def successful?
true
end
#
class Question
attr_reader :id, :question, :answers
def initialize(opts = {})
@id = opts.id
@question = opts.question
@answers = opts.answers.map{|a| Answer.new(a)}
end
end
#
class Answer
attr_reader :id, :answer
def initialize(opts = {})
@id = opts.id
@answer = opts.answer
end
end
end
end
|
# explo-cli
A scaffolding tool for automating front-end project setup.
The scaffold offers React.js or Vue.js templates for desktop projects, plus a React.js template for mobile projects.
* Install
```
npm install explo-cli -g / yarn global add explo-cli
```
* Usage
* Initialize a project from a template
```
explo init <projectName>
```
or
```
explo i <projectName>
```
* Show the version number
```
explo -v
```
* Test a global install from the current local directory
```
yarn link
```
## Goals of front-end build automation
Quickly generate the directory skeleton for a new project. Every project shares one template convention (directory layout) and the following common configuration:
1. A shared Webpack configuration
2. Unified ESLint rules (eslintConfig)
3. A unified unit-test framework configuration: coverage thresholds, test directories, and so on
4. A unified Dockerfile and Jenkinsfile (for building images and defining deployment pipelines)
5. A unified Babel configuration (.babelrc or babel.config.js)
6. Unified constants (cache keys, etc.) and per-environment config files (development, test, production)
The scaffold plays a crucial role: by enforcing conventions and shared configuration it wires up the toolchain for new projects, improving development efficiency on one hand and, on the other, making projects easier to maintain, hand over, and pick up for new team members.
Building a highly customizable scaffold involves many factors. explo-cli typically prepares 3 templates:
1) a desktop React template; 2) a mobile React template; 3) a desktop Vue template
These are managed with git, and the following tools are needed: \
1) console prompts: inquirer \
2) command-line handling: commander \
3) colored log output: chalk \
4) running shell commands: child_process
**Notes on the scaffolding script**
The first line of the explore.js file (it must be the very first line) is
`#!/usr/bin/env node`
which declares the script's runtime. It is equivalent to prefixing the explo command with node; what actually runs is node explo.
## Front-end build workflow
1. Command entry point in package.json
2. commander handles subcommands; inquirer collects command parameters (see the sketch after this list)
3. Validate the new project folder name; decide whether to overwrite, create, or exit
4. Fetch the list of remote project templates
5. Pick the desired remote template
6. Enter the branch you want from the template's remote repository (default: master)
7. Download the template into the local template cache
8. Copy the local template files into the project folder
9. Update user-specific fields in the template's package.json
10. Initialize the project as a git repository
11. Install project dependencies; scaffolding is complete and development can begin
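As a rough sketch of steps 1 and 2 above, the entry script can wire commander and inquirer together as below. The commander/inquirer/chalk calls are real APIs, but the prompt fields, template names, and command wiring are illustrative assumptions, not the actual explo-cli source:
```
#!/usr/bin/env node
// Hypothetical sketch of the `explo init` flow; not the real explo-cli source.
const { program } = require('commander');
const inquirer = require('inquirer');
const chalk = require('chalk');

program.version('1.0.0', '-v, --version'); // supports `explo -v`

program
  .command('init <projectName>') // `explo init <projectName>`
  .alias('i')                    // `explo i <projectName>`
  .action(async (projectName) => {
    // Step 2: inquirer collects the parameters the later steps need.
    const answers = await inquirer.prompt([
      {
        type: 'list',
        name: 'template',
        message: 'Which template?',
        choices: ['react-pc', 'react-mobile', 'vue-pc'], // assumed template names
      },
      { type: 'input', name: 'branch', message: 'Template branch', default: 'master' },
    ]);
    console.log(chalk.green(`Creating ${projectName} from ${answers.template}@${answers.branch}`));
    // Steps 3-11 (validate the folder, download the template, git init, install deps) would follow here.
  });

program.parse(process.argv);
```
With a `bin` entry in package.json pointing at this file (step 1), running `explo init my-app` prompts for a template and branch before handing off to the download and copy steps.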
|
/* Copyright (C) 2005-2011 Fabio Riccardi */
package com.lightcrafts.ui.metadata2;
import com.lightcrafts.image.ImageInfo;
import com.lightcrafts.image.metadata.ImageMetadata;
import com.lightcrafts.image.metadata.ImageMetadataDirectory;
import com.lightcrafts.ui.LightZoneSkin;
import static com.lightcrafts.ui.metadata2.Locale.LOCALE;
import com.lightcrafts.ui.toolkit.PaneTitle;
import javax.swing.*;
import javax.swing.table.TableCellEditor;
import java.awt.*;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
/**
* A vertical box holding MetadataSectionTables, or specialized error
* components if there is trouble with metadata.
*/
public class MetadataStack extends JPanel implements Scrollable {
private List<MetadataTable> tables;
// A flag telling if we're showing one of our error messages.
// (Needed for the Scrollable implementation.)
private boolean error;
public MetadataStack(ImageInfo info) {
tables = new LinkedList<MetadataTable>();
setImage(info);
setBackground(LightZoneSkin.Colors.ToolPanesBackground);
setOpaque(true);
}
public void setImage(ImageInfo info) {
removeAll();
setLayout(new BoxLayout(this, BoxLayout.Y_AXIS));
tables.clear();
add(new PaneTitle(LOCALE.get("MetadataTitle")));
ImageMetadata meta = null;
try {
meta = info.getMetadata();
}
catch (Throwable t) {
// BadImageFileException, IOException, UnknownImageTypeException
t.printStackTrace();
}
if (meta == null) {
String no = LOCALE.get("NoLabel");
JLabel label = new JLabel(no);
label.setAlignmentX(.5f);
add(Box.createVerticalGlue());
add(label);
add(Box.createVerticalGlue());
error = true;
return;
}
Collection<ImageMetadataDirectory> directories =
meta.getDirectories();
if (directories.isEmpty()) {
String empty = LOCALE.get("EmptyLabel");
JLabel label = new JLabel(empty);
label.setAlignmentX(.5f);
add(Box.createVerticalGlue());
add(label);
add(Box.createVerticalGlue());
error = true;
return;
}
error = false;
MetadataPresentation present = new MetadataPresentation();
List<MetadataSection> sections = present.getSections();
for (MetadataSection section : sections) {
MetadataTableModel model =
new MetadataTableModel(info, meta, section);
MetadataTable table = new MetadataTable(model);
tables.add(table);
DefaultButtons buttons = new DefaultButtons(table, meta);
Box control = Box.createHorizontalBox();
control.add(table);
control.add(buttons);
add(control);
add(Box.createVerticalStrut(4));
add(new JSeparator());
add(Box.createVerticalStrut(4));
}
add(Box.createVerticalGlue());
}
boolean isEditing() {
for (MetadataTable table : tables) {
if (table.isEditing()) {
return true;
}
}
return false;
}
void endEditing() {
for (MetadataTable table : tables) {
if (table.isEditing()) {
TableCellEditor editor = table.getCellEditor();
editor.stopCellEditing();
}
}
}
public boolean getScrollableTracksViewportHeight() {
return false;
}
public boolean getScrollableTracksViewportWidth() {
return false;
}
public Dimension getPreferredScrollableViewportSize() {
return getPreferredSize();
}
public int getScrollableBlockIncrement(
Rectangle visibleRect, int orientation, int direction
) {
// If we have any JTables, then defer to one of them:
Component[] comps = getComponents();
for (Component comp : comps) {
if (comp instanceof JTable) {
JTable table = (JTable) comp;
return table.getScrollableBlockIncrement(
visibleRect, orientation, direction
);
}
}
return 1;
}
public int getScrollableUnitIncrement(
Rectangle visibleRect, int orientation, int direction
) {
// If we have any JTables, then defer to one of them:
Component[] comps = getComponents();
for (Component comp : comps) {
if (comp instanceof JTable) {
JTable table = (JTable) comp;
return table.getScrollableUnitIncrement(
visibleRect, orientation, direction
);
}
}
return 1;
}
}
|
using System;
using BeachHacks.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.Extensions.Configuration;
using System.Configuration;
namespace BeachHacks.DAL
{
public class PolitiFactContext : DbContext
{
public PolitiFactContext()
{
}
public PolitiFactContext(DbContextOptions<PolitiFactContext> options)
: base(options)
{
}
public virtual DbSet<Categories> Categories { get; set; }
public virtual DbSet<Entities> Entities { get; set; }
public virtual DbSet<Politicalparty> Politicalparty { get; set; }
public virtual DbSet<Presidentialcandidate> Presidentialcandidate { get; set; }
public virtual DbSet<Tweet> Tweet { get; set; }
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.HasAnnotation("ProductVersion", "2.2.4-servicing-10062");
modelBuilder.Entity<Categories>(entity =>
{
entity.HasKey(e => e.CategoryId)
.HasName("categories_pkey");
entity.ToTable("categories");
entity.Property(e => e.CategoryId).HasColumnName("category_id");
entity.Property(e => e.Confidence)
.HasColumnName("confidence")
.HasColumnType("numeric");
entity.Property(e => e.TweetId).HasColumnName("tweet_id");
entity.HasOne(d => d.Tweet)
.WithMany(p => p.Categories)
.HasForeignKey(d => d.TweetId)
.OnDelete(DeleteBehavior.ClientSetNull)
.HasConstraintName("categories_tweet_id_fkey");
});
modelBuilder.Entity<Entities>(entity =>
{
entity.HasKey(e => e.EntityId)
.HasName("entities_pkey");
entity.ToTable("entities");
entity.Property(e => e.EntityId).HasColumnName("entity_id");
entity.Property(e => e.Salience)
.HasColumnName("salience")
.HasColumnType("numeric");
entity.Property(e => e.SentimentMag).HasColumnName("sentiment_mag");
entity.Property(e => e.SentimentScore).HasColumnName("sentiment_score");
entity.Property(e => e.TweetId).HasColumnName("tweet_id");
entity.Property(e => e.Type)
.HasColumnName("type")
.HasMaxLength(100);
entity.HasOne(d => d.Tweet)
.WithMany(p => p.Entities)
.HasForeignKey(d => d.TweetId)
.OnDelete(DeleteBehavior.ClientSetNull)
.HasConstraintName("entities_tweet_id_fkey");
});
modelBuilder.Entity<Politicalparty>(entity =>
{
entity.ToTable("politicalparty");
entity.Property(e => e.PoliticalPartyId).HasColumnName("political_party_id");
entity.Property(e => e.PartyName)
.HasColumnName("party_name")
.HasMaxLength(50);
});
modelBuilder.Entity<Presidentialcandidate>(entity =>
{
entity.HasKey(e => e.UserId)
.HasName("presidentialcandidate_pkey");
entity.ToTable("presidentialcandidate");
entity.HasIndex(e => e.Name)
.HasName("presidentialcandidate_name_key")
.IsUnique();
entity.Property(e => e.UserId).HasColumnName("user_id");
entity.Property(e => e.Age).HasColumnName("age");
entity.Property(e => e.Name)
.IsRequired()
.HasColumnName("name")
.HasMaxLength(50);
entity.Property(e => e.PoliticalPartyId).HasColumnName("political_party_id");
entity.Property(e => e.State)
.IsRequired()
.HasColumnName("state")
.HasMaxLength(2);
entity.HasOne(d => d.PoliticalParty)
.WithMany(p => p.Presidentialcandidate)
.HasForeignKey(d => d.PoliticalPartyId)
.OnDelete(DeleteBehavior.ClientSetNull)
.HasConstraintName("presidentialcandidate_political_party_id_fkey");
});
modelBuilder.Entity<Tweet>(entity =>
{
entity.ToTable("tweet");
entity.Property(e => e.TweetId).HasColumnName("tweet_id");
entity.Property(e => e.PoliticalCandidate).HasColumnName("political_candidate");
entity.Property(e => e.Text)
.IsRequired()
.HasColumnName("text")
.HasMaxLength(500);
entity.Property(e => e.Time).HasColumnName("time");
entity.Property(e => e.TwitterName)
.IsRequired()
.HasColumnName("twitter_name")
.HasMaxLength(50);
entity.Property(e => e.TwitterUserId).HasColumnName("twitter_user_id");
entity.HasOne(d => d.PoliticalCandidateNavigation)
.WithMany(p => p.Tweet)
.HasForeignKey(d => d.PoliticalCandidate)
.OnDelete(DeleteBehavior.ClientSetNull)
.HasConstraintName("tweet_political_candidate_fkey");
});
}
}
}
|
package model
import finder.MethodParameterFinder
import finder.MethodReturnFinder
import org.junit.jupiter.api.Assertions.*
import org.junit.jupiter.api.Test
import sample.Sample
class MethodExtractorTest {
fun sampleParamTest(data: Sample) {
}
fun sampleReturnTest() =
Sample(
data = "test"
)
@Test
fun `Methods findClazzNames parameter`() {
val method = this.javaClass.getDeclaredMethod("sampleParamTest", Sample::class.java)
        assertEquals(
            listOf("sample.Sample"),
            MethodExtractor(listOf(method)).findClazzNames(MethodParameterFinder())
        )
}
@Test
fun `Methods findClazzNames return type`() {
val method = this.javaClass.getDeclaredMethod("sampleReturnTest")
        assertEquals(
            listOf("sample.Sample"),
            MethodExtractor(listOf(method)).findClazzNames(MethodReturnFinder())
        )
}
} |
#!/bin/bash
# Script Name: AtoMiC Transmission Installer
source "$SCRIPTPATH/inc/commons.sh"
source "$SCRIPTPATH/inc/header.sh"
echo -e "${GREEN}AtoMiC $APPTITLE Installer Script$ENDCOLOR"
source "$SCRIPTPATH/inc/pause.sh"
source "$SCRIPTPATH/inc/app-autostart-remove.sh"
source "$SCRIPTPATH/inc/app-move-previous.sh"
source "$SCRIPTPATH/inc/app-uninstall.sh"
source "$SCRIPTPATH/inc/app-uninstall-deps.sh"
source "$SCRIPTPATH/$APPNAME/$APPNAME-repository-configurator.sh"
source "$SCRIPTPATH/inc/app-repository-add.sh"
source "$SCRIPTPATH/inc/pkgupdate.sh"
source "$SCRIPTPATH/inc/app-install-deps.sh"
source "$SCRIPTPATH/inc/app-folders-create.sh"
# Just adds the override and updates it for systemd.
if IsSystemdSupported; then
source "$SCRIPTPATH/inc/app-systemd-add.sh"
source "$SCRIPTPATH/$APPNAME/$APPNAME-systemd-update.sh"
fi
source "$SCRIPTPATH/inc/app-install.sh"
if ! grep -qF 'net.core.rmem_max' /etc/sysctl.conf; then
echo -e "${YELLOW}--->$APPNAME UTP & UDP Buffer Optimizations...$ENDCOLOR"
echo 'net.core.rmem_max = 16777216' >> /etc/sysctl.conf
echo 'net.core.wmem_max = 4194304' >> /etc/sysctl.conf
sysctl -p
echo
fi
# Need to do this separately as the init/default files wouldn't have existed before the install.
source "$SCRIPTPATH/inc/app-autostart-remove-unrequired-only.sh"
source "$SCRIPTPATH/$APPNAME/$APPNAME-init-update.sh"
source "$SCRIPTPATH/$APPNAME/$APPNAME-default-update.sh"
source "$SCRIPTPATH/inc/app-set-permissions.sh"
source "$SCRIPTPATH/utils/nzbtomedia/nzbtomedia-installer.sh"
source "$SCRIPTPATH/transmission-daemon/transmission-daemon-constants.sh"
source "$SCRIPTPATH/inc/app-start.sh"
sleep 2
source "$SCRIPTPATH/$APPNAME/$APPNAME-settings-configurator.sh"
# Command which reloads the settings file.
kill -s SIGHUP "$(pidof transmission-daemon)" >/dev/null 2>&1
source "$SCRIPTPATH/inc/app-install-confirmation.sh"
source "$SCRIPTPATH/inc/thankyou.sh"
source "$SCRIPTPATH/inc/exit.sh"
|
var searchData=
[
['ex1',['ex1',['../namespaceex1.html',1,'']]],
['ex2',['ex2',['../namespaceex2.html',1,'']]]
];
|
/*++
Copyright (c) 1996 Microsoft Corporation
Module Name:
port.c
Abstract:
This modules implements com port code to support the boot debugger.
Author:
Bryan M. Willman (bryanwi) 24-Sep-90
Revision History:
--*/
#include "bd.h"
_TUCHAR DebugMessage[80];
LOGICAL
BdPortInitialize(
IN ULONG BaudRate,
IN ULONG PortNumber,
OUT PULONG BdFileId
)
/*++
Routine Description:
This functions initializes the boot debugger com port.
Arguments:
BaudRate - Supplies an optional baud rate.
    PortNumber - Supplies an optional port number.
Returned Value:
TRUE - If a debug port is found.
--*/
{
//
// Initialize the specified port.
//
if (!BlPortInitialize(BaudRate, PortNumber, NULL, FALSE, BdFileId)) {
return FALSE;
}
_stprintf(DebugMessage,
TEXT("\nBoot Debugger Using: COM%d (Baud Rate %d)\n"),
PortNumber,
BaudRate);
//
// We cannot use BlPrint() at this time because BlInitStdIo() has not been called, which is
// required to use the Arc emulator code.
//
TextStringOut(DebugMessage);
return TRUE;
}
|
<?php
namespace NeptuneSoftware\Invoice\Interfaces;
use Illuminate\Contracts\View\View;
use Illuminate\Database\Eloquent\Collection;
use Illuminate\Database\Eloquent\Model;
use NeptuneSoftware\Invoice\Models\Bill;
use Symfony\Component\HttpFoundation\Response;
interface BillServiceInterface
{
/**
* Generate bill referencing Eloquent model.
*
* @param Model $model Eloquent model with HasInvoice trait
* @param array|null $bill Bill attributes
* @return $this
*/
public function create(Model $model, ?array $bill = []): self;
/**
* Get bill model.
*
* @return Bill
*/
public function getBill(): Bill;
/**
* Get bill lines.
*
* @return Collection
*/
public function getLines(): Collection;
/**
* Set next line free sale.
*
* @return BillServiceInterface
*/
public function setFree(): BillServiceInterface;
/**
* Set next line complimentary sale.
*
* @return BillServiceInterface
*/
public function setComplimentary(): BillServiceInterface;
/**
     * Add percentage tax for a bill line.
*
* @param string $identifier
* @param float $taxPercentage
* @return BillServiceInterface
*/
public function addTaxPercentage(string $identifier, float $taxPercentage = 0): BillServiceInterface;
/**
     * Add fixed tax for a bill line.
*
* @param string $identifier
* @param int $taxFixed
* @return BillServiceInterface
*/
public function addTaxFixed(string $identifier, int $taxFixed = 0): BillServiceInterface;
/**
* Use this if the amount does not yet include tax.
*
* @param Model $model Set reference invoice line model
* @param Int $amount The amount in cents, excluding taxes
* @param String $description The description
* @return self This instance after recalculation
*/
public function addAmountExclTax(Model $model, int $amount, string $description): self;
/**
* Use this if the amount already includes tax.
*
* @param Model $model Set reference invoice line model
     * @param Int $amount The amount in cents, including taxes
* @param String $description The description
* @return self This instance after recalculation
*/
public function addAmountInclTax(Model $model, int $amount, string $description): self;
/**
* Recalculates total and tax based on lines
* @return Bill This instance
*/
public function recalculate(): Bill;
/**
* Get the View instance for the invoice.
*
* @param array $data
* @return \Illuminate\View\View
*/
public function view(array $data = []): View;
/**
* Capture the invoice as a PDF and return the raw bytes.
*
* @param array $data
* @return string
*/
public function pdf(array $data = []): string;
/**
* Create an invoice download response.
*
* @param array $data
* @return \Symfony\Component\HttpFoundation\Response
*/
public function download(array $data = []): Response;
/**
* Find invoice model.
*
* @param string $reference
* @return Bill|null
*/
public function findByReference(string $reference): ?Bill;
/**
* Find or fail invoice model.
*
* @param string $reference
* @return Bill
* @throws \Illuminate\Database\Eloquent\ModelNotFoundException
*/
public function findByReferenceOrFail(string $reference): Bill;
/**
     * Find bill models by the invoiceable model on their lines.
*
* @param Model $model
* @return Collection
*/
public function findByInvoiceable(Model $model): Collection;
/**
     * Find bill models by a related model.
*
* @param Model $model
* @return Collection
*/
public function findByRelated(Model $model): Collection;
}
|
realpath=$(realpath "$0")
dirpath=$(dirname "$realpath")
scriptpath="$dirpath/pve-autorepl.py --maxvmid 499 --interval */1 --rate 50 | logger -t pve-autorepl"
echo "script path is: $scriptpath"
cronrecord="0 * * * * $scriptpath"
echo "cron record is: $cronrecord"
# write out current crontab
crontab -l > mycron
# echo new cron into cron file
# m h dom mon dow command
echo "$cronrecord" >> mycron
# install new cron file
crontab mycron
rm mycron
|
import 'dart:async';
import 'package:mobx/src/api/context.dart';
import 'package:mobx/src/core.dart';
part 'async/async_action.dart';
part 'async/observable_future.dart';
part 'async/observable_stream.dart';
|
#ifndef IMAGE_SMOOTH_H
#define IMAGE_SMOOTH_H
/// Given an image of size width x height in srcImg, smooths the image using a
/// Gaussian filter (cvSmooth) and copies the smoothed image to smoothImg.
/// If sigma = 1.0 (the default), calls cvSmooth(srcImg, smoothedImg, CV_GAUSSIAN, 5, 5);
/// if sigma > 1.0, calls cvSmooth(srcImg, smoothedImg, CV_GAUSSIAN, 0, 0, sigma).
///
void SmoothImage(unsigned char *srcImg, unsigned char *smoothImg, int width,
int height, double sigma = 1.0);
#endif |
#!/bin/bash
set -eu
if [[ $# -lt 2 ]]; then
    echo "Usage: bash perform-release.sh release_version snapshot_version [staging_repository]"
exit 1
fi
RELEASE_VERSION=$1
SNAPSHOT_VERSION=$2
STAGING_REPOSITORY=${3:-}
SKIP_BUILD=${SKIP_BUILD:-0}
RELEASE_PROFILE=${RELEASE_PROFILE:-sonatype}
echo "Releasing version $RELEASE_VERSION ($SNAPSHOT_VERSION) to repository $STAGING_REPOSITORY"
echo "========================================================================================"
if [[ ! -z $(git tag -l "nd4s-$RELEASE_VERSION") ]]; then
echo "Error: Version $RELEASE_VERSION has already been released!"
exit 1
fi
sed -i "s/\"currentVersion\", default = \".*\"/\"currentVersion\", default = \"$RELEASE_VERSION\"/" build.sbt
sed -i "s/\"nd4jVersion\", default = \".*\"/\"nd4jVersion\", default = \"$RELEASE_VERSION\"/" build.sbt
# ~/.ivy2/.credentials needs to look like this:
# realm=Sonatype Nexus Repository Manager
# host=oss.sonatype.org
# user=xxx
# password=xxx
if [[ "${SKIP_BUILD}" == "0" ]]; then
sbt -DrepoType=$RELEASE_PROFILE -DstageRepoId=$STAGING_REPOSITORY +publishSigned
fi
git commit -s -a -m "Update to version $RELEASE_VERSION"
git tag -s -a -m "nd4s-$RELEASE_VERSION" "nd4s-$RELEASE_VERSION"
git tag -s -a -f -m "nd4s-$RELEASE_VERSION" "latest_release"
sed -i "s/\"currentVersion\", default = \".*\"/\"currentVersion\", default = \"$SNAPSHOT_VERSION\"/" build.sbt
sed -i "s/\"nd4jVersion\", default = \".*\"/\"nd4jVersion\", default = \"$SNAPSHOT_VERSION\"/" build.sbt
git commit -s -a -m "Update to version $SNAPSHOT_VERSION"
echo "Successfully performed release of version $RELEASE_VERSION ($SNAPSHOT_VERSION) to repository $STAGING_REPOSITORY"
|
num_of_rectangle, num_of_rhombus = 0, 0
$stdin.readlines.each do |input|
edge1, edge2, diagonal_line = input.split(',').map(&:to_i)
num_of_rectangle += 1 if (edge1**2 + edge2**2) == diagonal_line**2
num_of_rhombus += 1 if edge1 == edge2
end
puts num_of_rectangle, num_of_rhombus
|
using RimWorld;
using RimWorld.Planet;
namespace RA_Code
{
public class BiomeWorker_Archipelago_Tundra : BiomeWorker
{
public override float GetScore(Tile tile, int tileID)
{
if (!tile.WaterCovered)
{
return -100f;
}
if (tile.elevation < -99)
{
return -100f;
}
if (tile.temperature < -21f)
{
return -100f;
}
return -tile.temperature;
}
}
} |
package org.psesd.srx.shared.core
import scala.xml.Node
/** SRX resource interface.
*
* @version 1.0
* @since 1.0
* @author Stephen Pugmire (iTrellis, LLC)
* */
trait SrxResource {
protected def optional(value: String, xml: Node): Node = {
if(value == null || value.isEmpty) null else xml
}
protected def isEmpty: Boolean = {false}
}
|
import torch
from torchvision import models
import numpy as np
import cv2
from torchvision import transforms
LAST_VGG_LAYER = 17
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Images must be normalized and made 224x224 pixels before entering into the vgg model
normalize = transforms.Compose([transforms.ToPILImage(),
transforms.Resize((224,224)),
transforms.ToTensor(),
transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])])
def extract_features(X):
''' Run X through the truncated VGG-16 model '''
    X = X.to('cpu')  # Tensor.to() is not in-place; keep the reassigned result
x_norm = torch.zeros((X.shape[0], 3, 224, 224)).to(device)
for i in range(len(X)):
x_norm[i,:,:,:] = normalize(X[i,:,:,:].cpu())
    return vgg(x_norm)
def get_l2_mask(targets):
''' Get a 0-1 weighted matrix of features extracted using vgg-16 '''
width = targets.shape[-1]
masks = np.empty(targets.shape)
target_feats = extract_features(targets)
for i in range(len(targets)):
mask = target_feats[i,:,:,:]
mask = cv2.resize(np.transpose(mask.cpu().numpy(), (1,2,0)), (width, width))
mask = np.sum(mask, axis=-1)
mask = (mask - np.min(mask))
mask = mask / np.max(mask)
mask = np.repeat(mask[:, :, np.newaxis], 3, axis=2)
masks[i,:,:,:] = np.transpose(mask, (2, 0, 1))
    # masks = 1.0 - masks  # In case you want to invert the mask to test it
return torch.tensor(masks).to(device)
vgg_model = models.vgg16(pretrained=True)
vgg = torch.nn.Sequential(*list(vgg_model.features.children())[:LAST_VGG_LAYER])
for param in vgg.parameters():
param.requires_grad = False
vgg.to(device)
|
use plotters::prelude::*;
const OUT_FILE_NAME: &'static str = "plotters-doc-data/full_palette.png";
fn main() -> Result<(), Box<dyn std::error::Error>> {
let root = BitMapBackend::new(OUT_FILE_NAME, (2000, 850)).into_drawing_area();
root.fill(&WHITE)?;
let mut chart = ChartBuilder::on(&root)
.caption("Demonstration of full_palette Colors", ("sans-serif", 50))
.build_cartesian_2d(-0.5f32..19f32, -1f32..15f32)?;
use full_palette::*;
let colors = [
[
RED, RED_50, RED_100, RED_200, RED_300, RED_400, RED_500, RED_600, RED_700, RED_800,
RED_900, RED_A100, RED_A200, RED_A400, RED_A700,
],
[
PINK, PINK_50, PINK_100, PINK_200, PINK_300, PINK_400, PINK_500, PINK_600, PINK_700,
PINK_800, PINK_900, PINK_A100, PINK_A200, PINK_A400, PINK_A700,
],
[
PURPLE,
PURPLE_50,
PURPLE_100,
PURPLE_200,
PURPLE_300,
PURPLE_400,
PURPLE_500,
PURPLE_600,
PURPLE_700,
PURPLE_800,
PURPLE_900,
PURPLE_A100,
PURPLE_A200,
PURPLE_A400,
PURPLE_A700,
],
[
DEEPPURPLE,
DEEPPURPLE_50,
DEEPPURPLE_100,
DEEPPURPLE_200,
DEEPPURPLE_300,
DEEPPURPLE_400,
DEEPPURPLE_500,
DEEPPURPLE_600,
DEEPPURPLE_700,
DEEPPURPLE_800,
DEEPPURPLE_900,
DEEPPURPLE_A100,
DEEPPURPLE_A200,
DEEPPURPLE_A400,
DEEPPURPLE_A700,
],
[
INDIGO,
INDIGO_50,
INDIGO_100,
INDIGO_200,
INDIGO_300,
INDIGO_400,
INDIGO_500,
INDIGO_600,
INDIGO_700,
INDIGO_800,
INDIGO_900,
INDIGO_A100,
INDIGO_A200,
INDIGO_A400,
INDIGO_A700,
],
[
BLUE, BLUE_50, BLUE_100, BLUE_200, BLUE_300, BLUE_400, BLUE_500, BLUE_600, BLUE_700,
BLUE_800, BLUE_900, BLUE_A100, BLUE_A200, BLUE_A400, BLUE_A700,
],
[
LIGHTBLUE,
LIGHTBLUE_50,
LIGHTBLUE_100,
LIGHTBLUE_200,
LIGHTBLUE_300,
LIGHTBLUE_400,
LIGHTBLUE_500,
LIGHTBLUE_600,
LIGHTBLUE_700,
LIGHTBLUE_800,
LIGHTBLUE_900,
LIGHTBLUE_A100,
LIGHTBLUE_A200,
LIGHTBLUE_A400,
LIGHTBLUE_A700,
],
[
CYAN, CYAN_50, CYAN_100, CYAN_200, CYAN_300, CYAN_400, CYAN_500, CYAN_600, CYAN_700,
CYAN_800, CYAN_900, CYAN_A100, CYAN_A200, CYAN_A400, CYAN_A700,
],
[
TEAL, TEAL_50, TEAL_100, TEAL_200, TEAL_300, TEAL_400, TEAL_500, TEAL_600, TEAL_700,
TEAL_800, TEAL_900, TEAL_A100, TEAL_A200, TEAL_A400, TEAL_A700,
],
[
GREEN, GREEN_50, GREEN_100, GREEN_200, GREEN_300, GREEN_400, GREEN_500, GREEN_600,
GREEN_700, GREEN_800, GREEN_900, GREEN_A100, GREEN_A200, GREEN_A400, GREEN_A700,
],
[
LIGHTGREEN,
LIGHTGREEN_50,
LIGHTGREEN_100,
LIGHTGREEN_200,
LIGHTGREEN_300,
LIGHTGREEN_400,
LIGHTGREEN_500,
LIGHTGREEN_600,
LIGHTGREEN_700,
LIGHTGREEN_800,
LIGHTGREEN_900,
LIGHTGREEN_A100,
LIGHTGREEN_A200,
LIGHTGREEN_A400,
LIGHTGREEN_A700,
],
[
LIME, LIME_50, LIME_100, LIME_200, LIME_300, LIME_400, LIME_500, LIME_600, LIME_700,
LIME_800, LIME_900, LIME_A100, LIME_A200, LIME_A400, LIME_A700,
],
[
YELLOW,
YELLOW_50,
YELLOW_100,
YELLOW_200,
YELLOW_300,
YELLOW_400,
YELLOW_500,
YELLOW_600,
YELLOW_700,
YELLOW_800,
YELLOW_900,
YELLOW_A100,
YELLOW_A200,
YELLOW_A400,
YELLOW_A700,
],
[
AMBER, AMBER_50, AMBER_100, AMBER_200, AMBER_300, AMBER_400, AMBER_500, AMBER_600,
AMBER_700, AMBER_800, AMBER_900, AMBER_A100, AMBER_A200, AMBER_A400, AMBER_A700,
],
[
ORANGE,
ORANGE_50,
ORANGE_100,
ORANGE_200,
ORANGE_300,
ORANGE_400,
ORANGE_500,
ORANGE_600,
ORANGE_700,
ORANGE_800,
ORANGE_900,
ORANGE_A100,
ORANGE_A200,
ORANGE_A400,
ORANGE_A700,
],
[
DEEPORANGE,
DEEPORANGE_50,
DEEPORANGE_100,
DEEPORANGE_200,
DEEPORANGE_300,
DEEPORANGE_400,
DEEPORANGE_500,
DEEPORANGE_600,
DEEPORANGE_700,
DEEPORANGE_800,
DEEPORANGE_900,
DEEPORANGE_A100,
DEEPORANGE_A200,
DEEPORANGE_A400,
DEEPORANGE_A700,
],
[
BROWN, BROWN_50, BROWN_100, BROWN_200, BROWN_300, BROWN_400, BROWN_500, BROWN_600,
BROWN_700, BROWN_800, BROWN_900, BROWN_A100, BROWN_A200, BROWN_A400, BROWN_A700,
],
[
GREY, GREY_50, GREY_100, GREY_200, GREY_300, GREY_400, GREY_500, GREY_600, GREY_700,
GREY_800, GREY_900, GREY_A100, GREY_A200, GREY_A400, GREY_A700,
],
[
BLUEGREY,
BLUEGREY_50,
BLUEGREY_100,
BLUEGREY_200,
BLUEGREY_300,
BLUEGREY_400,
BLUEGREY_500,
BLUEGREY_600,
BLUEGREY_700,
BLUEGREY_800,
BLUEGREY_900,
BLUEGREY_A100,
BLUEGREY_A200,
BLUEGREY_A400,
BLUEGREY_A700,
],
];
let color_names = [
[
"RED", "RED_50", "RED_100", "RED_200", "RED_300", "RED_400", "RED_500", "RED_600",
"RED_700", "RED_800", "RED_900", "RED_A100", "RED_A200", "RED_A400", "RED_A700",
],
[
"PINK",
"PINK_50",
"PINK_100",
"PINK_200",
"PINK_300",
"PINK_400",
"PINK_500",
"PINK_600",
"PINK_700",
"PINK_800",
"PINK_900",
"PINK_A100",
"PINK_A200",
"PINK_A400",
"PINK_A700",
],
[
"PURPLE",
"PURPLE_50",
"PURPLE_100",
"PURPLE_200",
"PURPLE_300",
"PURPLE_400",
"PURPLE_500",
"PURPLE_600",
"PURPLE_700",
"PURPLE_800",
"PURPLE_900",
"PURPLE_A100",
"PURPLE_A200",
"PURPLE_A400",
"PURPLE_A700",
],
[
"DEEPPURPLE",
"DEEPPURPLE_50",
"DEEPPURPLE_100",
"DEEPPURPLE_200",
"DEEPPURPLE_300",
"DEEPPURPLE_400",
"DEEPPURPLE_500",
"DEEPPURPLE_600",
"DEEPPURPLE_700",
"DEEPPURPLE_800",
"DEEPPURPLE_900",
"DEEPPURPLE_A100",
"DEEPPURPLE_A200",
"DEEPPURPLE_A400",
"DEEPPURPLE_A700",
],
[
"INDIGO",
"INDIGO_50",
"INDIGO_100",
"INDIGO_200",
"INDIGO_300",
"INDIGO_400",
"INDIGO_500",
"INDIGO_600",
"INDIGO_700",
"INDIGO_800",
"INDIGO_900",
"INDIGO_A100",
"INDIGO_A200",
"INDIGO_A400",
"INDIGO_A700",
],
[
"BLUE",
"BLUE_50",
"BLUE_100",
"BLUE_200",
"BLUE_300",
"BLUE_400",
"BLUE_500",
"BLUE_600",
"BLUE_700",
"BLUE_800",
"BLUE_900",
"BLUE_A100",
"BLUE_A200",
"BLUE_A400",
"BLUE_A700",
],
[
"LIGHTBLUE",
"LIGHTBLUE_50",
"LIGHTBLUE_100",
"LIGHTBLUE_200",
"LIGHTBLUE_300",
"LIGHTBLUE_400",
"LIGHTBLUE_500",
"LIGHTBLUE_600",
"LIGHTBLUE_700",
"LIGHTBLUE_800",
"LIGHTBLUE_900",
"LIGHTBLUE_A100",
"LIGHTBLUE_A200",
"LIGHTBLUE_A400",
"LIGHTBLUE_A700",
],
[
"CYAN",
"CYAN_50",
"CYAN_100",
"CYAN_200",
"CYAN_300",
"CYAN_400",
"CYAN_500",
"CYAN_600",
"CYAN_700",
"CYAN_800",
"CYAN_900",
"CYAN_A100",
"CYAN_A200",
"CYAN_A400",
"CYAN_A700",
],
[
"TEAL",
"TEAL_50",
"TEAL_100",
"TEAL_200",
"TEAL_300",
"TEAL_400",
"TEAL_500",
"TEAL_600",
"TEAL_700",
"TEAL_800",
"TEAL_900",
"TEAL_A100",
"TEAL_A200",
"TEAL_A400",
"TEAL_A700",
],
[
"GREEN",
"GREEN_50",
"GREEN_100",
"GREEN_200",
"GREEN_300",
"GREEN_400",
"GREEN_500",
"GREEN_600",
"GREEN_700",
"GREEN_800",
"GREEN_900",
"GREEN_A100",
"GREEN_A200",
"GREEN_A400",
"GREEN_A700",
],
[
"LIGHTGREEN",
"LIGHTGREEN_50",
"LIGHTGREEN_100",
"LIGHTGREEN_200",
"LIGHTGREEN_300",
"LIGHTGREEN_400",
"LIGHTGREEN_500",
"LIGHTGREEN_600",
"LIGHTGREEN_700",
"LIGHTGREEN_800",
"LIGHTGREEN_900",
"LIGHTGREEN_A100",
"LIGHTGREEN_A200",
"LIGHTGREEN_A400",
"LIGHTGREEN_A700",
],
[
"LIME",
"LIME_50",
"LIME_100",
"LIME_200",
"LIME_300",
"LIME_400",
"LIME_500",
"LIME_600",
"LIME_700",
"LIME_800",
"LIME_900",
"LIME_A100",
"LIME_A200",
"LIME_A400",
"LIME_A700",
],
[
"YELLOW",
"YELLOW_50",
"YELLOW_100",
"YELLOW_200",
"YELLOW_300",
"YELLOW_400",
"YELLOW_500",
"YELLOW_600",
"YELLOW_700",
"YELLOW_800",
"YELLOW_900",
"YELLOW_A100",
"YELLOW_A200",
"YELLOW_A400",
"YELLOW_A700",
],
[
"AMBER",
"AMBER_50",
"AMBER_100",
"AMBER_200",
"AMBER_300",
"AMBER_400",
"AMBER_500",
"AMBER_600",
"AMBER_700",
"AMBER_800",
"AMBER_900",
"AMBER_A100",
"AMBER_A200",
"AMBER_A400",
"AMBER_A700",
],
[
"ORANGE",
"ORANGE_50",
"ORANGE_100",
"ORANGE_200",
"ORANGE_300",
"ORANGE_400",
"ORANGE_500",
"ORANGE_600",
"ORANGE_700",
"ORANGE_800",
"ORANGE_900",
"ORANGE_A100",
"ORANGE_A200",
"ORANGE_A400",
"ORANGE_A700",
],
[
"DEEPORANGE",
"DEEPORANGE_50",
"DEEPORANGE_100",
"DEEPORANGE_200",
"DEEPORANGE_300",
"DEEPORANGE_400",
"DEEPORANGE_500",
"DEEPORANGE_600",
"DEEPORANGE_700",
"DEEPORANGE_800",
"DEEPORANGE_900",
"DEEPORANGE_A100",
"DEEPORANGE_A200",
"DEEPORANGE_A400",
"DEEPORANGE_A700",
],
[
"BROWN",
"BROWN_50",
"BROWN_100",
"BROWN_200",
"BROWN_300",
"BROWN_400",
"BROWN_500",
"BROWN_600",
"BROWN_700",
"BROWN_800",
"BROWN_900",
"BROWN_A100",
"BROWN_A200",
"BROWN_A400",
"BROWN_A700",
],
[
"GREY",
"GREY_50",
"GREY_100",
"GREY_200",
"GREY_300",
"GREY_400",
"GREY_500",
"GREY_600",
"GREY_700",
"GREY_800",
"GREY_900",
"GREY_A100",
"GREY_A200",
"GREY_A400",
"GREY_A700",
],
[
"BLUEGREY",
"BLUEGREY_50",
"BLUEGREY_100",
"BLUEGREY_200",
"BLUEGREY_300",
"BLUEGREY_400",
"BLUEGREY_500",
"BLUEGREY_600",
"BLUEGREY_700",
"BLUEGREY_800",
"BLUEGREY_900",
"BLUEGREY_A100",
"BLUEGREY_A200",
"BLUEGREY_A400",
"BLUEGREY_A700",
],
];
use plotters::style::text_anchor::*;
let centered = Pos::new(HPos::Center, VPos::Top);
let label_style = TextStyle::from(("monospace", 14.0).into_font()).pos(centered);
for (col, colors) in colors.iter().enumerate() {
chart.draw_series(colors.iter().zip(color_names[col].iter()).enumerate().map(
|(row, (color, &name))| {
let row = row as f32;
let col = col as f32;
EmptyElement::at((col, row))
+ Circle::new((0, 0), 15, color.filled())
+ Text::new(name, (0, 16), &label_style)
},
))?;
}
// To avoid the IO failure being ignored silently, we manually call the present function
root.present().expect("Unable to write result to file, please make sure 'plotters-doc-data' dir exists under current dir");
println!("Result has been saved to {}", OUT_FILE_NAME);
Ok(())
}
#[test]
fn entry_point() {
main().unwrap()
}
|
---
title: URL HTTPS Verification Plugin
layout: default
summary: Verifies that an HTTPS connection can be trusted
--- |
#[macro_use]
mod macros;
#[test]
fn test_basic() {
let content = "#!/usr/bin/env rustx\nfn main() {}";
let file = syn::parse_file(content).unwrap();
snapshot!(file, @r###"
File {
shebang: Some("#!/usr/bin/env rustx"),
items: [
Item::Fn {
vis: Inherited,
sig: Signature {
ident: "main",
generics: Generics,
output: Default,
},
block: Block,
},
],
}
"###);
}
#[test]
fn test_comment() {
let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
let file = syn::parse_file(content).unwrap();
snapshot!(file, @r###"
File {
attrs: [
Attribute {
style: Inner,
path: Path {
segments: [
PathSegment {
ident: "allow",
arguments: None,
},
],
},
tokens: TokenStream(`(dead_code)`),
},
],
items: [
Item::Fn {
vis: Inherited,
sig: Signature {
ident: "main",
generics: Generics,
output: Default,
},
block: Block,
},
],
}
"###);
}
|
require('should')
const isbn_ = require('lib/isbn/isbn')
describe('isbn', () => {
// Test only what was added on top of the isbn3 module
describe('parse', () => {
    it('should return an ISBN data object', () => {
const data = isbn_.parse('9788420646657')
data.should.be.an.Object()
data.isbn13.should.equal('9788420646657')
})
it('should recover truncated ISBN-13', () => {
isbn_.parse('8420646657').should.be.an.Object()
})
})
})
|
chorus_home = File.expand_path(File.dirname(__FILE__) + '/../')
#require File.join(chorus_home, 'config', 'boot')
require File.join(chorus_home, 'app', 'models', 'chorus_config')
chorus_config = ChorusConfig.new(chorus_home)
max_connections = chorus_config["database_threads"]
print max_connections |
# rpi-rb-mongodb
Docker Raspberry Pi2 MongoDB 3.0.9 Build
1) git clone
2) docker build -t <yourDockerhubName>/rpi-rb-mongodb .
3) docker run -d -p 27017:27017 -v /home/pi/mongodb:/data soulmanos/rpi-rb-mongodb
|
# Copyright 2018, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import logging
import threading
import typing
from typing import Optional
if typing.TYPE_CHECKING: # pragma: NO COVER
from google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager import (
StreamingPullManager,
)
_LOGGER = logging.getLogger(__name__)
_HEARTBEAT_WORKER_NAME = "Thread-Heartbeater"
# How often to send heartbeats in seconds. Determined as half the period of
# time where the Pub/Sub server will close the stream as inactive, which is
# 60 seconds.
_DEFAULT_PERIOD = 30
class Heartbeater(object):
def __init__(self, manager: "StreamingPullManager", period: int = _DEFAULT_PERIOD):
self._thread: Optional[threading.Thread] = None
self._operational_lock = threading.Lock()
self._manager = manager
self._stop_event = threading.Event()
self._period = period
def heartbeat(self) -> None:
"""Periodically send streaming pull heartbeats.
"""
while not self._stop_event.is_set():
if self._manager.heartbeat():
_LOGGER.debug("Sent heartbeat.")
self._stop_event.wait(timeout=self._period)
_LOGGER.info("%s exiting.", _HEARTBEAT_WORKER_NAME)
def start(self) -> None:
with self._operational_lock:
if self._thread is not None:
raise ValueError("Heartbeater is already running.")
# Create and start the helper thread.
self._stop_event.clear()
thread = threading.Thread(
name=_HEARTBEAT_WORKER_NAME, target=self.heartbeat
)
thread.daemon = True
thread.start()
_LOGGER.debug("Started helper thread %s", thread.name)
self._thread = thread
def stop(self) -> None:
with self._operational_lock:
self._stop_event.set()
if self._thread is not None:
# The thread should automatically exit when the consumer is
# inactive.
self._thread.join()
self._thread = None
|
package br.com.rafanereslima.marvelousheroes.presentation.components
import android.app.Activity
import android.app.AlertDialog
import android.graphics.Color
import android.graphics.drawable.ColorDrawable
import android.os.Bundle
import br.com.rafanereslima.marvelousheroes.R
internal class CustomLoading(activity: Activity?) : AlertDialog(activity) {
init {
instance = this
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
configureUi()
}
private fun configureUi() {
setContentView(R.layout.custom_loading)
window?.setBackgroundDrawable(ColorDrawable(Color.TRANSPARENT))
setCancelable(false)
}
companion object {
private var instance: CustomLoading? = null
fun getInstance(activity: Activity?): CustomLoading {
if (instance == null) {
instance = CustomLoading(activity)
}
return instance!!
}
}
} |
package com.uramnoil.awesome_minecraft_console.endervision.compose.atoms
import androidx.compose.desktop.ui.tooling.preview.Preview
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.Box
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.material.Text
import androidx.compose.runtime.Composable
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.text.SpanStyle
import androidx.compose.ui.text.buildAnnotatedString
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.text.style.TextDecoration
import androidx.compose.ui.text.withStyle
import com.uramnoil.ansies.parameter.*
import com.uramnoil.ansies.parse.ansi
import com.uramnoil.ansies.toSpans
import com.uramnoil.awesome_minecraft_console.endervision.common.usecase.Line
@Composable
fun Line(line: Line) {
Box(Modifier.fillMaxWidth()) {
Text(buildAnnotatedString {
line.value.ansi().asciiCodeOrStringList.toSpans().forEach {
val color = SpanStyle(
color = when (it.sgr.foregroundColor) {
BlackForeground -> Color.Black
BlueForeground -> Color.Blue
CyanForeground -> Color.Cyan
DefaultForegroundColor -> Color.White
GreenForeground -> Color.Green
MagentaForeground -> Color.Magenta
RedForeground -> Color.Red
is SelectForegroundColor -> Color.White
WhiteForeground -> Color.White
YellowForeground -> Color.Yellow
null -> Color.White
},
textDecoration = TextDecoration.combine(
listOf(
when (it.sgr.underline) {
Underlined -> TextDecoration.Underline
else -> TextDecoration.None
},
when (it.sgr.crossedOut) {
CrossedOut -> TextDecoration.LineThrough
else -> TextDecoration.None
}
)
),
fontWeight = when (it.sgr.intensity) {
BoldOrIncreasedIntensity -> FontWeight.Bold
FaintDecreasedIntensityOrDim -> FontWeight.Light
else -> FontWeight.Normal
}
)
withStyle(color) {
append(it.string)
}
}
}, modifier = Modifier.fillMaxWidth())
}
}
@Preview
@Composable
fun PreviewLine() {
Box(Modifier.background(Color.Black)) {
Line(Line("Hoge"))
}
} |