import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Parses placeholders from templates and applies placeholder values to them.
 */
public class Template {

    private static final Pattern PLACEHOLDER_PATTERN =
            Pattern.compile("\\{\\{([^:{}]+)(?::([^:{}]+))?(?::([^:{}]+))?\\}\\}");

    private Template() {
        // intentionally left blank
    }

    /**
     * Parses a template.
     *
     * @param text
     *            Raw template.
     *
     * @return A map, mapping placeholder names to their positions.
     */
    public static Map<String, Placeholder> parse(String text) {
        Matcher matcher = PLACEHOLDER_PATTERN.matcher(text);
        Map<String, Placeholder> placeholders = new HashMap<>();
        List<Placeholder.Position> positions;
        Placeholder placeholder;

        while (matcher.find()) {
            String name = matcher.group(1);
            String description = matcher.group(2);
            String type = matcher.group(3);

            if (description == null) {
                description = "";
            }
            if (type == null) {
                type = "text";
            }

            if (placeholders.containsKey(name)) {
                placeholder = placeholders.get(name);
                positions = placeholder.getPositions();
                // If this placeholder is not the first occurrence,
                // use description and type from first occurrence.
                description = placeholder.getDescription();
                type = placeholder.getType();
            } else {
                positions = new ArrayList<>();
            }

            positions.add(new Placeholder.Position(matcher.start(), matcher.end()));
            placeholders.put(name, new Placeholder(name, description, type, positions));
        }

        return placeholders;
    }

    /**
     * Applies values to a template, effectively rendering it.
     *
     * @param text
     *            Raw template.
     * @param placeholders
     *            Placeholder name to replacement map.
     *
     * @return Rendered template.
     */
    public static String apply(String text, Map<String, String> placeholders) {
        if (text == null) {
            throw new IllegalArgumentException("Text must not be null!");
        }
        for (String key : placeholders.keySet()) {
            if (placeholders.get(key) == null) {
                throw new IllegalArgumentException("Placeholder '" + key + "' must not be null!");
            }
        }

        Map<String, Placeholder> placeholderMap = parse(text);
        if (!placeholderMap.keySet().equals(placeholders.keySet())) {
            throw new IllegalArgumentException("Placeholders do not match the template's definition!");
        }

        int offset = 0;
        List<ReplaceOp> replacements = new ArrayList<>();
        for (Placeholder placeholder : placeholderMap.values()) {
            for (Placeholder.Position position : placeholder.getPositions()) {
                replacements.add(new ReplaceOp(position.getStart(), position.getEnd(),
                        placeholders.get(placeholder.getName())));
            }
        }
        Collections.sort(replacements);

        for (ReplaceOp op : replacements) {
            text = text.substring(0, op.getStart() + offset)
                    + op.getReplacement()
                    + text.substring(op.getEnd() + offset);
            offset += op.getReplacement().length() - (op.getEnd() - op.getStart());
        }

        return text;
    }

    /**
     * Replacement operations have to be executed in order, so we need an additional helper class.
     */
    private static class ReplaceOp implements Comparable<ReplaceOp> {

        private int start;
        private int end;
        private String replacement;

        public ReplaceOp(int start, int end, String replacement) {
            this.start = start;
            this.end = end;
            this.replacement = replacement;
        }

        public int getStart() {
            return this.start;
        }

        public int getEnd() {
            return this.end;
        }

        public String getReplacement() {
            return this.replacement;
        }

        @Override
        public int compareTo(ReplaceOp op) {
            if (start < op.start) {
                return -1;
            }
            if (start > op.start) {
                return 1;
            }
            if (end < op.end) {
                return -1;
            }
            if (end > op.end) {
                return 1;
            }
            return 0;
        }
    }
}
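A minimal usage sketch of the two public methods. The template string and value map below are made up for illustration, and the sketch assumes the Placeholder class referenced above (with its getName/getDescription/getType/getPositions accessors) is on the classpath.

import java.util.HashMap;
import java.util.Map;

public class TemplateDemo {
    public static void main(String[] args) {
        String raw = "Hello {{name:Recipient name}}, your order {{order:Order id:text}} has shipped.";

        // Inspect the placeholders the template declares.
        for (Placeholder p : Template.parse(raw).values()) {
            System.out.println(p.getName() + " (" + p.getType() + "): " + p.getDescription());
        }

        // Render the template; the key set must match the parsed placeholders exactly,
        // otherwise apply() throws IllegalArgumentException.
        Map<String, String> values = new HashMap<>();
        values.put("name", "Ada");
        values.put("order", "42");
        System.out.println(Template.apply(raw, values));
        // -> Hello Ada, your order 42 has shipped.
    }
}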
package leetcode;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 Given a collection of integers that might contain duplicates, nums, return all possible subsets.

 Note:
 Elements in a subset must be in non-descending order.
 The solution set must not contain duplicate subsets.

 For example,
 If nums = [1,2,2], a solution is:
 [
   [2],
   [1],
   [1,2,2],
   [2,2],
   [1,2],
   []
 ]
 * @author jun
 *
 */
public class BackTracking {

    public List<List<Integer>> subsetsWithDup(int[] nums) {
        if (nums == null || nums.length == 0) {
            return new ArrayList<List<Integer>>();
        }
        Set<List<Integer>> set = new HashSet<List<Integer>>();
        Arrays.sort(nums);
        for (int i : nums) {
            List<List<Integer>> tmp = new ArrayList<List<Integer>>();
            for (List<Integer> subset : set) {
                tmp.add(new ArrayList<Integer>(subset));
            }
            for (List<Integer> subset : tmp) {
                subset.add(i);
            }
            List<Integer> single = new ArrayList<Integer>();
            single.add(i);
            tmp.add(single);
            set.addAll(tmp);
        }
        set.add(new ArrayList<Integer>());
        List<List<Integer>> result = new ArrayList<List<Integer>>(set);
        return result;
    }

    public static void main(String[] args) {
        BackTracking bt = new BackTracking();
        int[] nums = {2, 2, 1};
        List<List<Integer>> result = bt.subsetsWithDup(nums);
        System.out.println(result);
    }
}
On Monday night, appearing on the Late Show With David Letterman, comedian Aziz Ansari came out as a feminist. His girlfriend is a "big feminist," he said, which is what led him to think about feminism in the first place. And as a result of this bit, Ansari is getting pat after pat after pat on the back, simply for picking up the word and calling it his own.

I know that 19-year-old feminist-button-wearing me would have been thrilled. At some point, especially early on in one's interaction with feminism, the word alone feels profound; it's gratifying to share it with anyone. But now, hearing Ansari's ensuing explanation makes my skin crawl: "If you look up feminist in the dictionary" — the classic introduction of mansplainers everywhere — "it just means someone who believes men and women have equal rights." I will give him the benefit of the doubt and assume that by "have" he means "should have." In any case, the audience applauds and he goes on: "Now people think feminist means, like, some woman's gonna start yelling at 'em ... That's why even some women don't clap. 'Oh I don't want that crazy bitch yelling at me!'" [Cue raucous laughter.]

In this part of the joke, Ansari summons the specter of this yelling, angry woman as something "feminist" doesn't mean. His feminism doesn't need to be threatening. It doesn't have to make anyone uncomfortable. He knows why you ladies might hesitate to use this cool new word he's handing you, and he knows why you shouldn't. Feminism, in Ansari's portrayal, is a small, easy pill to swallow. But I'm a woman, and I'm angry. Generally speaking, I try not to yell, but it happens. That crazy bitch he's talking about could be me. In disassociating her from Good Feminism, it feels as though Ansari is trying to kick me out of my own club.

Then, too, the issues Ansari goes on to list as the central tenets of his feminism are very particular, and very safe: equal pay. The vote. The undue burden of domestic responsibilities. And certainly, these are all important, and worth caring about, and, with the exception of the vote, which has been ours for the taking for almost 100 years, remain unsolved. They are, as my friend Christine Friar cheekily put it, "the man with candy in a van of feminist discourse," a conciliatory nod with dubious intent. They are, with the best of intentions, a perfectly fine starting point. They are also the issues most prevalent among feminism's first wave; Ansari's version of feminism is largely white and upper class. Low-income families can't afford to wonder who will stay home and raise the kids. Women of color face a much steeper wage gap than white women. When Ansari invokes Beyoncé and Jay Z as the model couple, he is speaking about two of the richest human beings on Earth. When he suggests that someone might go to an On the Run show and think to himself that Beyoncé should be at home cooking for Jay Z, it's a joke, but it's also absurd. Obviously, they have a chef. It also seems worth mentioning that Beyoncé herself has, on countless occasions, presented the public with her own identity as an all-caps FEMINIST — more complex, more challenging, shown more than told. That Ansari somehow manages to put Beyoncé and feminism in the same sentence without acknowledging that is irksome.

That's what grates the most about the interview: Why congratulate Ansari for claiming a watered-down version of something so many women have been arguing (however angrily, or not) for ages?
It is akin to someone showing up late to a party already well under way and announcing that it can finally begin.

This is not a problem exclusive to Aziz Ansari. Before him, there was Joseph Gordon-Levitt — widely praised for calling himself a feminist, first on Ellen in January, and again in a video called "RE: Feminism" released last month. Gordon-Levitt's feminism echoes the banal platitudes of Ansari's, if delivered more smugly; in the latter video, he says, "To me, it just means that your gender doesn't have to define who you are." Oh, word? How wonderful. It's not just that he's objectively wrong — our gender, and our gender presentation, how we identify, and the way other people identify us, do absolutely set boundaries in our lives — but that he's making it sound so simple. It's not simple. It's the most difficult thing I encounter every day. To have it borrowed, set to music, and delivered back to me by a handsome young man who strips it of all nuance entirely feels co-optive. Whatever it makes me feel, it's not congratulatory.

There is a push and a pull here, and it's not only about famous men. I ultimately would rather Ansari, and Gordon-Levitt, and any man, whoever he is, think about feminist issues than not. It will never be too soon. A little is better than not at all. Feminism is for everyone, and I want to believe that. But I no longer think claiming the word feminist is particularly worthy of accolades. Acting like one — that is.
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.apache.ivy.ant;

import org.apache.ivy.core.cache.RepositoryCacheManager;
import org.apache.ivy.core.settings.IvySettings;
import org.apache.tools.ant.BuildException;

/**
 * Cleans the content of Ivy cache(s).
 */
public class IvyCleanCache extends IvyTask {

    public static final String ALL = "*";

    public static final String NONE = "NONE";

    private boolean resolution = true;

    private String cache = ALL;

    public String getCache() {
        return cache;
    }

    /**
     * Sets the name of the repository cache to clean, '*' for all caches, 'NONE' for no repository
     * cache cleaning at all.
     *
     * @param cache
     *            the name of the cache to clean. Must not be <code>null</code>.
     */
    public void setCache(String cache) {
        this.cache = cache;
    }

    public boolean isResolution() {
        return resolution;
    }

    /**
     * Sets whether the resolution cache should be cleaned or not.
     *
     * @param resolution
     *            <code>true</code> if the resolution cache should be cleaned, <code>false</code>
     *            otherwise.
     */
    public void setResolution(boolean resolution) {
        this.resolution = resolution;
    }

    public void doExecute() throws BuildException {
        IvySettings settings = getIvyInstance().getSettings();
        if (isResolution()) {
            settings.getResolutionCacheManager().clean();
        }
        if (ALL.equals(getCache())) {
            RepositoryCacheManager[] caches = settings.getRepositoryCacheManagers();
            for (int i = 0; i < caches.length; i++) {
                caches[i].clean();
            }
        } else if (!NONE.equals(getCache())) {
            RepositoryCacheManager cache = settings.getRepositoryCacheManager(getCache());
            if (cache == null) {
                throw new BuildException("unknown cache '" + getCache() + "'");
            } else {
                cache.clean();
            }
        }
    }
}
package com.fishercoder.solutions;

import java.util.HashSet;
import java.util.Set;

/**
 * Given an array nums containing n + 1 integers where each integer is between 1 and n (inclusive),
 * prove that at least one duplicate number must exist. Assume that there is only one duplicate number, find the duplicate one.

 Note:
 You must not modify the array (assume the array is read only).
 You must use only constant, O(1) extra space.
 Your runtime complexity should be less than O(n^2).
 There is only one duplicate number in the array, but it could be repeated more than once.
 */
public class _287 {

    //no-brainer, used O(n) space
    public int findDuplicate(int[] nums) {
        Set<Integer> set = new HashSet<>();
        int dup = 0;
        for (int i = 0; i < nums.length; i++) {
            if (!set.add(nums[i])) {
                dup = nums[i];
                break;
            }
        }
        return dup;
    }

    class Solution_O1 {
        public int findDuplicate(int[] nums) {
            int slow = 0;
            int fast = 0;
            int finder = 0;
            while (true) {
                slow = nums[slow];
                fast = nums[nums[fast]];
                if (slow == fast) {
                    break;
                }
            }
            while (true) {
                slow = nums[slow];
                finder = nums[finder];
                if (slow == finder) {
                    return slow;
                }
            }
        }
    }
}
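The inner Solution_O1 above is Floyd's tortoise-and-hare cycle detection applied to the index graph i -> nums[i]: the value where the finder and slow pointers meet is the duplicate. A minimal sketch of exercising both variants; the demo class and sample array are illustrative, and the sketch assumes it lives in the same package as _287.

package com.fishercoder.solutions;

public class FindDuplicateDemo {
    public static void main(String[] args) {
        // Five values drawn from 1..4, so one value must repeat; here it is 2.
        int[] nums = {1, 3, 4, 2, 2};

        _287 solutions = new _287();
        // O(n)-space HashSet version
        System.out.println(solutions.findDuplicate(nums));                    // 2
        // O(1)-space Floyd's cycle detection version (non-static inner class)
        System.out.println(solutions.new Solution_O1().findDuplicate(nums));  // 2
    }
}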
/*
 * Copyright (c) 2022, Adam <<EMAIL>>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package net.runelite.client.plugins.loottracker;

import java.time.Instant;
import java.util.Arrays;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import net.runelite.http.api.loottracker.LootRecordType;

@Data
@NoArgsConstructor
@EqualsAndHashCode(of = {"type", "name"})
class ConfigLoot {
    LootRecordType type;
    String name;
    int kills;
    Instant first = Instant.now();
    Instant last;
    int[] drops;

    ConfigLoot(LootRecordType type, String name) {
        this.type = type;
        this.name = name;
        this.drops = new int[0];
    }

    void add(int id, int qty) {
        for (int i = 0; i < drops.length; i += 2) {
            if (drops[i] == id) {
                drops[i + 1] += qty;
                return;
            }
        }

        drops = Arrays.copyOf(drops, drops.length + 2);
        drops[drops.length - 2] = id;
        drops[drops.length - 1] = qty;
    }

    int numDrops() {
        return drops.length / 2;
    }
}
#########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from flask.ext.restful import fields
from flask_restful_swagger import swagger

from manager_rest.responses import (BlueprintState as BlueprintStateV1,  # NOQA
                                    Execution,
                                    Deployment,
                                    DeploymentModification,
                                    Node,
                                    NodeInstance,
                                    ProviderContext)


@swagger.model
class BlueprintState(BlueprintStateV1):

    resource_fields = dict(BlueprintStateV1.resource_fields.items() + {
        'description': fields.String,
        'main_file_name': fields.String
    }.items())

    def __init__(self, **kwargs):
        super(BlueprintState, self).__init__(**kwargs)
        self.description = kwargs['description']
        self.main_file_name = kwargs['main_file_name']


@swagger.model
class Plugin(object):

    resource_fields = {
        'id': fields.String,
        'package_name': fields.String,
        'archive_name': fields.String,
        'package_source': fields.String,
        'package_version': fields.String,
        'supported_platform': fields.String,
        'distribution': fields.String,
        'distribution_version': fields.String,
        'distribution_release': fields.String,
        'wheels': fields.Raw,
        'excluded_wheels': fields.Raw,
        'supported_py_versions': fields.Raw,
        'uploaded_at': fields.String
    }

    def __init__(self, **kwargs):
        self.id = kwargs['id']
        self.package_name = kwargs['package_name']
        self.archive_name = kwargs['archive_name']
        self.package_source = kwargs['package_source']
        self.package_version = kwargs['package_version']
        self.supported_platform = kwargs['supported_platform']
        self.distribution = kwargs['distribution']
        self.distribution_version = kwargs['distribution_version']
        self.distribution_release = kwargs['distribution_release']
        self.wheels = kwargs['wheels']
        self.excluded_wheels = kwargs['excluded_wheels']
        self.supported_py_versions = kwargs['supported_py_versions']
        self.uploaded_at = kwargs['uploaded_at']


@swagger.model
class Snapshot(object):

    resource_fields = {
        'id': fields.String,
        'created_at': fields.String,
        'status': fields.String,
        'error': fields.String
    }

    def __init__(self, **kwargs):
        self.id = kwargs['id']
        self.created_at = kwargs['created_at']
        self.status = kwargs['status']
        self.error = kwargs['error']


@swagger.model
class ListResponse(object):

    resource_fields = {
        'metadata': fields.Raw,
        'items': fields.List(fields.Raw)
    }

    def __init__(self, **kwargs):
        self.metadata = kwargs['metadata']
        self.items = kwargs['items']
#include <stdio.h>
#include <iostream>
#include <string>
#include <sstream>
using namespace std;

int main()
{
    int n, m, i, j, k, num, s, b;
    scanf("%d %d", &n, &m);
    bool bulb[m];
    for (i = 0; i < m; i++) {
        bulb[i] = false;
    }
    for (i = 0, k = 0; i < n; i++) {
        scanf("%d", &s);
        for (j = 0; j < s; j++) {
            scanf("%d", &b);
            if (!bulb[b - 1]) {
                bulb[b - 1] = true;
                k++;
            }
        }
    }
    if (k == m) {
        printf("YES");
    } else {
        printf("NO");
    }
}
Dynamic regulation of tumor growth and metastasis by heparan sulfate glycosaminoglycans. This article focuses on the emerging views and concepts concerning the role of cell surface and extracellular heparan sulfate-like glycosaminoglycans (HSGAGs) in tumor biology. HSGAGs, found ubiquitously both at the cell surface and in the extracellular matrix (ECM), play a critical role in regulating tumor initiation, progression, and metastasis. The diverse biological functions of HSGAGs include the regulation of coagulation, growth factor signaling, cell adhesion, proliferation, and mobility. HSGAGs, depending on their location (anchored at the cell surface or soluble as free GAGs), the signaling molecules they associate with, and their fine structures, can either promote or inhibit the tumorigenic process.
MMID: Multimodal Multi-view Integrated Database for human behavior understanding
The paper introduces the Multimodal Multi-view Integrated Database (MMID), which holds human activities in presentation situations. MMID contains audio, video, human body motions, and transcripts, which are related to each other by their occurrence time. MMID accepts basic queries for the stored data. By referring to the retrieved data, one can examine how the different modalities are cooperatively and complementarily used in real situations. This examination across different situations is essential for understanding human behaviors, since they are heavily dependent on context and personal characteristics. In this sense, MMID can serve as a basis for systematic or statistical analysis of those modalities, and it can be a good tool when one designs an intelligent user interface system or a multimedia content handling system. The authors present the database design and its possible applications.
Double Faces of the Human Gut Microbiota

The human intestine is colonized by symbiotic microbes, and the gut microbiota has gained extensive attention. Recent research has shown that this complex microflora has a close relationship with human health and that its metabolism can be both beneficial and detrimental. The intestinal microbes interact with their host by affecting the immune system, nutrition, and metabolism, as well as inflammation and disease. In this review, we discuss current knowledge of the influence and function of the gut microbiota that might have therapeutic implications. In absorbing nourishment from indigestible carbohydrates and in regulating the immune system to defend against pathogens, the gut microbiota is of great importance. In turn, any dysbacteriosis can bring about untoward effects or even diseases such as obesity or gut inflammation. As the processes and interactions between the host and the gut microbes are dynamic and complicated, further research is required to clarify the mechanisms.

Introduction
The mucosal surfaces of human beings are colonized by a very large and dynamically complicated collection of microorganisms; almost 90% of our cells are microbial, whereas the remaining 10% belong to human beings. The microbial community in the intestine seems to be the most essential one, containing the vast majority of these microbes; the gut microbiota in human beings is an endogenous metagenome containing at least 100 times as many genes as the human genome. Recently the gut microbiota has received a wide range of attention, as it contributes to regulating and controlling several metabolic pathways of its host, such as nutrient absorption and immune regulation. The development of the gut microbiota can be affected in complex ways by many factors such as environment, diet, drugs, and lifestyle, and it plays critical roles in defining myriad features of human biology. The gut microbes are enormously important in absorbing nutrition; in particular, they are essential for processing dietary polysaccharides, which are otherwise indigestible. As the gut microbes can be regarded as a metabolically active organ conducting multiple biochemical reactions, the host achieves a degree of metabolic adaptability to deal with changes in nutrient availability. However, while the gut microbes help their host extract calories from indigestible polysaccharides, disorders or other changes in gut microbial ecology may affect energy homeostasis and might act as an important factor in causing obesity. There are differences in the gut microbes between obese and lean individuals, and these differences may cause different efficiencies in extracting or storing energy from a given diet and finally result in excess caloric intake, one of the root causes of obesity. Many studies now consider that commensal bacteria, especially the gut microbes, play a significant role in immunity, for example through interaction with the gut epithelium. On the other hand, the gut microbes can also become an antigenic stimulus for inflammation. As the relationship between the symbiotic microflora and the immune system is dynamic and precarious, great changes in the gut microbial ecology may contribute to pathogenesis. Besides, the gut microbes can also help prevent and cure some diseases, or even contribute to resisting the cold, meaning that a harmonious relationship between microbiota and host may benefit metabolic homeostasis.
Nevertheless, once the commensal microbiota community becomes maladjusted, it can affect normal metabolic function and might result in specific diseases such as obesity, inflammation, or even neurodegenerative diseases; specific microflora can also be harmful to the body, as in the fatty liver disease caused by high-alcohol-producing Klebsiella pneumoniae [8].

Benefit
The human endogenous gut microflora is essential in absorbing nourishment, regulating epithelial development, and inducing innate immunity. Enormous numbers of uncultivated species and novel microorganisms have been discovered in multiple colonic mucosal sites and in feces, and this microbial diversity can help researchers explore its roles in health. Essential functions of these symbiotic bacteria include protecting against injury to epithelial cells, regulating host fat storage, and stimulating intestinal angiogenesis.

The Immune System
As an important immune organ, the gut immune system faces the challenge of resisting pathogens while remaining unresponsive to food antigens and the commensal microflora. It is evident that the gut invests abundant lymphoid tissue and immune cells to protect itself. The single layer of gut epithelium is the primary cellular barrier preventing exogenous infection; it has a surface area on the order of 400 m², and each cell maintains a tight association with its neighbors, sealing the whole surface of the gut. The barrier is a highly dynamic structure that to some extent limits antigens from entering the tissues, while the immune system constantly samples antigens. Abundant evidence shows that the normal commensal flora can exert an anti-inflammatory influence and is able to protect epithelial cells from toxic insult. Mutualism appears to exist between the gut epithelium and the commensal flora, which helps maintain epithelial integrity. For example, gut barrier function can be increased through the recognition of TLR2 (toll-like receptor 2) and TLR9 (toll-like receptor 9) ligands by epithelial cells. The normal flora also induces the cytoprotective proteins hsp25 and hsp72 in colonic epithelial cells. Although epithelial proinflammatory responses to the commensal flora exist in vitro, most individuals do not incur disease despite harboring a tremendous intestinal flora. There are also interactions of commensals with the mucosal immune system: evidence shows that reconstitution with a microbial flora is sufficient to restore the mucosal immune system in germ-free mice. Over recent years, the use of probiotics has increased, as they are believed to have beneficial effects on immune function. Stanislaw et al. found that Lactobacilli are highly effective at suppressing virus-induced inflammation and can protect against lethal disease when targeted to the respiratory epithelium. Priming with live Lactobacilli resulted in diminished granulocyte recruitment, reduced expression of multiple proinflammatory cytokines, including CXCL10, CXCL1, CCL2, and TNF, and additionally reduced virus recovery. They also identified and characterized an effective innate immune shield mediated by Lactobacilli that could ultimately serve as critical, long-term protection against infection in the absence of specific antiviral vaccines.
Banasiewicz et al.'s research showed that long-term use of the probiotics Lactobacillus and Bifidobacterium has a prophylactic effect on the occurrence and severity of pouchitis. They performed this experiment to assess the impact of long-term use of composite probiotics in patients after restorative proctocolectomy; after 9 months of probiotic intake, the number of patients with pouchitis and the average severity of pouchitis decreased markedly. As a result, they claimed that long-term use of probiotics might be a safe and well-accepted way to prevent pouchitis. Similarly, Anna Berggren et al. also studied the effect of Lactobacilli on the immune system, especially the defence against viral infections, and concluded that intake of two probiotic strains (DSM 15213 and DSM 13434) can indeed reduce the risk of acquiring common cold infections. Brylee A. Haywood et al.'s research focused on infections in athletes; in such a special community, attenuating the number and severity of infections plays an important role. Through a randomised controlled trial with two arms, placebo and probiotic, they found that the use of probiotics can efficiently reduce the duration and incidence of infections, while having no effect on their severity.

Metabolism
From an early age, humans develop symbiotic relationships with microbes. Many factors affect the convergence of microbes in our guts, such as the environment, diet, genetics, and temporal variation. The symbiotic microflora in our bodies can be regarded as a previously unknown organ in terms of its effects, especially its extensive metabolic capabilities. The microflora provides its host with a range of otherwise inaccessible metabolic capabilities. The microbiome is relatively plastic, unlike its host genome; it can be affected by diet, probiotics, drugs, and metabolites. Therefore, intentional alterations in the microbes can affect multiple biological processes and eventually impact health. Specific microbes have been observed to deactivate or activate specific xenobiotics and can alter the results of different therapeutic agents.

Nutrient Absorption
An enormous, complicated, and dynamic community of microflora coexists with humans and other mammals. These microorganisms obtain energy particularly from carbohydrates, which are usually nondigestible by their host. Recently, the study of Bacteroides thetaiotaomicron has led research on the molecular mechanisms of the degradation of complex polysaccharides by the intestinal microbiota. The gut microbes can be collectively regarded as an active organ, as they conduct a multitude of biochemical reactions that play a critical part in nutrition. There is no need for the host to evolve such functions, as the symbiotic microbes with these metabolic capabilities can break down the compounds; the host can then adapt to changes in diet and nutrient availability. In return, the microbes gain a secure and nutrient-rich niche where they can multiply. Such an arrangement makes a great deal of sense as commensalism. Commensalism is a kind of interaction between gut bacteria and their host, and commensal relationships typically center on metabolic capabilities that allow either or both partners to exploit an otherwise unavailable or poorly utilizable nutrient source.
Intuitively, it seems that the host and the gut microbiota would be in competition for nutrients, as their substrates both come from the host's diet. However, research has shown that the microbiota can aid its host in extracting maximum nutritional value from the diet: germ-free animals require 30% more caloric intake to maintain their body weight than conventionally raised counterparts that have a traditional gut microbiota. Complementary mechanisms may be at work here. The first is that microbial metabolism can convert many dietary substances into nutrients that can be utilized and absorbed by the host. The second is that the microbiota can alter the intrinsic metabolic machinery of host cells so that nutrient uptake and utilization become more efficient.

Obesity
Obesity is a disease found across different populations, with a complex etiology and variable prevalence. It can be influenced by diet, behavior, environment, and genetic factors, as well as sex, age, race, ethnicity, and socioeconomic status. Recently many studies have reported differences in the composition of the gut microbiota between obese and lean humans. The microbial genomes encode metabolic capacities that their hosts have not evolved wholly on their own; these include the ability to degrade otherwise indigestible components of the diet and therefore to affect the energy balance. Changes in gut microbial ecology produce small and gradual shifts in energy balance rather than contributing dramatically to obesity at once, yet they can result in substantial changes in body weight over the course of a year. The gut microbiota can thus be regarded as an additional contributing factor to the pathophysiology of obesity. Peter J. Turnbaugh et al.'s research showed that obesity is related to changes in the relative abundance of the two dominant bacterial phyla, the Bacteroidetes and the Firmicutes. Through metagenomic and biochemical analyses, they demonstrated that these changes affect the metabolic potential of the gut microbiota. Their results indicate that the obese microbiome has an increased capacity to harvest energy from the diet. This involves several linked mechanisms, such as microbial fermentation of dietary polysaccharides, intestinal absorption of monosaccharides and short-chain fatty acids, their conversion in the liver to more complex lipids, and microbial regulation of host genes that promote deposition of these lipids in adipocytes. Such findings suggest that the obese microbiota is more efficient than the lean microbiota at extracting energy from a given diet.

Drug Toxicity
The gut microbiota participates in the metabolism of many medical drugs and can give rise to individual differences in drug efficacy and toxicity. Michael Zimmermann et al. combined gut commensal genetics with gnotobiotics to build a pharmacokinetic model for measuring metabolism of the nucleoside analog brivudine (BRV) across tissues, so as to predict microbiome contributions to systemic drug and metabolite exposure. They found that BVU (the hepatotoxic metabolite bromovinyluracil) is more concentrated in the liver of mice with a complex microflora than in germ-free mice, which may demonstrate that the microbiota can be an enabling factor in drug toxicity.
A quantitative understanding of the microbial factors that determine microbiome contributions can help explain interpersonal variability in drug response and open possibilities for personalized medical treatments.

Gut Inflammation
The gut's ability to differentiate pathogenic from harmless microflora seems to break down in the developed world, as chronic inflammatory diseases of the gut have become increasingly common, sometimes without overt infection. The development of gut inflammation and allergy is critically related to the homeostasis between gut antigens and host immunity. Inflammation without infection is far more common than ever before: infectious diseases of the gut are mostly under control, while gastrointestinal food allergies and idiopathic inflammatory conditions have become more common. One explanation might be that the commensal flora can act as an antigenic stimulus for gut inflammation. As the relationship between the immune system and the commensal flora is precarious, any unrest in immune or epithelial homeostasis can lead to inflammation. Under such circumstances, the commensal flora seems to act as a surrogate bacterial pathogen, and the resulting condition is thought to be lifelong, as the host response is incapable of eliminating the flora. Experimental models show that immune responses to the flora can cause IBD (inflammatory bowel disease); for example, in human leukocyte antigen B27 (HLA-B27) transgenic rats, monoassociation with Bacteroides vulgatus can induce colitis, while Escherichia coli elicits no lesions.

Conclusion
The gut microbiota has an intimate relationship with its host. On the one hand, the gut microflora helps its host to obtain nutrition, especially from otherwise indigestible carbohydrates, plays an important role in the immune system's defense against exogenous or endogenous pathogens, and can even help maintain body temperature during cold exposure by supporting metabolic homeostasis. On the other hand, however, dysbacteriosis can lead to detrimental consequences such as obesity, drug toxicity, and perhaps diseases that seem to have no connection to the intestinal microbiota, like alcoholic fatty liver disease or Parkinson's disease. The metabolic interplay between the gut microbes and their hosts is so complicated that it cannot simply be defined as beneficial or harmful. As a dynamic process, the variety and mechanisms of the gut microbes remain to be clarified by further research and hold promising prospects, such as personalized medicine. Although many effects remain to be researched, the impact of gut microbes on human health and well-being is crucial and will be of great significance.
Birch pollens are known as seasonal asthma precipitants. Our earlier studies evidenced a very high frequency of positive bronchial allergen challenges in pollinosis patients sensitive to grass pollen. The aim of the study was to evaluate how often bronchial challenge with birch pollen allergen causes bronchoconstriction.
MATERIAL AND METHODS
Studies were performed outside of the pollen season on 30 patients sensitive to birch pollen allergen. Before the allergen challenges, bronchial provocation tests with methacholine were performed in all subjects.
RESULTS
About 13% of the examined group had bronchial hyperreactivity (PC20FEV1Mch < 8 mg/ml) and 50% demonstrated bronchoconstriction after birch pollen allergen inhalation.
CONCLUSIONS
About 13 percent of patients sensitive to birch pollen demonstrated nonspecific hyperreactivity out of the pollen season. Bronchial birch allergen challenge tests are positive in about half of birch-sensitive patients with pollinosis.
package com.usthe.tom.dao;

import com.usthe.tom.pojo.entity.AuthResource;
import com.usthe.tom.pojo.entity.AuthRoleResourceBind;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.List;

/**
 * @author tomsun28
 * @date 16:43 2019-07-27
 */
public interface AuthRoleResourceBindDao extends JpaRepository<AuthRoleResourceBind, Long> {

    /**
     * Query the resources owned by the current role
     * @param roleId roleId
     * @return resource list
     */
    @Query("select rs from AuthResource rs, AuthRoleResourceBind bind "
            + "where rs.id = bind.resourceId and bind.roleId = :roleId")
    List<AuthResource> findRoleBindResourceList(@Param("roleId") Long roleId);

    /**
     * Delete the record whose roleId and resourceId equal the given values
     * @param roleId roleId
     * @param resourceId resourceId
     */
    @Modifying
    @Query("delete from AuthRoleResourceBind bind "
            + "where bind.roleId = :roleId and bind.resourceId = :resourceId")
    void deleteRoleResourceBind(@Param("roleId") Long roleId, @Param("resourceId") Long resourceId);
}
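A minimal sketch of how this repository might be called from a service layer. The service class, package, constructor injection, and @Transactional boundary are illustrative assumptions rather than part of the original code; in Spring Data JPA the modifying delete query generally needs to run inside a transaction.

package com.usthe.tom.service;

import com.usthe.tom.dao.AuthRoleResourceBindDao;
import com.usthe.tom.pojo.entity.AuthResource;
import java.util.List;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

@Service
public class RoleResourceService {

    private final AuthRoleResourceBindDao bindDao;

    public RoleResourceService(AuthRoleResourceBindDao bindDao) {
        this.bindDao = bindDao;
    }

    /** List every resource bound to the given role. */
    public List<AuthResource> resourcesOf(Long roleId) {
        return bindDao.findRoleBindResourceList(roleId);
    }

    /** Unbind one resource from a role; the modifying JPQL delete runs in a transaction. */
    @Transactional
    public void unbind(Long roleId, Long resourceId) {
        bindDao.deleteRoleResourceBind(roleId, resourceId);
    }
}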
AIM OF THE STUDY
The purpose of this study was to report the current practices of French paediatric hematologists in acute childhood idiopathic thrombocytopenic purpura (ITP) and to compare them to recent publications of American and British teams.
METHOD
A questionnaire was sent online to the members of the French Society of Pediatric Hematology/Immunology (SHIP). This questionnaire, adapted from a similar American study conducted in 2001, asked 16 questions based on the clinical presentation of a 5-year-old boy referred for acute ITP.
RESULTS
59/123 SHIP members responded to the survey. In response to the question regarding initial treatment, 86% of physicians would give active treatment and only 9% would rarely or never administer any drug. When asked which agent would be used in case of treatment, 68% would choose to prescribe intravenous immunoglobulins and 32% corticosteroids; nobody recommended the use of anti-D immunoglobulins. Furthermore, 83% would usually hospitalize such a child.
CONCLUSION
Finally, this study allowed us to update the current French management of pediatric ITP, which is broadly comparable among this subset of pediatric hematologists but showed some discrepancies compared with the American and British studies.
Amira Behari (pictured) wore a long leather coat and gloves as well as her traditional Islamic garb

A Muslim woman who refused to remove her burka to testify against a man accused of abusing her has been warned by a German judge that she will be jailed if she does not comply. Amira Behari, 43, refused to reveal her face at the State Court in Munich last year when she appeared to testify against a man who allegedly abused her in a train station. The man, identified only as Kai O., allegedly called her an 'a******e' and told her to 'go back to where you belong.' Judge Thomas Mueller said at the original hearing in November that he wanted to see her face to 'read her emotions,' adding: 'I need to see you otherwise there will be considerable problems in adjudicating your case.' Behari refused, saying: 'I have a God at the end of the world who will see me right at the end. I will not do this.' In court she wore a niqab, a full-face veil with only a gap for the eyes. She also wore gloves and a long leather coat. Judge Mueller gave up and said he had no alternative but to find the accused not guilty. But his decision enraged judges and prosecutors in Germany, who appealed the decision and have now scheduled the case again for next week. The prosecutor's office in Munich consulted Koranic experts, who said it was permissible for a woman to remove her niqab before judicial authorities such as judges, police and prosecutors 'on the basis of needs and damage prevention.' If she fails to comply on March 17, she faces a fine or even a spell in jail.
package proxy

import (
	"github.com/proxy-server/internal/pkg/request"
)

type ProxyManager struct {
	proxyRepository request.Repository
}

func NewProxyManager(proxyRepository request.Repository) *ProxyManager {
	return &ProxyManager{
		proxyRepository: proxyRepository,
	}
}
FIFTY professional examination writers have been arrested in the ongoing Unified Tertiary Matriculation Examination (UTME), the Joint Admissions and Matriculation Board (JAMB) said yesterday. JAMB's spokesperson Fabian Benjamin made the revelation in the agency's weekly bulletin. Fabian identified some of the professional examination writers as Masters' degree holders, postgraduate and undergraduate students of various tertiary institutions in the country. He said the professional writers connived with owners and operators of Computer Based Test Centres (CBTs) to carry out the crime by engaging in multiple registrations in which the fingerprints of the main candidates are captured side by side with those of the impostors. The bulletin reads: "The board has uncovered a group of professional examination writers who take the exams for prospective candidates for a fee. "Some of the fraudsters are Masters' degree holders, postgraduate and undergraduate students of tertiary institutions." Dr. Fabian said the culprit confessed to having done multiple registrations of his biometrics along with those of several other candidates, with the intention of sitting for them and collecting huge sums of money from the candidates. The bulletin further said: "This development is the fundamental reason for the decision of the board to revalidate all biometrics of candidates that have taken the board's examination in recent times."
Clinical Characteristics and Outcomes of Healthcare-Associated Hematogenous Vertebral Osteomyelitis
Abstract
Background
The incidence of hematogenous vertebral osteomyelitis (HVO) has increased over recent years, likely due to longer life expectancies, higher prevalence of chronic disease, and more effective diagnostic techniques. Healthcare-associated infections, such as catheter-related and procedure-related bloodstream infections, also increase the risk of HVO. The aim of our study was to evaluate the clinical characteristics and outcomes of healthcare-associated HVO (HCA-HVO).
Methods
We conducted a retrospective chart review of adult patients with microbiologically diagnosed HVO from five tertiary-care hospitals over an 8-year period. HCA-HVO was defined as onset of symptoms after 1 month of hospitalization or within 6 months after hospital discharge, or ambulatory manipulations in the 6 months before the diagnosis. We compared the clinical characteristics and outcomes of HCA-HVO with community-acquired HVO (CA-HVO) cases.
Results
In total, 358 patients with microbiologically diagnosed HVO were included in the final analysis. Of these 358 cases, 226 (63.1%) were CA-HVO and 132 (36.9%) were HCA-HVO according to the predefined criteria. The main causative pathogens identified were methicillin-susceptible Staphylococcus aureus (32%), followed by methicillin-resistant S. aureus (MRSA) (26%), aerobic gram-negative bacteria (24%), and Streptococcus species (11%). Compared with CA-HVO cases, patients with HCA-HVO more often had neoplasm (13.6% vs. 5.8%, P = 0.01) and end-stage renal disease (8.3% vs. 2.2%, P = 0.007). MRSA was a more frequent pathogen in HCA-HVO cases than in CA-HVO (37.1% vs. 17.7%, P = 0.01). Patients with HCA-HVO had higher rates of persistent bacteremia for ≥7 days (24.2% vs. 15.5%, P = 0.04), 1-year mortality (18.2% vs. 11.5%, P = 0.08), and 1-year relapse (12.1% vs. 6.2%, P = 0.051).
Conclusion
In this study, more than one-third of HVO was healthcare associated. Patients with HCA-HVO were more likely to have underlying illness, and their causative pathogens were more frequently MRSA. Outcomes of HCA-HVO were poorer, which calls for prevention measures and early diagnosis.
Disclosures
All authors: No reported disclosures.

Background. Bone biopsy is considered the gold standard for diagnosis and treatment of osteomyelitis (OM), but few studies have investigated the extent to which it influences antimicrobial therapy in non-vertebral bones. The purpose of this study was to evaluate clinician-initiated changes to empiric antimicrobial therapy after obtaining bone biopsy results. A secondary aim was to identify predictors of a positive bone culture.
Methods. We retrospectively reviewed all cases of non-vertebral OM in patients who underwent image-guided bone biopsies between 2009 and 2016. Data on pathologic and microbiologic yield were collected, and logistic regression was used to determine potential factors affecting the microbiologic yield. Post-biopsy empiric antibiotics and final antibiotics were compared to determine if there was a change in antibiotic treatment after biopsy results were reported.
Results. We evaluated 203 bone biopsies in 185 patients. Samples from 115 (57%) cases were sent to pathology, of which 33 (29%) confirmed OM. All samples were sent to microbiology and 57 (28%) yielded a positive result.
Diabetes (OR = 2.39, P = 0.021) and white blood cell count (OR = 1.13, P = 0.006) were significantly associated with positive bone cultures in multivariate analyses. There was no association between positive cultures and the number of samples cultured, needle size, prior antibiotic use, or antibiotic-free days. Post-biopsy empiric antibiotics were given in 138 (68%) cases. Therapy was narrowed to target specific organisms in seven cases and changed due to inadequate empiric treatment in three cases. Targeted therapy was initiated in 4/65 cases in which empiric antibiotics had been initially withheld. While final antibiotics were withheld in 38/146 cases with negative bone cultures, empiric antibiotics were discontinued in only eight cases.
Conclusion. In patients with non-vertebral OM, bone biopsy cultures rarely yielded results that necessitated changes in antibiotic management. Identified bone organisms were treated by empiric therapy in most patients. While bone biopsy remains the gold standard diagnostic test for OM, further work is needed to identify patients whose management may be impacted by this procedure.
Disclosures. All authors: No reported disclosures.

Methods. This is a single-center, retrospective cohort study of children 2-17 years of age with a discharge diagnosis of acute osteomyelitis admitted between 1/1/2012 and 12/31/2015. Complicated osteomyelitis cases were excluded. Variability of the initial mg/kg antibiotic dose was determined and compared between healthy-weight, overweight, and obese children.
Conclusion. There was significant variability and lower overall dosing of first-generation cephalosporins among obese children compared with healthy-weight children. Given the increasing incidence of invasive methicillin-susceptible Staphylococcus aureus infections, this study highlights the need for practitioners to
/*******CONSTRUCTION*******/

/* Creates a new CurlObject object and returns an error code
 * on error. If an error occurs, c_obj will be set to null.
 *
 * Return Codes:
 *      CurlObjectError
 *      CURLcode
 */
int CurlObject_new(CurlObject** c_obj, const char* url)
{
    int err;

    if(!c_obj)
        return(CURL_OBJECT_BAD_ARG);

    *c_obj = NULL;
    *c_obj = (CurlObject*)malloc(sizeof(CurlObject));
    if(!*c_obj)
        return(CURL_OBJECT_MEM_ERR);

    (*c_obj)->curl = curl_easy_init();
    if(!(*c_obj)->curl)
    {
        free(*c_obj);
        *c_obj = NULL;
        return(CURL_OBJECT_MEM_ERR);
    }

    if(url != NULL)
    {
        err = curl_easy_setopt((*c_obj)->curl, CURLOPT_URL, url);
        if(err != CURLE_OK)
        {
            curl_easy_cleanup((*c_obj)->curl);
            free(*c_obj);
            *c_obj = NULL;
            return(err);
        }
    }

    err = curl_easy_setopt((*c_obj)->curl, CURLOPT_WRITEFUNCTION, parse_data);
    if(err != CURLE_OK)
    {
        curl_easy_cleanup((*c_obj)->curl);
        free(*c_obj);
        *c_obj = NULL;
        return(err);
    }

    err = curl_easy_setopt((*c_obj)->curl, CURLOPT_WRITEDATA, *c_obj);
    if(err != CURLE_OK)
    {
        curl_easy_cleanup((*c_obj)->curl);
        free(*c_obj);
        *c_obj = NULL;
        return(err);
    }

    (*c_obj)->headers = NULL;
    (*c_obj)->buff = NULL;
    (*c_obj)->url = (char*)url;
    (*c_obj)->output_type = OUTPUT_DEFAULT;
    (*c_obj)->Get = &CurlObject_get;

    return(CURL_OBJECT_OK);
}
def find_end_of_component(file_desc: io.BufferedReader, component: str, end_tags: tuple = ()):
    end_tags_of_component = ['</{}>'.format(component),
                             end_of_component_tag.lower(),
                             end_of_nnet_tag.lower(),
                             *end_tags,
                             *['<{}>'.format(component) for component in supported_components]]
    next_tag = find_next_tag(file_desc)
    while next_tag.lower() not in end_tags_of_component:
        next_tag = find_next_tag(file_desc)
    return next_tag, file_desc.tell()
People's Choice Credit Union

History
People's Choice Credit Union represents a series of credit unions that have merged since its 1949 origin, including the Northern Territory Credit Union. In August 2009, it announced plans to merge with another credit union, Savings & Loans, with the first post-merger branch opened in December 2010. On 27 June 2011, members of the credit union voted to change the name to People's Choice Credit Union. This change was adopted on 18 July 2011. People's Choice turned 70 in April 2019.

Structure and Regulation
People's Choice is a credit union, a member-owned structure where its customers are also shareholders. This avoids the conflict faced by listed entities, whose directors owe their primary duty to maximising profits in the interest of their shareholders, potentially compromising treatment of customers. People's Choice is an authorised deposit-taking institution (ADI) supervised by the Australian Prudential Regulation Authority (APRA). As a financial institution, it is also supervised by the Australian Securities and Investments Commission (ASIC) and, with its financial planning business, holds both an Australian Financial Services Licence and an Australian Credit Licence. It is regulated by the Banking Act 1959 (Cth) and the Corporations Act 2001 (Cth), and its members are covered by the $250,000 deposit guarantee under the Federal Government's Financial Claims Scheme.

Funding
People's Choice recently completed a $650 million offering of Residential Mortgage Backed Securities (RMBS). The offering was launched at $500 million but was extended to $650 million after receiving $1.4 million in bids. Light Trust 2019-1 was priced at 102 basis points above the 1-month Bank Bill Swap Rate, making it one of the best-priced mature offerings in recent times.

Affordable and liveable housing
People's Choice launched its analysis of Adelaide's suburbs for their affordability and liveability in April 2019. It launched its study of Melbourne suburbs in October 2019, together with its second Adelaide report. Both the Melbourne and Adelaide reports are to be released every six months.
Apropos my previous blog posting, I am simultaneously amused and perplexed by the number of people who, being aficionados of [X], read a statement of the form "I do not like [X]" and parse it as "[X] is bad". (Tag with: collective logic FAIL, what are they teaching them these days, death of western civilization, film at 11, etcetera.)

Mind you, it's given me some food for thought. In particular, I'm trying to figure out precisely what it is about the structure of small-screen entertainment that is inimical to the production of high-quality space opera, and why SF on TV is so generally identified with that form. I suspect, after sleeping on it, that in large part it boils down to the cost structure of network TV: to the obligation on the producers to deliver captive eyeballs to advertisers. This is guaranteed to fuck with dramatic structure, world-building, and characterization — especially when they mess with the plot to reduce audience leakage due to channel-hopping during intermissions — and it has long-term implications for written fiction too, as the uptake of ebooks and alternative delivery models based on the internet progresses.

Consider a script. A script consists of pages, each of which represents one minute of on-screen action and typically runs to 250 words, most of which are dialog. A 42-minute TV show is 10,500 words (a novelette, in fiction-not-script terms), but breaks down into four scenes, each of which needs a near cliff-hanger ending (prior to the advertising break, to keep the viewers wanting to see more) and a restart at the beginning (to drag in new viewers who have channel-hopped over from a less compelling production). Of each roughly 2,500-word scene, then, about 250-500 words will be wasted (dramatically speaking) on reestablishing the action, and the last 500-1,000 words go on setting up a mini-climax (except in the first and final scenes, where you need a setup and a climax for dramatic, not advertising, purposes). Thus, the 10,500-word script actually contains about 7,000-8,000 words of meat, or 28-32 minutes of non-repetitive on-screen action to propel the story forward. (As a reference point, an 8,000-word short story, at an average reading speed of 350 words per minute, takes 22 minutes to plough through. I'm ignoring, of course, the need for additional background description in the short story — stuff that doesn't belong in a script.)

Here's the rub: the ideational density of a TV or film production, to a viewer experiencing it in real time, is lower than that of a work of written fiction to a reader — an hour of TV with ads (and spurious scene-based setup/teardown) is equivalent to 20-25 minutes of written fiction. To keep the viewers from getting bored, it needs to add eyeball candy of some kind. What pushes primate attention buttons? Sex (hot actors) and bright colours and loud noises (explosions in spaaaaace!). These are low-level hard-wired stimuli that we can't easily ignore: if we could, we wouldn't be human. So in it goes. But there's an arms race going on: every other series on TV is doing the same thing, so our series has to be sexier and flashier than theirs if we're not going to bleed audience share. Sooner or later there comes a point where the audience can no longer ignore the fact that their buttons are being pushed — not stroked lightly, but mashed hard by an insensitive thumb driven by advertising sales — and that's when they'll start leaving in droves.
But most people have been trained to accept lots of advertising and the classic four-part structure of the ongoing TV drama episode from an early age. (Not me. Due to an accident of childhood, I watched virtually no commercial TV, and didn't have access to a colour TV or a video recorder until I was in my 20s. Yes, I am an alien.)

Two questions arise:

Firstly. Is it possible to do space opera on the small screen properly, if the constraints imposed by the necessity of slotting in with the network advertising model are replaced by some other revenue structure? (I'm purposely ignoring the BBC drama department in this context because (a) their programming schedule isn't too dissimilar to commercial network TV, and (b) they're not notably into space opera.)

Secondly. If written commercial fiction succeeds in moving online, are we going to see a breakdown of the 80-year-old contractual boilerplate that bans in-novel advertising: and if so, what are the literary consequences going to be? We know what they look like in dead tree form, and it ain't pretty — what I'm interested in is the electronic remix, because it sure as hell won't stop at static ads: we could see targeted audience demographic product placement in novels, tailored to the advertising profile of the particular reader (so that the product in question changes depending on who's reading the ebook).
// include/debug/websocket_manager.h
#pragma once
#include "debug/debugger.h"
#ifdef FLOWCHART_DEBUG
#include <vector>
#include <unordered_map>
#include <set>
#include <websocketpp/config/asio_no_tls.hpp>
#include <websocketpp/server.hpp>
#include <unordered_set>

typedef websocketpp::server<websocketpp::config::asio> WebsocketAsioServer;

namespace asyncflow
{
    namespace core
    {
        class Manager;
        class Chart;
        class Node;
        class Agent;
        class ChartData;
    }

    namespace debug
    {
        class WebsocketManager
        {
            typedef std::vector<websocketpp::connection_hdl> HDL_CONTAINER;
        public:
            WebsocketManager(core::Manager* manager);
            ~WebsocketManager();
            void Init(const std::string& ip, int port);
            void StopDebugChart(core::Chart* chart);
            void StartQuickDebug(core::Chart* chart);
            void ContinueDebugChart(core::Chart* chart);

        private:
            WebsocketAsioServer server_;
            core::Manager* manager_;
            std::unordered_map<core::Chart*, HDL_CONTAINER> chart_map_;
            std::unordered_map<std::string, HDL_CONTAINER> quick_debug_map_;
            Debugger* debugger_;

        public:
            void StartDebugChart(core::Chart* chart, websocketpp::connection_hdl hdl);
            void StopDebugChart(core::Chart* chart, websocketpp::connection_hdl hdl);
            void QuickDebugChart(core::ChartData* chart, websocketpp::connection_hdl hdl);
            void SendReply(websocketpp::connection_hdl hdl, const std::string& msg);

        private:
            void OnMessage(websocketpp::connection_hdl hdl, WebsocketAsioServer::message_ptr msg);
            bool IsPortAvailable(const char* ip, int port);
            bool ContainsHdl(const HDL_CONTAINER& container, websocketpp::connection_hdl& hdl);
            void RemoveHdl(HDL_CONTAINER& container, websocketpp::connection_hdl& hdl);
            void SendStopData(core::Chart* chart, HDL_CONTAINER& hdls);

        public:
            void Step();
            static int START_PORT;
            static std::string IP;

            //use for test
            const std::unordered_map<core::Chart*, HDL_CONTAINER>& GetChartMap() { return chart_map_; }
        };

        class WebsocketDebugConnection : public DebugConnection
        {
        public:
            WebsocketDebugConnection(WebsocketManager* websocketManager_, websocketpp::connection_hdl hdl)
                : websocketManager_(websocketManager_)
                , hdl_(hdl)
            {}

            void StartDebugChart(core::Chart*) override;
            void StopDebugChart(core::Chart*) override;
            void QuickDebugChart(core::ChartData*) override;
            void ContinueDebugChart(core::Chart*) override;
            void Reply(const std::string& msg) override;

        private:
            WebsocketManager* websocketManager_;
            websocketpp::connection_hdl hdl_;
        };
    }
}
#endif
#pragma once
#include "metawear/platform/cpp/concurrent_queue.h"
#include "metawear/platform/cpp/task.h"

#include <functional>
#include <memory>

struct AsyncCreator {
    virtual ~AsyncCreator();

    ConcurrentQueue<std::function<void (void)>> pending_fns;
    std::shared_ptr<Task> timeout;

    void create_next(bool force);
};
/**
 * Copyright 2019 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.ainoha.core.validators;

import javafx.scene.control.TextInputControl;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Allows adding validations to the annotated {@link TextInputControl} (should be used only with subtypes of
 * this class).<br>
 * <br>
 * These validations prevent invalid text from being entered. Because of this, validation is carried out on each
 * data entry attempt; if the content of the text control would be invalid once the entry is made, the entry is
 * rejected.<br>
 * <br>
 *
 * @author <NAME>
 * @since 1.0
 */
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface InputValidator {

    /**
     * Regex defining the text pattern
     */
    String pattern() default ".*";

    /**
     * Max capacity of the text field.<br>
     * <br>
     * Values less than or equal to 0 disable this validation
     */
    int maxLength() default 0;
}
Rapid identification of mutations caused by fast neutron bombardment in Medicago truncatula Background Fast neutron bombardment (FNB) is a very effective approach for mutagenesis and has been widely used in generating mutant libraries in many plant species. The main type of mutation in FNB mutants is the deletion of DNA fragments ranging from a few base pairs to several hundred kilobases, usually leading to null mutations of genes. Despite its efficiency in mutagenesis, identification of the mutation sites is still challenging in many species. The traditional strategy of positional cloning is very effective in identifying the mutation but time-consuming. With the availability of genome sequences, the array-based comparative genomic hybridization (CGH) method has been developed to detect mutation sites by comparing the signal intensities of probes between wild-type and mutant plants. Though the CGH method is effective in detecting copy number variations (CNVs), the resolution and coverage of CGH probes are not adequate to identify mutations other than CNVs. Results We report a new strategy and pipeline to sensitively identify the mutation sites of FNB mutants by combining deep-coverage whole-genome sequencing (WGS), polymorphism calling, and customized filtering in Medicago truncatula. Initially, we performed bulked sequencing for an FNB white nodule (wn) mutant and its wild-type like plants derived from a backcross population. Following polymorphism calling and filtering, and validation by manual check and Sanger sequencing, we identified that SymCRK is the causative gene of the white nodule mutant. We also sequenced an individual FNB mutant, yellow leaves 1 (yl1), and a wild-type plant. We identified that ETHYLENE-DEPENDENT GRAVITROPISM-DEFICIENT AND YELLOW-GREEN 1 (EGY1) is the candidate gene for the M. truncatula yl1 mutant. Conclusion Our results demonstrate that the method reported here is robust in identifying the mutation sites of FNB mutants. Supplementary Information The online version contains supplementary material available at 10.1186/s13007-021-00765-y. Introduction Plant mutant collections are very useful genetic resources and are widely used in forward and reverse genetic studies. Mutants can be induced by different mutagens, such as chemical agents or biological factors. Ethyl methanesulfonate (EMS) and transfer DNA (T-DNA) are two popular mutagens that typically induce point nucleotide substitutions and T-DNA insertional mutations, respectively. Although EMS mutations are relatively easy to generate, EMS always induces numerous background mutations and thus requires substantial effort to identify the causative mutation sites. By contrast, T-DNA insertion mutants typically harbor fewer background mutations, and the mutation sites can theoretically be identified from the flanking sequences. However, T-DNA mutagenesis requires plant tissue culture and usually takes a long period to accumulate a large mutant collection. FNB uses high-energy neutrons to irradiate plant seeds and mainly induces random deletions of various sizes on chromosomes.
Because deletions likely cause either complete or partial loss of the corresponding genome fragments or induce frame-shift mutations, FNB is a very powerful mutagen for generating null mutations. In addition, background mutations are much rarer in FNB mutants than in EMS mutants, making subsequent causative gene cloning easier. Hence, FNB has been broadly used in creating mutant libraries in many model and crop plants, including Arabidopsis thaliana, M. truncatula, rice (Oryza sativa), and soybean (Glycine max). Despite the efficiency of FNB in mutating plant genomes, it is still challenging to identify the mutation sites. Although the classical positional-cloning method can certainly locate the candidate gene, it requires a segregating population derived from an intercross between the FNB mutant and another accession, as well as complex genetic linkage analysis of molecular markers around the mutation site. Consequently, it can take years to identify the candidate mutation. To accelerate the process of mutation identification, a few new methods, including CGH and Deletion-TILLING (De-TILLING), have been developed. In addition, Ge et al. reported a successful case using an Affymetrix microarray-based expression profiling dataset to identify the causative DNA deletion in M. truncatula. These methods are principally based on DNA fragment deletions of substantial size, which lead to a decreased DNA hybridization signal compared with the wild type or to down-regulation of a few adjacent genes; their limitation mainly lies in relatively low resolution and accuracy. Along with the development of next-generation sequencing technology, several new algorithms/tools have been developed to detect DNA fragment deletions of various sizes, such as Pindel, BreakDancer and FNBtools. FNBtools is particularly specialized for FNB mutants. It takes advantage of the CIGAR information from informative reads extracted from the short-read alignment files to locate the DNA breakpoint, and uses a segregating population to detect the linkage between the deletion and the mutant phenotype. These new methods continually improve the accuracy and reliability of predicting structural variations on chromosomes. However, due to the complexity of genomes, particularly the duplications resulting from whole-genome duplication, we found that it is still challenging to detect reliable mutations in many cases. In some cases, the reported deletions turned out to be false positives. In this work, we hypothesized that small deletions in FNB mutants can be viewed as polymorphic sites between mutants and wild-type plants, while large-size deletions can be viewed as presence-absence variations (PAV), which typically show extremely low or even no read coverage in mutants, in contrast to the parallel control datasets, which should display normal read coverage. Based on this hypothesis, we developed a straightforward pipeline to detect the mutation sites of FNB mutants by combining a variant-calling pipeline and customized filtering of the variants. To establish the linkage between the identified mutation and the mutant phenotype, we applied the pipeline to the homozygous mutant and wild-type like plants derived from an F2 population and identified the mutated gene responsible for the phenotype. The pipeline for calling and filtering deletions of FNB mutants in M. truncatula In order to identify the deletions linked to the mutants' phenotypes, the mutants are usually backcrossed to wild-type plants.
Backcrossing not only purifies the background, but also creates a segregating population that can be used for Bulked Segregant Analysis coupled with Whole-Genome Sequencing (BSA-Seq). For BSA-Seq, if the F1 plants show a wild-type like phenotype and the ratio of mutant to wild-type like phenotype is 1:3 in the F2 progeny, then the mutant is likely to be recessive. In this scenario, the mutant plants are pooled for DNA sequencing. The wild-type like plants, which are a mixture of both homozygous and heterozygous genotypes, are also pooled for DNA sequencing. If the F1 plants do not show a wild-type phenotype and the ratio of mutant to wild-type like phenotype deviates significantly from 1:3, the mutant is likely not recessive. In this scenario, the F3 generation is used to identify homozygous mutant and wild-type like plants, which do not display phenotypic segregation in the F3 progeny. The homozygous mutants are pooled for sequencing, as are the wild-type like plants. If urgent identification of candidate deletions is needed and a segregating population is not available, the individual FNB mutant plants are directly pooled and sequenced. In addition, the wild-type plants are also pooled and sequenced. To achieve adequate coverage, we propose at least a 20× depth of Illumina paired-end sequencing. After the quality control procedures for the raw reads, a variant discovery calling pipeline is used to identify the mutations of the FNB mutants (Figs. 1 and 2). We recommend the GATK pipeline for parallel calling of a cohort of independent FNB mutants simultaneously, which helps filter out background mutations. If the mutant is recessive, the genotype of the mutant pool is a homozygous mutation, whereas the genotype of the corresponding wild-type like plants pool is a heterozygous mutation. In contrast, wild-type plants and other irrelevant mutants all carry the wild-type genotype. If the mutant is not recessive, the mutant pool is a homozygous mutation, and the wild-type pool, which is made of the wild-type like plants that do not show phenotypic segregation in the F3 generation, is a homozygous wild-type genotype. The wild-type plants and other irrelevant mutants are also homozygous wild-type genotypes. For an individual mutant, the mutant pool is a homozygous mutation, whereas all other pools and wild-type plants are wild-type genotypes. Based on these criteria, the variants are filtered for each mutant. The deletions that pass the filtering are further compared with the genome annotation file. If the mutations fall within gene coding regions, they can be considered candidate mutations. (Fig. 1: step-by-step workflow diagram to identify candidate causative deletions for FNB mutants. FNB mutants, either from a segregating population or individual mutants, were sequenced by NGS. The wild-type plants, and the wild-type like plants from the same segregating population as the FNB mutants, were sequenced as well. After joint variant calling and filtering, deletions were verified by Sanger sequencing, and the causative deletion was identified. The numbers in brackets represent the genotypes; 0/0: homozygous wild type, 0/1: heterozygous, 1/1: homozygous mutant.) Additionally, the short-read alignment files, such as BAM files, are used to visually confirm the mutations. Finally, the candidate deletion borders are amplified and further confirmed by Sanger sequencing. A case study: identification of the causative gene for an FNB mutant using FNB-BSA-Seq in M.
truncatula To evaluate the effectiveness of the pipeline and identify the causative gene for a fixation-minus (fix-) FNB mutant, which developed white and small nodules and was named white nodule (wn) (Fig. 3a, b), we applied the BSA-Seq pipeline to the wn mutant. wn was first backcrossed to wild-type M. truncatula (cv. Jemalong A17). In the F2 generation, 14 mutant plants that developed fix- nodules and 46 wild-type like plants that produced normal nodules were observed. The chi-square test indicated that the mutant to wild-type segregation ratio fits 1:3, suggesting that wn is a single recessive mutation. The mutant plants were all pooled for WGS. As controls, 25 wild-type like plants were also pooled for WGS, as was the wild-type plant A17, which serves as an additional control. For the mutant pool, more than 42.6 million paired-end reads (2 × 150 bp) were produced, which represents a 29.7× sequencing depth (Table 1). For the wild-type like plants pool and the A17 pool, 48.8 and 43.1 million reads were produced, representing 34.1× and 30.1× sequencing depth respectively (Table 1). The reads were mapped to the latest reference genome of M. truncatula (MtrunA17r5.0) and the cohort polymorphisms were called using the mapped reads. Since the mutant is recessive, the causative mutation must be a homozygous deletion, whereas the pool made of the wild-type like plants should have a heterozygous genotype, and A17 plants should have the wild-type genotype. On the basis of this guideline, the identified deletions were further filtered. In the mutant pool, nineteen homozygous deletions in total, spanning one to eighteen base pairs, were found. The deletions were further confirmed visually by inspecting the short-read alignment details in the Integrative Genomics Viewer (IGV). Among the nineteen deletions, sixteen are located in intergenic regions, and two are located in untranslated regions (UTRs) and do not affect the coding sequences. The only one located in a coding region is a single base pair deletion, which caused a frameshift in MtrunA17_Chr3g0119041 (Fig. 3c and Additional file 2: Table S1). The deletion was further confirmed by Sanger sequencing (Fig. 4a). MtrunA17_Chr3g0119041 encodes a Serine/Threonine Kinase (STK), also termed SymCRK in a previous report (Fig. 4b). It has been documented that SymCRK controls the senescence process of nodules in M. truncatula. The deletion in the coding region of SymCRK caused a defect in the STK catalytic domain. Similar to the wn mutant, the symcrk mutant also produced white and necrotic nodules. These results suggested that the 1-bp deletion in SymCRK caused the defective nitrogen fixation phenotype of the wn mutant, indicating that SymCRK is the causative gene of the wn mutant. A case study: identification of the causative deletion for an individual FNB mutant in M. truncatula To evaluate the effectiveness of the pipeline for individual FNB mutants, we applied the pipeline to an individual FNB mutant, which developed yellow leaves and was named yellow leaves 1 (yl1). Phenotypic analysis indicated that the chlorophyll concentration of the yl1 mutant was significantly lower than that of A17 plants, resulting in yellow-green cotyledons and leaves (Fig. 5a). To rapidly identify the possible causative gene for yl1, yl1 plants were pooled and directly sequenced by WGS. In total, 57.4 million paired-end reads were produced for the yl1 pool (Table 1). For polymorphism calling, both the yl1 and A17 pools were called simultaneously.
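The genotype-based filtering applied in these case studies (homozygous alternate, 1/1, in the mutant pool; heterozygous, 0/1, in the wild-type like pool where one exists; homozygous reference, 0/0, in A17) can be expressed as a single pass over the jointly called VCF. The following sketch is only an illustration of that logic, not the authors' actual script: the file path and the sample names (wn_pool, wtlike_pool, A17) are placeholder assumptions, multi-allelic sites are skipped, and no quality filtering is performed.

import gzip

VCF_PATH = "joint_calls.vcf.gz"                              # placeholder path
MUTANT, WT_LIKE, WILD_TYPE = "wn_pool", "wtlike_pool", "A17"  # assumed sample names

def genotype(sample_field):
    """Return a normalized genotype string such as '0/0', '0/1' or '1/1'."""
    return sample_field.split(":")[0].replace("|", "/")

def candidate_deletions(vcf_path):
    opener = gzip.open if vcf_path.endswith(".gz") else open
    with opener(vcf_path, "rt") as handle:
        samples = []
        for line in handle:
            if line.startswith("##"):
                continue
            fields = line.rstrip("\n").split("\t")
            if line.startswith("#CHROM"):
                samples = fields[9:]
                continue
            chrom, pos, ref, alt = fields[0], int(fields[1]), fields[3], fields[4]
            if "," in alt:                 # skip multi-allelic sites for simplicity
                continue
            if len(alt) >= len(ref):       # keep deletions relative to the reference only
                continue
            calls = dict(zip(samples, (genotype(f) for f in fields[9:])))
            if (calls.get(MUTANT) == "1/1"
                    and calls.get(WT_LIKE) == "0/1"
                    and calls.get(WILD_TYPE) == "0/0"):
                yield chrom, pos, ref, alt

if __name__ == "__main__":
    for record in candidate_deletions(VCF_PATH):
        print(*record, sep="\t")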
The raw polymorphism data were first filtered by quality, then by genotype. In the filtering process, we detected a large deletion of about one kilobase (kb). This large deletion is present in both yl1 and A17, thus it was not considered a candidate deletion (Additional file 1: Fig. S1). The deletions that were only present in the yl1 pool were considered potential candidates. In total, 115 deletions existed in the yl1 pool only. Among these deletions, two were located in the coding regions of genes on chromosome 7 and caused frameshift mutations in the corresponding genes (Additional file 3: Table S2). Visualization of the short-read alignment files in IGV indicated that the candidate deletions were well supported by the read alignments (Fig. 5b). One of the candidate deletions is located in MtrunA17_Chr7g0223481, which encodes a putative stigma-specific protein. The 1-bp deletion caused a frameshift in MtrunA17_Chr7g0223481. MtrunA17_Chr7g0223481 is the homolog of At1g50650 in Arabidopsis thaliana, which encodes the STIG1 family peptide KERBEROS (KRS). KRS regulates the development of the embryo sheath, and the krs mutant showed an absence of sheath production by the endosperm. Another deletion was located in the second exon of MtrunA17_Chr7g0253961, which encodes a putative peptidase M50 protein (Fig. 5b). The 7-bp deletion caused a frameshift mutation in MtrunA17_Chr7g0253961 and the loss of the S2P/M50 (site-2 protease/zinc metalloprotease) domain, which is critical for the proper function of the protein (Fig. 6a, b). Based on the homolog analysis, we found that MtrunA17_Chr7g0253961 is the homolog of At5g35220 in A. thaliana, which encodes ETHYLENE-DEPENDENT GRAVITROPISM-DEFICIENT AND YELLOW-GREEN 1 (EGY1). The A. thaliana egy1-1 mutant developed yellow-green leaves, which is similar to the leaves of the yl1 mutant. Thus, it is highly likely that the EGY1 homologous gene, MtrunA17_Chr7g0253961, is the causative gene of yl1. Discussion Mutagenesis is a very important tool to dissect gene function in plant genetic research. FNB, which employs high-energy neutrons as the mutagen, is very effective in generating various deletions on the chromosomes. Owing to its mutagenesis efficiency in generating null mutations, FNB has long been used to create large-scale genetic resources for forward and reverse genetics studies, and has even been used as a breeding technique for many species. Due to the complexity of plant genomes, identifying the causative deletions for FNB mutants has always been challenging in many studies, particularly before the era of NGS. Positional cloning was one of the most popular methods to identify the causative gene for FNB mutants of interest. However, it was rather time-consuming. In order to increase the efficiency of identifying the mutations, new approaches such as CGH and Deletion-TILLING have been developed. These new approaches provided new solutions for dealing with FNB mutants. However, the effectiveness and accuracy of such probe- or PCR-based methods still need to be improved. These disadvantages have limited the utilization of FNB mutants as an invaluable genetic resource. The rapid progress of NGS technology has greatly renovated strategies for detecting whole-genome polymorphisms and offers many advantages over the traditional methods for identifying the mutations of FNB mutants. With continuous cost reduction, NGS has become a regular tool accessible to most researchers.
WGS by NGS for FNB mutants not only increases the sensitivity of detecting mutations on chromosomes, but also improves the reliability and resolution of identifying deletions. In this work, we developed a useful and straightforward pipeline for the identification of candidate deletions for FNB mutants in M. truncatula. The pipeline takes full advantage of high-depth WGS and an advanced deletion-calling algorithm and presents a viable solution for identifying the candidate causative genes of FNB mutants. The pipeline developed in this work is effective in identifying the causative deletions for FNB mutants, as demonstrated by its application to the two representative FNB mutants, one in a segregating population and another as an individual FNB mutant. FNB-BSA-Seq detected a single base pair deletion in the SymCRK coding sequence of the wn mutant. The 1-bp deletion in the wn mutant caused a frameshift null mutation of SymCRK and led to white and necrotic nodules, which resembles the phenotype of the symcrk mutant. The 1-bp deletion is rather small and would not be identified by the probe-based approaches. Combining 30× deep sequencing and the deletion calling/filtering pipeline, the 1-bp deletion of the wn mutant stood out with high quality scores in the report of the pipeline, demonstrating that the pipeline is very effective in identifying small deletions of FNB mutants. For the yl1 mutant, there is no segregating population, thus we are unable to establish the linkage between the deletion and the mutant. However, a mutant without a useful segregating population is very common in genetic research practice, as generating a segregating population involves multiple steps and progenies, and usually takes a long time, especially for plants with long life cycles (M. truncatula typically has a life cycle of four to six months). It would therefore be very useful if candidate deletions could be identified for an individual FNB mutant. In our practice, the pipeline reported only two candidate deletions that are located in the coding regions of genes. This is probably because FNB mutants usually carry fewer background mutations than mutants induced by chemical mutagens such as EMS. The two candidate deletions of the yl1 mutant are 1 bp and 7 bp respectively, both causing frameshifts in the coding sequences. Through protein homolog and annotation searches, we found that the deletion in MtrunA17_Chr7g0253961 is likely the causative mutation of yl1, since MtrunA17_Chr7g0253961 is the homolog of Arabidopsis EGY1, whose mutation leads to a phenotype similar to that of yl1. Taken together, the example of the yl1 mutant demonstrated not only the effectiveness of the pipeline in identifying deletions for FNB mutants, but also the possibility of applying the pipeline to an individual FNB mutant for causative gene cloning. WGS uses high-coverage short reads to examine the agreement between the reference genome and the short reads. Sequencing depth is an important parameter that determines the sensitivity and reproducibility of detecting variations among genomes. In this work, we proposed a minimum of 20× sequencing depth for mutation detection. Given sufficient sequencing depth, WGS could theoretically detect all potential deletions caused either by artificial mutagenesis or natural variation, which has driven the utilization of WGS for indel calling in many studies.
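As a concrete illustration of the presence-absence signature described earlier (near-zero read coverage in the mutant over a region that is normally covered in the control), a simple sliding-window depth comparison can flag candidate large deletions. The sketch below is a toy screen and not part of the published pipeline: it assumes per-base depth files produced beforehand, for example with "samtools depth -a", and the window size and thresholds are arbitrary placeholders.

import sys
from collections import defaultdict

WINDOW = 1000        # window size in bp (placeholder)
MUTANT_MAX = 2       # mean depth at or below this in the mutant suggests absence
CONTROL_MIN = 10     # mean depth at or above this in the control suggests presence

def window_means(depth_tsv):
    """Aggregate 'samtools depth -a' output (chrom, pos, depth) into window means."""
    sums = defaultdict(float)
    for line in open(depth_tsv):
        chrom, pos, depth = line.split()
        sums[(chrom, int(pos) // WINDOW)] += int(depth)
    return {key: total / WINDOW for key, total in sums.items()}

def candidate_pav_windows(mutant_tsv, control_tsv):
    mutant = window_means(mutant_tsv)
    control = window_means(control_tsv)
    hits = []
    for key, ctrl_depth in control.items():
        mut_depth = mutant.get(key, 0.0)
        if mut_depth <= MUTANT_MAX and ctrl_depth >= CONTROL_MIN:
            chrom, idx = key
            hits.append((chrom, idx * WINDOW, (idx + 1) * WINDOW, mut_depth, ctrl_depth))
    return sorted(hits)

if __name__ == "__main__":
    # usage: python pav_scan.py mutant.depth.tsv control.depth.tsv
    for chrom, start, end, mut_d, ctrl_d in candidate_pav_windows(sys.argv[1], sys.argv[2]):
        print(f"{chrom}:{start}-{end}\tmutant={mut_d:.1f}\tcontrol={ctrl_d:.1f}")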
WGS usually generates a large-scale dataset including tens of millions of raw short reads, which requires well-designed algorithms and intensive data analysis procedures to extract the polymorphic information. During the past decade, a few algorithms have been developed to detect deletions from WGS data. These algorithms/tools investigate deletions on chromosomes from different angles, including using informative reads to infer the deletions or surveying read coverage. In this work, we treated the small deletions of FNB mutants as indel sites, which widely exist in natural populations and have been intensively studied in recent years. Successful tools have been developed to identify such small indels, including GATK4's HaplotypeCaller. Although it is not designed for FNB mutants, HaplotypeCaller performs well in terms of sensitivity and accuracy, possibly owing to its unique ability to perform de novo local assembly of haplotypes for the regions where a potential polymorphism occurs. GATK4's variant discovery workflow is well documented and maintained by the Broad Institute. Applying this workflow to the WGS dataset of FNB mutants is straightforward and requires a minimal learning process, making it feasible for most researchers to rapidly identify the candidate deletions for FNB mutants. Conclusion Despite the progress in developing methods to identify deletions for FNB mutants, it remains challenging to rapidly isolate the causative mutations in many studies. Combining WGS, the variant discovery workflow, and filtering by comparing the genotypes of FNB mutants and control lines, we developed a simple but efficient pipeline to rapidly identify the candidate causative deletions for FNB mutants in M. truncatula. As demonstrated by the two case studies, the pipeline combines sensitivity and accuracy in detecting mutations, and the filtering process of comparing with multiple controls is very useful for pinpointing the causative mutations. FNB mutant collections are available for many model and crop species. Due to the difficulty of rapidly identifying the causative genes, these invaluable resources have not been fully utilized. The principle and pipeline described here can also be applied to the FNB mutants of other species, offering a reliable solution for utilizing FNB mutants for genetic research or crop breeding. Plant materials The M. truncatula plants were germinated in a Petri dish and placed at 4 ℃ for one week. After germination, the plants were grown in a growth chamber at 22 ℃/16 h light and 20 ℃/8 h dark. To generate the F2 segregating population, the wn mutant was backcrossed to M. truncatula cv. Jemalong A17 and the F1 plants were selfed to generate the F2 population. The cotyledon and leaf phenotypes of yl1 and A17 were analyzed one month and two months post-germination, respectively. Rhizobia inoculation and nodule phenotype analysis The F2 population of the wn mutant was grown in sand and inoculated with rhizobia Sm2011 (Sinorhizobium meliloti 2011) as previously reported. The nodules of wild-type and wn plants were observed and analyzed with a stereomicroscope at 30 days post-inoculation. The plants with white and necrotic nodules were pooled as the mutant pool, and the plants with red and normal nodules were pooled as the wild-type like pool. DNA extraction and sequencing The DNA samples from leaves were extracted using the Trelief™ Plant Genomic DNA Kit (Beijing TsingKe Biotech; TSP101).
The DNA samples were sequenced on an Illumina NovaSeq platform with a 150-bp paired-end (PE) protocol. Sequence alignment, indel calling, and visualization The polymorphism calling was conducted according to GATK4's instructions (GATK v4.1.7.0). Briefly, the 150-bp paired-end reads were mapped to the M. truncatula reference genome (MtrunA17r5.0) using BWA-MEM (Burrows-Wheeler Aligner; version 0.7.12-r1039). For read alignment, the mem algorithm was used with a minimum seed length of 19, a band width of 100, and an off-diagonal X-dropoff of 100. After sorting, adding read groups, validating, and marking duplicates, the mapped reads in BAM format were used to call variants with HaplotypeCaller in GVCF mode. For the calling step, the assembly-region-padding was set to 100, the base-quality-score-threshold was set to 19, the max-reads-per-alignment-start was set to 50, and the max-assembly-region-size and min-assembly-region-size were set to 300 and 50, respectively. The individual GVCF files were subjected to joint variant calling. The indels were filtered from the variants after calling using the SelectVariants function of GATK. The deletions were selected from the indels for further analysis. The IGV (v2.8.2) software was used for further visual verification of deletions in the mapped reads. Sanger sequencing validation The deletion borders of SymCRK in wn and MtEGY1 in yl1 were amplified using Q5® High-Fidelity DNA Polymerase (NEB #M0491) with primers spanning the target deletion sites. The PCR was conducted as follows: 98 °C for 10 s, 60 °C for 30 s and 72 °C for 30 s, for 35 cycles. The PCR products were Sanger sequenced. The primers are listed in Additional file 4: Table S3.
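For readers who want to reproduce the alignment and calling steps described above, the commands can be scripted roughly as follows. This is a hedged sketch, not the authors' exact pipeline: the reference path, sample names, read-group string and output names are placeholders; the duplicate-marking and validation steps mentioned in the Methods are omitted for brevity; and CombineGVCFs is assumed for the joint-calling step (GenomicsDBImport would work equally well). It assumes bwa, samtools and gatk are on the PATH.

import subprocess

REFERENCE = "MtrunA17r5.0.fasta"              # placeholder path to the reference genome
SAMPLES = ["wn_pool", "wtlike_pool", "A17"]   # placeholder sample names

def run(cmd):
    print(" ".join(cmd))
    subprocess.run(cmd, check=True)

def align(sample):
    """bwa mem with the seed length, band width and X-dropoff settings quoted in the Methods."""
    bam = f"{sample}.sorted.bam"
    bwa = ["bwa", "mem", "-k", "19", "-w", "100", "-d", "100",
           "-R", f"@RG\\tID:{sample}\\tSM:{sample}\\tPL:ILLUMINA",
           REFERENCE, f"{sample}_R1.fastq.gz", f"{sample}_R2.fastq.gz"]
    p1 = subprocess.Popen(bwa, stdout=subprocess.PIPE)
    subprocess.run(["samtools", "sort", "-o", bam, "-"], stdin=p1.stdout, check=True)
    p1.stdout.close()
    run(["samtools", "index", bam])
    return bam

def call_gvcf(sample, bam):
    """HaplotypeCaller in GVCF mode with the parameter values quoted in the Methods."""
    gvcf = f"{sample}.g.vcf.gz"
    run(["gatk", "HaplotypeCaller", "-R", REFERENCE, "-I", bam, "-O", gvcf,
         "-ERC", "GVCF",
         "--assembly-region-padding", "100",
         "--base-quality-score-threshold", "19",
         "--max-reads-per-alignment-start", "50",
         "--max-assembly-region-size", "300",
         "--min-assembly-region-size", "50"])
    return gvcf

if __name__ == "__main__":
    gvcfs = [call_gvcf(s, align(s)) for s in SAMPLES]
    combine = ["gatk", "CombineGVCFs", "-R", REFERENCE, "-O", "combined.g.vcf.gz"]
    for g in gvcfs:
        combine += ["-V", g]
    run(combine)
    run(["gatk", "GenotypeGVCFs", "-R", REFERENCE,
         "-V", "combined.g.vcf.gz", "-O", "joint_calls.vcf.gz"])
    run(["gatk", "SelectVariants", "-R", REFERENCE, "-V", "joint_calls.vcf.gz",
         "--select-type-to-include", "INDEL", "-O", "joint_indels.vcf.gz"])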
Three immunoglobulin classes in the pigeon (Columba livia). Three classes of immunoglobulins have been identified in the pigeon. IgG and IgM were purified from pigeon serum, whereas IgA was isolated from pigeon hepatic bile. Pigeon IgG and IgM had the same properties and immunohistological distribution as their chicken homologues. Pigeon IgA was identified on the following grounds: it contains the same light chains as pigeon IgM and IgG; it is relatively abundant in exocrine secretions such as bile, egg white, crop milk and intestinal fluid, whereas it is present only in small amounts in serum; it occurs in the cytoplasm of the majority of the immunocytes from the intestinal mucosa; and its electrophoretic mobility and molecular size are similar to those of chicken IgA. Surprisingly, no immunoglobulin-containing cells could be detected in sections of the wall of the crop-milk gland, despite the high IgA content of the crop milk.
1. Field of the Invention The present invention relates to a test structure and a test method utilizing the test structure, and, particularly, to a test structure and a test method for wafer level reliability (WLR). 2. Description of the Prior Art In the semiconductor manufacturing industry, lifetime tests can generally be categorized into two types, called “product reliability” and “process reliability” respectively. Product reliability means that a produced and preliminarily packaged chip is tested under high temperature, high pressure and high humidity to obtain its lifetime. Process reliability means that a preliminarily produced semiconductor element is subjected to a lifetime test aimed at the semiconductor element material, to ensure that no reliability problems arise in subsequent processes. The reliability test methods in the semiconductor manufacturing industry can also be categorized into two types, “Wafer-Level Reliability (WLR)” and “Package-Level Reliability (PLR)”. The differences between the two test types include that, in the former, the wafer is directly placed in a test machine on a common production line for a test, while in the latter, the wafer must be cut into chips and the chips packaged to form test samples (devices under test, DUT); the samples are then socketed into a burn-in board and placed in a high temperature oven (for example, up to 350° C.) for a test. The WLR method is generally fast and direct. In the WLR method, the test result can be obtained in a very short time, without waiting for packaging, to determine the reliability of the given wafer for subsequent improvements or processes. The PLR method, by contrast, needs a long test time. If the test result is not satisfactory, it often takes a long time to obtain the next test result, due to the time consumed for packaging and re-testing the further modified product. One of the basic reliability tests is the electromigration (EM) test. EM means that when a conducting wire (e.g. an aluminum wire) connecting transistors carries a current for a long time, the aluminum atoms are moved from the negative electrode to the positive electrode by the electron wind force. A depletion of aluminum atoms at the negative electrode occurs, causing the circuit to open, or a stacking of aluminum atoms at the positive electrode occurs, causing the circuit to short. As time goes on, these conditions become serious and finally the integrated circuit fails to operate. Hence, the EM test is a basic and important test item. A traditional EM test of a metal is carried out by PLR, in which the test conditions are close to the use conditions of the metal, and this approach is widely accepted in the semiconductor industry. Nevertheless, if the test time of the PLR-EM test could be shortened by using a WLR-EM test, it would be a great help in shortening the manufacturing time. However, such an EM test is not generally used in the industry, and the main reason is that some arguments are still unresolved. First, with respect to the failure mechanism of EM: since the test current in the WLR-EM test is very high (for example, 60 to 70 mA) and may be ten-fold that used in the traditional PLR test, the aluminum wire may melt and open due to the high temperature rather than due to EM. Second, it is uncertain whether the results of the WLR-EM test and the traditional PLR-EM test are in good correlation. If the correlation is poor, the test result of the WLR-EM test may not be correct. 
Therefore, there is still a need for a better wafer level test structure and a better wafer level test method for conveniently obtaining test results.
/** * nimble:restivus */ declare module 'meteor/nimble:restivus' { import { Meteor } from 'meteor/meteor'; import { ClientRequest, ServerResponse } from 'http'; type RestivusOptions = { apiPath?: string; auth?: { token: string; user: Function; }; defaultHeaders?: object; defaultOptionsEndpoint?: { authRequired?: boolean; roleRequired?: boolean; action?: Function; }; enableCors?: boolean; onLoggedIn?: Function; onLoggedOut?: Function; prettyJson?: boolean; useDefaultAuth?: boolean; version?: string; }; type EndpointContext = { user: Meteor.User; userId: string; urlParams: any; queryParams: any; bodyParams: any; request: ClientRequest; response: ServerResponse; done(): void; }; export class Restivus { constructor(options: RestivusOptions); addRoute( path: string, endpoints: { getAll?(this: EndpointContext): any; get?(this: EndpointContext): any; post?(this: EndpointContext): any; put?(this: EndpointContext): any; patch?(this: EndpointContext): any; delete?(this: EndpointContext): any; } ): any; addRoute( path: string, routeOptions?: { authRequired?: boolean; roleRequired?: string | string[]; action?: Function; }, excludedEndpoints?: string | string[], endpoints?: { getAll?(this: EndpointContext): any; get?(this: EndpointContext): any; post?(this: EndpointContext): any; put?(this: EndpointContext): any; patch?(this: EndpointContext): any; delete?(this: EndpointContext): any; } ): any; addCollection( path: string, endpoints: { getAll?(this: EndpointContext): any; get?(this: EndpointContext): any; post?(this: EndpointContext): any; put?(this: EndpointContext): any; patch?(this: EndpointContext): any; delete?(this: EndpointContext): any; } ): any; addCollection( path: string, routeOptions?: { authRequired?: boolean; roleRequired?: string | string[]; action?: Function; }, excludedEndpoints?: string | string[], endpoints?: { getAll?(this: EndpointContext): any; get?(this: EndpointContext): any; post?(this: EndpointContext): any; put?(this: EndpointContext): any; patch?(this: EndpointContext): any; delete?(this: EndpointContext): any; } ): any; } }
import type { APIInteraction as I, APIInteractionResponse as R } from "../Types"; /** Generic manager that every other manager extends. */ export interface Manager<Data extends I, Response extends R> { /** * Finds the internal function that handles the interaction and executes it, * returning the result. If no function is found, a properly formatted "error * message" is returned instead. * @param interaction The interaction to be processed * @returns The (initial) response to the interaction. */ execute(interaction: Data): Promise<Response>; }
""" OVERVIEW This lecture finishes the discussion of dictionaries, then introduces inductive reasoning and recursion. Examples include generating the Fibonacci sequence and solving the Towers of Hanoi problem. """ """ Check yourself: - what is recursion? - what is a recursive case? - what is a base case? """ """ Problem sets - successive approximaition and a wordgame """ """ ANNOTATIONS modular abstraction - isolate an procedure in one place divide and conquer .definition - take a hard problem, and break it up into simpler pieces .benefits - small problems are easier to solve - solutions to small problems can easily be combined to solve the original problem recursion (an implementation of the divide and conquer technique) .definition - a way of describing problems - a way of designing solutions .base case - describe the simplest version of the problem - direct anwer .inductive case - reduce to a simpler version of the same problems - other simpler operations example: x^n = x*x*x...x x^n = n = 0 -> x = 1 (base case) x^n = x*(n-1) (base case) """ def simpleExp(x, n): # base case if n == 0: return 1 else: # smaller version of the same problem return x * simpleExp(x, n-1) """ Palindrome .definition - is a word, phrase, number, or other sequence of symbols or elements, whose meaning may be interpreted the same way in either forward or reverse direction base case: - if both the first and the last word are equal, then it is a palindrome """ def isPalindrome(setence): if len(setence) <= 1: return True else: return setence[0] == setence[-1] and isPalindrome(setence[1:-1]) def normalizeSetence(setence): return setence.replace(" ", "").lower() setences = [ "Amor, Roma", "A Santa dog lived as a devil God at NASA", "Animal loots foliated detail of stool lamina", "No, I'm not" ] for setence in setences: print "is %s a palindrome?" % setence, setence = normalizeSetence(setence) print isPalindrome(setence) """ !!! Break problems into simpler versions of the same problem """ """ FIBONACCI """ def fib(x): if x <= 1: return 1 else: return fib(x-2) + fib(x-1) print fib(0)
1. Field of the Invention This invention relates to a bending apparatus having a die and a punch for bending a sheet (i.e., a workpiece). 2. Description of the Related Art Japanese Kokai Patent Publication H2-112826 discloses a bending apparatus for bending a composite (laminate) metal sheet comprising an outer and an inner metal sheet and a synthetic resin layer interposed between the two metal sheets. The purpose of this apparatus is to preclude a phenomenon of bending due to deviation of the outer and inner metal sheets. To meet this end, the apparatus comprises a first die, which has an end valley for constituting a V-shaped groove and is vertically movable, and a second die, which has wall surfaces for constituting the V-shaped groove. When the first die is pushed by the punch to be lowered to a predetermined position, its end valley becomes continuous to the inclined wall surfaces of the second die, thus completing the V-shaped groove. In a first stage of operation a V-shaped bend of short inclined surfaces is formed with the first die, and in the second stage a V-shaped bend of long inclined surfaces is formed with the first and second dies to thereby correct the bend formed in the first stage. According to this teaching, however, there is a problem in that a flaw is provided in the workpiece along the boundary between the first and second dies. Japanese Kokai Utility Model Publication H3-14010 discloses a bending apparatus, which, like the above bending apparatus, has a purpose of precluding the phenomenon of bending when bending a composite (laminate) metal sheet. The apparatus features a pair of steel receptacles having respective spherical or semi-cylindrical journals formed on the back side. The surfaces of the steel receptacles, on which the workpiece is set, are located on the outer side of the center of rotation of the journals. In this arrangement, in an initial stage of the bending process the workpiece undergoes the bending process while the steel plate receptacles are moved away from the bending line. Thus, the phenomenon of bending due to the pulling of the workpiece toward the bending line can be basically avoided. In addition, since the amount of friction between the workpiece and the steel receptacles is reduced during the bending process, it is possible to inhibit the generation of flaws due to the friction. In this case, however, at the end of the bending process a gap is produced between the steel receptacles. Thus, it is impossible to apply back pressure to the outer surface of the workpiece near the radius portion, and it is thus difficult to make the bending radius smaller than a certain value. Further, a flaw is likely to be produced along the borderline between the steel receptacles and the workpiece. Japanese Kokai Utility Model Publication H2-42718 discloses a bending apparatus which seeks to solve the above problem. This apparatus comprises a V-shaped die having a central V-shaped die groove, a pair of slidable supports of an elastic material provided at the ends of the V-shaped die, and rotary dies provided on the upper end of the slidable supports and having a semi-circular sectional profile. In the initial stage of the bending process, the workpiece is bent about its portion in contact with the punch with the descent of the punch and the rotation of the rotary dies, and at the end of the bending process the V-shaped die is brought into contact with the radius portion. Thus, accurate bending can be obtained. 
However, again in this case an entire surface contact cannot be obtained although an end portion of the V-shaped die is brought into contact with the workpiece at the end of the bending process, thus posing the problem that a flaw is liable to be produced in the workpiece. Japanese Kokai Patent Publication H2-11225 discloses a further bending apparatus, which, like the above bending apparatus, seeks to suppress the phenomenon of bending. To attain this aim, in a first step an elastic block is set in a V-shaped groove of a die for bending with a punch, and a second step of bending is performed with a workpiece reception die, which is made of two different elastic materials having different hardnesses. However, again in this case the restoration of the elastic materials from deformation is deteriorated with the lapse of time. Therefore, the apparatus is not suitable for continuous operation. In addition, it is thought that the quality of bending is subject to deterioration with the lapse of time. Further, the width of the V of the die that is required to preclude flaws from being produced in the workpiece is 5 to 6 times the thickness of the workpiece. In the above case, however, it is possible to cope with only a single workpiece thickness.
a = sorted(list(map(int, input().split(' ')))) tmp = [a[1]-a[0], a[2]-a[1], a[2]-a[0]] if tmp[0] % 2 == 0: print(int(tmp[0] / 2) + tmp[1]) else: print(int((tmp[1]+tmp[2]+1) / 2) + 1)
NEW GARDEN—Police needed to use a taser on a Landenberg man after he resisted arrest by refusing to leave the property of a homeowner. Jeffrey E. Hoopes, 47, of Landenberg, was arrested and charged with criminal trespass, resisting arrest, harassment, and disorderly conduct, following an incident that occurred at 3:37 p.m., in the 900 block of Newark Road, in New Garden Township on March 7. Police were called to the location for a reported disturbance after Hoopes allegedly walked into a residence and refused to leave at the homeowner’s request. Hoopes had previously been notified that he was not welcome or permitted to be at this residence. When police arrived and attempted to take him into custody, the report said he actively resisted efforts to handcuff him and ignored verbal commands to cease his resistance. Police needed to use a taser to place Hoopes into custody. He was transported to Central Booking where he was processed and held for arraignment. He was subsequently released on $10,000 unsecured bail. A pedestrian was struck Wednesday along Lincoln Highway, after he exited his vehicle and crossed the road, police said. That accident happened shortly after 3:30 p.m. in the area of Lincoln Highway and South 12th Avenue. Alfred Adams, 74, was injured and taken to Paoli Hospital. The 51-year-old driver of the striking vehicle stayed at the scene, and did not suffer any injuries, police said. On March 19, at 10:59 p.m., Coatesville Police were dispatched to the 200 block of Union Avenue in reference to a suspicious condition. The caller advised there was a minivan that had been occupied for approximately 45 minutes. Upon arrival, police made contact with Holly Back, who was suffering from an overdose after taking heroin. Police administered Narcan and summoned EMS to treat the patient, who was later transported to Brandywine Hospital for further treatment. The passenger in the vehicle, Miles Munion of Goosetown Road in Coatesville, who was also under the influence of heroin, was found to be in possession of a needle, a crack pipe, and crack cocaine. Munion was taken into custody and transported back to the police station where he was processed and held for arraignment. On March 18 at 7:42 p.m., officers on patrol in the area of the 500 block of East Lincoln Highway observed Jodi Kerns, 36, of Coatesville, who was confirmed to have an active warrant for her arrest. Kerns was taken into custody without incident. After arriving at the station, Kerns admitted to having items of drug paraphernalia as well as possessing a small amount of crack cocaine. These items were retrieved and Kerns was additionally charged with these drug offenses. On March 17 at 2:33 p.m. police were dispatched to the 700 block of Valley Road for a criminal mischief report. A 2014 Dodge Avenger had a driver's side mirror broken overnight. Estimated damage was $50. Nathan Conforti, 18, of West Chester, and Daniel McGinn, 18, of Coatesville were arrested for DUI on March 16 at 11:50 p.m. at a DUI checkpoint at 1121 Downingtown Pike, West Bradford. Three men were arrested after a bullet was shot into a residence at 2017 Ridgewood Drive in West Bradford on March 16 at 9:15 p.m., police said. Arrested were a 32-year-old man from Coatesville, a 35-year-old man from Downingtown, and a 31-year-old man from Concord, N.C., who were located on a nearby property. Charles E. Morgensen, 30, of Coatesville, was charged with careless driving following a March 15 one-vehicle accident on Doe Run Road in East Marlborough Township. 
Morgensen was driving south on Doe Run but crossed over into the northbound lane and struck an embankment and then a sign for Sweeney’s Auto Service. Morgensen was transported for treatment of a suspected injury. Rex R. Anderson, 63, and Deborah D. Anderson, 59, were arrested for DUI and marijuana possession respectively. The arrests followed a traffic stop on Route 1 at Greenwood Road at 4:20 p.m. on March 11. Wesley Cordell Holloway, 36, would be charged with retail theft after he attempted to leave the Walmart in East Marlborough Township on March 9. A trooper and a store employee met Holloway as he was trying to leave the store with an action figure and multiple video game controllers. He was taken into custody and the items were returned, police said. Anastasia Janoszewski, 19, of Chadds Ford, was found to be in possession of a small amount of marijuana and paraphernalia on Creek Road south of Cossart Road on Feb. 17, police said. Charles A. Cole, 67, of Cochranville, was charged with following too closely after an accident on Lenape Road at Pocopson Road. According to a report, Cole struck another vehicle that was stopped for a red light at the intersection at 3:46 p.m. on Dec. 26. No injuries were reported. Police say there is a man posing as a Chester Water Company employee telling homeowners that if they don’t give him $75 cash now, their water will be turned off. Police want to remind residents that this is a scam. Do not give the male any money. Police ask anyone approached by this individual to call 911. Police said Javaun D. Stevens, 23, of Wilmington, was arrested and charged with simple assault and harassment, following an altercation in a vehicle, during which he reportedly struck a female in the face with his hand. The incident occurred on March 9 at 6:45 a.m., on Brittany Drive, in New Garden Township. Police said they observed injury to the victim consistent with the assault as reported. Stevens was taken into custody and was transported to Central Booking where he was processed and held for arraignment. He was released on $10,000 bail. John A. Conner, 21, of Wilmington, was arrested on suspicion of DUI and related traffic offenses, as well as for possession of marijuana and related paraphernalia, following a March 9 incident on Limestone Road, in New Garden Township, a report said. According to that report, police responded to a vehicle in a field, adjacent to Limestone and Southwood roads. Upon arrival, police located a mud-covered Mazda off the roadway and found the driver, later identified as Conner, asleep at the wheel, with the vehicle running. Once awakened by police, Conner exhibited indications of being intoxicated, police said and he was taken into custody for suspicion of DUI. A search of the vehicle produced a bag containing marijuana. Southern Chester County Regional Police Department is investigating the recovery of a stolen 1998 Honda CRV that was destroyed by fire. The incident occurred on March 9 at 12:23 a.m., in a field adjacent to Broad Run and Eden Roads, in New Garden Township. Police were dispatched for a report of a brush fire. Upon arrival, police discovered that it was actually a vehicle on fire and verified that there were no occupants inside. The Avondale Fire Company responded and extinguished the fire. The vehicle was towed from the scene later that day. 
It was subsequently inspected by both police and the Chester County Fire Marshal, and identifying numbers were obtained that, once queried, revealed that the vehicle had been stolen from New Castle County in November. Police are asking that anyone with information contact police at (610)268-3171. Anonymous tips can be submitted by calling (610)268-2907 ext. 222. Junior Gonzales-Martinez, 21, of Toughkenamon, was arrested and charged with DUI and related traffic offenses, as well as with Endangering the Welfare of Children and Recklessly Endangering Another Person, following a traffic stop for an expired registration. The incident occurred on March 8 at 1:28 a.m. in the 100 block of Center Street, in New Garden Township. Upon making contact with Gonzales-Martinez, police observed indicators suggesting intoxication, and he possessed no license, registration, or insurance for the vehicle. Police also observed that he was transporting two passengers, an adult female and an 8-year-old child. Both passengers were transported to their residence by police. Gonzales-Martinez was taken into custody for suspicion of DUI and submitted to a chemical test of his breath, resulting in a B.A.C. of .151 percent. Alejandro Gonzalez-Hernandez, 21, of Toughkenamon, was arrested and charged with DUI and related traffic offenses, after he failed to stop for a posted stop sign and nearly collided with a police vehicle. The incident occurred on March 9 at 2:37 a.m., at the intersection of Newark Road and Pine Street, in New Garden Township. Upon making contact with Gonzalez-Hernandez, police observed indicators suggesting intoxication, and a record check revealed that he was an unlicensed driver. He was taken into custody for suspicion of DUI and submitted to a chemical test of his breath, resulting in a B.A.C. of .126 percent.
// // RectBuffers.h // FMChart // // Created by <NAME> on 2015/08/26. // Copyright © 2015 <NAME>. All rights reserved. // #import <Foundation/Foundation.h> #import <CoreGraphics/CGGeometry.h> #import "Rect_common.h" #import "Buffers.h" #import "Engine.h" @interface NSValue (FMRectCornerRadius) + (instancetype _Nonnull)valueWithCornerRadius:(FMRectCornerRadius)radius; - (FMRectCornerRadius)FMRectCornerRadiusValue; @end /** * FMUniformPlotRectAttributes is a wrapper class for struct uniform_plot_rect that provides setter methods. * The direction 'top' is defined by user interface (i.e. that of the view coordinate system). * color vectors are in RGBA format. * You better not to call setDepthValue directly (it is for FMPlotArea) unless you're implementing custom primitives. */ @interface FMUniformPlotRectAttributes : NSObject @property (readonly, nonatomic) id<MTLBuffer> _Nonnull buffer; @property (readonly, nonatomic) uniform_plot_rect * _Nonnull rect; @property (readonly) BOOL roundEnabled; - (instancetype _Nonnull)initWithResource:(FMDeviceResource * _Nonnull)resource; - (void)setColorVec:(vector_float4)color; - (void)setStartColor:(vector_float4)startColor position:(CGPoint)startPosition endColor:(vector_float4)endColor position:(CGPoint)endPosition ; - (void)setCornerRadius:(float)radius; - (void)setDepthValue:(float)value; @end /** * FMUniformBarConfiguration is a wrapper class for struct uniform_bar_conf that provides setter methods. * * barDirection property determines which direction a bar with positive value will extend, * and more importantly, it decides which is 'top'. * * anchorPoint decides the origin (and where bars extend from) * * The point is, the data point is given in the form of 2-component value : * so 'the goal' of the bar extension is decided solely by the data point, and * 'the origin/root' of the bar is decided by anchorPoint and direction. * * Well in most cases, what you need to do is so simple : * set barDirection = (0,1) for bar series and barDirection = (1, 0) for column series. * (keep anchorPoint to (0, 0).) */ @interface FMUniformBarConfiguration : NSObject @property (readonly, nonatomic) id<MTLBuffer> _Nonnull buffer; @property (readonly, nonatomic) uniform_bar_conf * _Nonnull conf; - (instancetype _Nonnull)initWithResource:(FMDeviceResource * _Nonnull)resource; - (void)setDepthValue:(float)value; - (void)setAnchorPoint:(CGPoint)point; - (void)setBarDirection:(CGPoint)dir; @end /** * FMUniformBarAttributes is a wrapper class for struct uniform_bar_attr that provides setter methods. * Interpretations of inner/outer radius and colors are equivalent to those of FMUniformPlotRect. * * see FMUniformBarConfiguration for general interpretation of the 'top'. * if the given value is negative, then 'top' will be reverted (direction which the bar extends), * but the 'left' and the 'right' will not. * (barDirection=(0,1), value=(1, -1), then the bar will extends downward, so the 'top' means downward, but 'left' remains left(negative x), * and the 'rigtht' remains right (positive x)). * */ @interface FMUniformBarAttributes : FMAttributesBuffer @property (readonly, nonatomic) uniform_bar_attr * _Nonnull attr; - (instancetype _Nonnull)initWithResource:(FMDeviceResource * _Nonnull)resource size:(NSUInteger)size UNAVAILABLE_ATTRIBUTE; - (instancetype _Nonnull)initWithResource:(FMDeviceResource * _Nonnull)resource; - (void)setColorVec:(vector_float4)color; /** * For interpretations of each corner's position, see class summary. * all radiuses are in logical pixels. 
*/ - (void)setCornerRadius:(float)lt rt:(float)rt lb:(float)lb rb:(float)rb; /** * For interpretations of each corner's position, see class summary. * all radiuses are in logical pixels. */ - (void)setCornerRadius:(FMRectCornerRadius)radius; /** * sets uniformal radius in logical pixels to all corners. */ - (void)setAllCornerRadius:(float)radius; /** * sets bar width in logical pixels. */ - (void)setBarWidth:(float)width; @end /** * See FMAttributesArray, FMArrayBuffer and FMUniformBarAttributes for details. */ @interface FMUniformBarAttributesArray : FMAttributesArray<FMUniformBarAttributes*> - (instancetype _Nonnull)initWithResource:(FMDeviceResource * _Nonnull)resource capacity:(NSUInteger)capacity ; @end
package br.com.zupacademy.juliodutra.casadocodigo.controller.dto;

import br.com.zupacademy.juliodutra.casadocodigo.model.Categoria;

public class CategoriaDto {

    private String nome;

    public CategoriaDto(Categoria categoria) {
        this.nome = categoria.getNome();
    }

    public String getNome() {
        return nome;
    }
}
<filename>src/main/java/io/github/vrchatapi/model/Avatar.java /* * VRChat API Documentation * * The version of the OpenAPI document: 1.6.7 * Contact: <EMAIL> * * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). * https://openapi-generator.tech * Do not edit the class manually. */ package io.github.vrchatapi.model; import java.util.Objects; import java.util.Arrays; import com.google.gson.TypeAdapter; import com.google.gson.annotations.JsonAdapter; import com.google.gson.annotations.SerializedName; import com.google.gson.stream.JsonReader; import com.google.gson.stream.JsonWriter; import io.github.vrchatapi.model.AvatarUnityPackageUrlObject; import io.github.vrchatapi.model.ReleaseStatus; import io.github.vrchatapi.model.UnityPackage; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import java.io.IOException; import java.util.ArrayList; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import org.threeten.bp.OffsetDateTime; /** * Avatar */ @javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen") public class Avatar { public static final String SERIALIZED_NAME_ASSET_URL = "assetUrl"; @SerializedName(SERIALIZED_NAME_ASSET_URL) private String assetUrl; public static final String SERIALIZED_NAME_ASSET_URL_OBJECT = "assetUrlObject"; @SerializedName(SERIALIZED_NAME_ASSET_URL_OBJECT) private Object assetUrlObject; public static final String SERIALIZED_NAME_AUTHOR_ID = "authorId"; @SerializedName(SERIALIZED_NAME_AUTHOR_ID) private String authorId; public static final String SERIALIZED_NAME_AUTHOR_NAME = "authorName"; @SerializedName(SERIALIZED_NAME_AUTHOR_NAME) private String authorName; public static final String SERIALIZED_NAME_CREATED_AT = "created_at"; @SerializedName(SERIALIZED_NAME_CREATED_AT) private OffsetDateTime createdAt; public static final String SERIALIZED_NAME_DESCRIPTION = "description"; @SerializedName(SERIALIZED_NAME_DESCRIPTION) private String description; public static final String SERIALIZED_NAME_FEATURED = "featured"; @SerializedName(SERIALIZED_NAME_FEATURED) private Boolean featured = false; public static final String SERIALIZED_NAME_ID = "id"; @SerializedName(SERIALIZED_NAME_ID) private String id; public static final String SERIALIZED_NAME_IMAGE_URL = "imageUrl"; @SerializedName(SERIALIZED_NAME_IMAGE_URL) private String imageUrl; public static final String SERIALIZED_NAME_NAME = "name"; @SerializedName(SERIALIZED_NAME_NAME) private String name; public static final String SERIALIZED_NAME_RELEASE_STATUS = "releaseStatus"; @SerializedName(SERIALIZED_NAME_RELEASE_STATUS) private ReleaseStatus releaseStatus = ReleaseStatus.PUBLIC; public static final String SERIALIZED_NAME_TAGS = "tags"; @SerializedName(SERIALIZED_NAME_TAGS) private List<String> tags = new ArrayList<String>(); public static final String SERIALIZED_NAME_THUMBNAIL_IMAGE_URL = "thumbnailImageUrl"; @SerializedName(SERIALIZED_NAME_THUMBNAIL_IMAGE_URL) private String thumbnailImageUrl; public static final String SERIALIZED_NAME_UNITY_PACKAGE_URL = "unityPackageUrl"; @SerializedName(SERIALIZED_NAME_UNITY_PACKAGE_URL) private String unityPackageUrl; public static final String SERIALIZED_NAME_UNITY_PACKAGE_URL_OBJECT = "unityPackageUrlObject"; @SerializedName(SERIALIZED_NAME_UNITY_PACKAGE_URL_OBJECT) private AvatarUnityPackageUrlObject unityPackageUrlObject; public static final String SERIALIZED_NAME_UNITY_PACKAGES = "unityPackages"; @SerializedName(SERIALIZED_NAME_UNITY_PACKAGES) 
private Set<UnityPackage> unityPackages = new LinkedHashSet<UnityPackage>(); public static final String SERIALIZED_NAME_UPDATED_AT = "updated_at"; @SerializedName(SERIALIZED_NAME_UPDATED_AT) private OffsetDateTime updatedAt; public static final String SERIALIZED_NAME_VERSION = "version"; @SerializedName(SERIALIZED_NAME_VERSION) private Integer version = 0; public Avatar assetUrl(String assetUrl) { this.assetUrl = assetUrl; return this; } /** * Not present from general serach &#x60;/avatars&#x60;, only on specific requests &#x60;/avatars/{avatarId}&#x60;. * @return assetUrl **/ @javax.annotation.Nullable @ApiModelProperty(value = "Not present from general serach `/avatars`, only on specific requests `/avatars/{avatarId}`.") public String getAssetUrl() { return assetUrl; } public void setAssetUrl(String assetUrl) { this.assetUrl = assetUrl; } public Avatar assetUrlObject(Object assetUrlObject) { this.assetUrlObject = assetUrlObject; return this; } /** * Not present from general serach &#x60;/avatars&#x60;, only on specific requests &#x60;/avatars/{avatarId}&#x60;. **Deprecation:** &#x60;Object&#x60; has unknown usage/fields, and is always empty. Use normal &#x60;Url&#x60; field instead. * @return assetUrlObject **/ @javax.annotation.Nullable @ApiModelProperty(value = "Not present from general serach `/avatars`, only on specific requests `/avatars/{avatarId}`. **Deprecation:** `Object` has unknown usage/fields, and is always empty. Use normal `Url` field instead.") public Object getAssetUrlObject() { return assetUrlObject; } public void setAssetUrlObject(Object assetUrlObject) { this.assetUrlObject = assetUrlObject; } public Avatar authorId(String authorId) { this.authorId = authorId; return this; } /** * A users unique ID, usually in the form of &#x60;usr_c1644b5b-3ca4-45b4-97c6-a2a0de70d469&#x60;. Legacy players can have old IDs in the form of &#x60;8JoV9XEdpo&#x60;. The ID can never be changed. * @return authorId **/ @javax.annotation.Nonnull @ApiModelProperty(example = "usr_c1644b5b-3ca4-45b4-97c6-a2a0de70d469", required = true, value = "A users unique ID, usually in the form of `usr_c1644b5b-3ca4-45b4-97c6-a2a0de70d469`. Legacy players can have old IDs in the form of `8JoV9XEdpo`. 
The ID can never be changed.") public String getAuthorId() { return authorId; } public void setAuthorId(String authorId) { this.authorId = authorId; } public Avatar authorName(String authorName) { this.authorName = authorName; return this; } /** * Get authorName * @return authorName **/ @javax.annotation.Nonnull @ApiModelProperty(required = true, value = "") public String getAuthorName() { return authorName; } public void setAuthorName(String authorName) { this.authorName = authorName; } public Avatar createdAt(OffsetDateTime createdAt) { this.createdAt = createdAt; return this; } /** * Get createdAt * @return createdAt **/ @javax.annotation.Nonnull @ApiModelProperty(required = true, value = "") public OffsetDateTime getCreatedAt() { return createdAt; } public void setCreatedAt(OffsetDateTime createdAt) { this.createdAt = createdAt; } public Avatar description(String description) { this.description = description; return this; } /** * Get description * @return description **/ @javax.annotation.Nonnull @ApiModelProperty(required = true, value = "") public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public Avatar featured(Boolean featured) { this.featured = featured; return this; } /** * Get featured * @return featured **/ @javax.annotation.Nonnull @ApiModelProperty(required = true, value = "") public Boolean getFeatured() { return featured; } public void setFeatured(Boolean featured) { this.featured = featured; } public Avatar id(String id) { this.id = id; return this; } /** * Get id * @return id **/ @javax.annotation.Nonnull @ApiModelProperty(example = "avtr_912d66a4-4714-43b8-8407-7de2cafbf55b", required = true, value = "") public String getId() { return id; } public void setId(String id) { this.id = id; } public Avatar imageUrl(String imageUrl) { this.imageUrl = imageUrl; return this; } /** * Get imageUrl * @return imageUrl **/ @javax.annotation.Nonnull @ApiModelProperty(required = true, value = "") public String getImageUrl() { return imageUrl; } public void setImageUrl(String imageUrl) { this.imageUrl = imageUrl; } public Avatar name(String name) { this.name = name; return this; } /** * Get name * @return name **/ @javax.annotation.Nonnull @ApiModelProperty(required = true, value = "") public String getName() { return name; } public void setName(String name) { this.name = name; } public Avatar releaseStatus(ReleaseStatus releaseStatus) { this.releaseStatus = releaseStatus; return this; } /** * Get releaseStatus * @return releaseStatus **/ @javax.annotation.Nonnull @ApiModelProperty(required = true, value = "") public ReleaseStatus getReleaseStatus() { return releaseStatus; } public void setReleaseStatus(ReleaseStatus releaseStatus) { this.releaseStatus = releaseStatus; } public Avatar tags(List<String> tags) { this.tags = tags; return this; } public Avatar addTagsItem(String tagsItem) { this.tags.add(tagsItem); return this; } /** * Get tags * @return tags **/ @javax.annotation.Nonnull @ApiModelProperty(required = true, value = "") public List<String> getTags() { return tags; } public void setTags(List<String> tags) { this.tags = tags; } public Avatar thumbnailImageUrl(String thumbnailImageUrl) { this.thumbnailImageUrl = thumbnailImageUrl; return this; } /** * Get thumbnailImageUrl * @return thumbnailImageUrl **/ @javax.annotation.Nonnull @ApiModelProperty(required = true, value = "") public String getThumbnailImageUrl() { return thumbnailImageUrl; } public void setThumbnailImageUrl(String 
thumbnailImageUrl) { this.thumbnailImageUrl = thumbnailImageUrl; } public Avatar unityPackageUrl(String unityPackageUrl) { this.unityPackageUrl = unityPackageUrl; return this; } /** * Get unityPackageUrl * @return unityPackageUrl **/ @javax.annotation.Nonnull @ApiModelProperty(required = true, value = "") public String getUnityPackageUrl() { return unityPackageUrl; } public void setUnityPackageUrl(String unityPackageUrl) { this.unityPackageUrl = unityPackageUrl; } public Avatar unityPackageUrlObject(AvatarUnityPackageUrlObject unityPackageUrlObject) { this.unityPackageUrlObject = unityPackageUrlObject; return this; } /** * Get unityPackageUrlObject * @return unityPackageUrlObject * @deprecated **/ @Deprecated @javax.annotation.Nonnull @ApiModelProperty(required = true, value = "") public AvatarUnityPackageUrlObject getUnityPackageUrlObject() { return unityPackageUrlObject; } public void setUnityPackageUrlObject(AvatarUnityPackageUrlObject unityPackageUrlObject) { this.unityPackageUrlObject = unityPackageUrlObject; } public Avatar unityPackages(Set<UnityPackage> unityPackages) { this.unityPackages = unityPackages; return this; } public Avatar addUnityPackagesItem(UnityPackage unityPackagesItem) { this.unityPackages.add(unityPackagesItem); return this; } /** * Get unityPackages * @return unityPackages **/ @javax.annotation.Nonnull @ApiModelProperty(required = true, value = "") public Set<UnityPackage> getUnityPackages() { return unityPackages; } public void setUnityPackages(Set<UnityPackage> unityPackages) { this.unityPackages = unityPackages; } public Avatar updatedAt(OffsetDateTime updatedAt) { this.updatedAt = updatedAt; return this; } /** * Get updatedAt * @return updatedAt **/ @javax.annotation.Nonnull @ApiModelProperty(required = true, value = "") public OffsetDateTime getUpdatedAt() { return updatedAt; } public void setUpdatedAt(OffsetDateTime updatedAt) { this.updatedAt = updatedAt; } public Avatar version(Integer version) { this.version = version; return this; } /** * Get version * minimum: 0 * @return version **/ @javax.annotation.Nonnull @ApiModelProperty(example = "68", required = true, value = "") public Integer getVersion() { return version; } public void setVersion(Integer version) { this.version = version; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Avatar avatar = (Avatar) o; return Objects.equals(this.assetUrl, avatar.assetUrl) && Objects.equals(this.assetUrlObject, avatar.assetUrlObject) && Objects.equals(this.authorId, avatar.authorId) && Objects.equals(this.authorName, avatar.authorName) && Objects.equals(this.createdAt, avatar.createdAt) && Objects.equals(this.description, avatar.description) && Objects.equals(this.featured, avatar.featured) && Objects.equals(this.id, avatar.id) && Objects.equals(this.imageUrl, avatar.imageUrl) && Objects.equals(this.name, avatar.name) && Objects.equals(this.releaseStatus, avatar.releaseStatus) && Objects.equals(this.tags, avatar.tags) && Objects.equals(this.thumbnailImageUrl, avatar.thumbnailImageUrl) && Objects.equals(this.unityPackageUrl, avatar.unityPackageUrl) && Objects.equals(this.unityPackageUrlObject, avatar.unityPackageUrlObject) && Objects.equals(this.unityPackages, avatar.unityPackages) && Objects.equals(this.updatedAt, avatar.updatedAt) && Objects.equals(this.version, avatar.version); } @Override public int hashCode() { return Objects.hash(assetUrl, assetUrlObject, authorId, authorName, createdAt, description, featured, id, 
imageUrl, name, releaseStatus, tags, thumbnailImageUrl, unityPackageUrl, unityPackageUrlObject, unityPackages, updatedAt, version); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class Avatar {\n"); sb.append(" assetUrl: ").append(toIndentedString(assetUrl)).append("\n"); sb.append(" assetUrlObject: ").append(toIndentedString(assetUrlObject)).append("\n"); sb.append(" authorId: ").append(toIndentedString(authorId)).append("\n"); sb.append(" authorName: ").append(toIndentedString(authorName)).append("\n"); sb.append(" createdAt: ").append(toIndentedString(createdAt)).append("\n"); sb.append(" description: ").append(toIndentedString(description)).append("\n"); sb.append(" featured: ").append(toIndentedString(featured)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" imageUrl: ").append(toIndentedString(imageUrl)).append("\n"); sb.append(" name: ").append(toIndentedString(name)).append("\n"); sb.append(" releaseStatus: ").append(toIndentedString(releaseStatus)).append("\n"); sb.append(" tags: ").append(toIndentedString(tags)).append("\n"); sb.append(" thumbnailImageUrl: ").append(toIndentedString(thumbnailImageUrl)).append("\n"); sb.append(" unityPackageUrl: ").append(toIndentedString(unityPackageUrl)).append("\n"); sb.append(" unityPackageUrlObject: ").append(toIndentedString(unityPackageUrlObject)).append("\n"); sb.append(" unityPackages: ").append(toIndentedString(unityPackages)).append("\n"); sb.append(" updatedAt: ").append(toIndentedString(updatedAt)).append("\n"); sb.append(" version: ").append(toIndentedString(version)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces * (except the first line). */ private String toIndentedString(Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
Lamb cutlets, chicken thighs and king prawns are cooked in a traditional tandoori marinade, with a dill fusion version for the salmon. Mix all the marinade ingredients together and season with plenty of salt and pepper. Divide the marinade between four bowls. Add the lamb, chicken and prawns to three of the bowls. Stir the dill and capers into the remaining bowl and add the salmon. Massage the marinade into the meats, prawns and salmon, then cover and marinate for at least 1 hour – overnight if possible. When you are ready to cook everything, heat your grill to its highest setting. Arrange the chicken on a rack over a lined tray and grill for 4–5 minutes on each side (this is how long the chicken will take after it has been flattened – if it is not flat, you will need to cook it for longer). Remove and rest. Add the lamb and cook for 2–3 minutes on each side. Again, remove and rest. The salmon will need 4–5 minutes on each side, and finally the prawns will need just 2–3 minutes on each side, depending on their size. You could cook the chicken and salmon together and the lamb and prawns together if you like. Serve with naan bread, lemon wedges and some raita on the side.
#include <bits/stdc++.h>
using namespace std;

int main() {
    int n;
    cin >> n;

    // Positions of the value 1 and the value n in the given permutation.
    int pos1 = 0, pos2 = 0;
    int i, tmp;
    for (i = 1; i <= n; i++) {
        cin >> tmp;
        if (tmp == 1) pos1 = i;
        if (tmp == n) pos2 = i;
    }

    int diff = abs(pos1 - pos2);
    if (diff == n - 1) {
        // 1 and n already sit at opposite ends, so the distance is maximal.
        cout << n - 1 << endl;
    } else {
        // One swap can move either 1 or n to whichever end is farther away.
        int P1 = n - min(pos1, pos2);
        int P2 = max(pos1, pos2) - 1;
        cout << max(P1, P2) << endl;
    }
}
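If the greedy formula above looks unconvincing, here is a small brute-force cross-check, written as a sketch in Python under the assumption (inferred from the code, not stated in it) that the task is: given a permutation of 1..n, perform one swap to maximize the distance between the positions of 1 and n.

import random

def best_after_one_swap(perm):
    # Try every single swap and keep the largest distance between 1 and n.
    n = len(perm)
    best = abs(perm.index(1) - perm.index(n))
    for i in range(n):
        for j in range(i + 1, n):
            perm[i], perm[j] = perm[j], perm[i]
            best = max(best, abs(perm.index(1) - perm.index(n)))
            perm[i], perm[j] = perm[j], perm[i]
    return best

def formula(perm):
    # Same logic as the C++ solution above, using 1-based positions.
    n = len(perm)
    pos1, pos2 = perm.index(1) + 1, perm.index(n) + 1
    if abs(pos1 - pos2) == n - 1:
        return n - 1
    return max(n - min(pos1, pos2), max(pos1, pos2) - 1)

for _ in range(200):
    n = random.randint(2, 8)
    perm = random.sample(range(1, n + 1), n)
    assert best_after_one_swap(perm) == formula(perm)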
import random

n = int(input())  # let the user input a number of their choice
if n == 0:
    # Fall back to a random positive bound if the user gives up the choice
    # (randint avoids n == 0, which would cause a division by zero below).
    n = random.randint(1, 1000000000)

# Count how many numbers in [0, n) have a decimal representation starting with '1'.
count_1 = 0
for i in range(n):
    if str(i)[0] == '1':
        count_1 += 1

print(count_1 / n)
Cytoplasmic Partitioning of P Granule Components Is Not Required to Specify the Germline in C. elegans

Making a Germ Cell
When it comes to generating the germ line, animals fall into two classes: those, like mammals, which use inductive interactions to specify the germ line, and those, like nematodes, which use germ plasm, a specialized egg cytoplasm that segregates asymmetrically in embryos. Germ plasm contains germ, or P, granules: RNA-protein aggregates that have been thought to harbor the germ line determinants. Now, Gallo et al. (p. 1685, published online 2 December) describe a Caenorhabditis elegans mutant that challenges this belief. The germ line still formed even when germ granule components were missegregated. Thus, even in animals with germ plasm, germ granules appear to be a consequence, not a cause, of germ cell specification. Germ granules do not need to be segregated asymmetrically during cell division to specify germ cell fate.

Asymmetric segregation of P granules during the first four divisions of the Caenorhabditis elegans embryo is a classic example of cytoplasmic partitioning of germline determinants. It is thought that asymmetric partitioning of P granule components during mitosis is essential to distinguish germline from soma. We have identified a mutant (pptr-1) in which P granules become unstable during mitosis and P granule proteins and RNAs are distributed equally to somatic and germline blastomeres. Despite symmetric partitioning of P granule components, pptr-1 mutants segregate a germline that uniquely expresses P granules during postembryonic development. pptr-1 mutants are fertile, except at high temperatures. Hence, asymmetric partitioning of maternal P granules is not essential to specify germ cell fate. Instead, it may serve to protect the nascent germline from stress.
/* 1/1.5.2_Char_counting/first.c */
#include <stdio.h>

int main()
{
    long counter = 0;

    while (getchar() != EOF)
        ++counter;

    printf("%ld\n", counter);

    return 0;
}
#include "base/file/path.h" #include <dirent.h> #include <sys/types.h> #include <sys/stat.h> #include <unistd.h> #include "base/strings/strings.h" namespace file { namespace internal { std::string join_path_impl(std::initializer_list<strings::StringPiece> paths) { std::string result; for (strings::StringPiece path : paths) { if (path.empty()) continue; if (result.empty()) { result = path.as_string(); continue; } if (result.back() == '/') { if (is_absolute_path(path)) { result += path.substr(1); } else { result += path; } } else { if (is_absolute_path(path)) { result += path; } else { result += '/'; result += path; } } } return result; } std::string join_path_respect_absolute_impl(std::initializer_list<strings::StringPiece> paths) { std::string result; for (strings::StringPiece path : paths) { if (path.empty()) continue; if (result.empty()) { result = path.as_string(); continue; } if (is_absolute_path(path)) { result = path.as_string(); continue; } if (result.back() == '/') { result += path; } else { result += '/'; result += path; } } return result; } } // namespace internal // This is not the same as posix dirname. // ours always return substr of the path. // // path POSIX OURS // dirname basename dirname basename // ----------------------------------------------------------- // /usr/lib /usr lib /usr lib // /usr/ / usr / usr // usr . usr <empty> usr // / / / / <empty> // . . . <empty> . // .. . .. <empty> .. strings::StringPiece basename(strings::StringPiece path) { path = strings::trim_right(path, '/'); if (path.empty()) return strings::StringPiece(); strings::StringPiece::size_type pos = path.find_last_of('/'); if (pos == strings::StringPiece::npos) return path; return path.substr(pos + 1); } strings::StringPiece dirname(strings::StringPiece path) { if (path == "/") return path; path = strings::trim_right(path, '/'); if (path.empty()) return strings::StringPiece(); strings::StringPiece::size_type pos = path.find_last_of('/'); if (pos == strings::StringPiece::npos) return path.substr(0, 0); if (pos == 0) return path.substr(0, 1); return path.substr(0, pos); } strings::StringPiece stem(strings::StringPiece path) { path = basename(path); strings::StringPiece::size_type pos = path.find_last_of('.'); if (pos == strings::StringPiece::npos) return path; return path.substr(0, pos); } strings::StringPiece extension(strings::StringPiece path) { path = basename(path); strings::StringPiece::size_type pos = path.find_last_of('.'); if (pos == strings::StringPiece::npos) return strings::StringPiece(); return path.substr(pos); } std::string get_current_directory() { char* dir = getcwd(nullptr, 0); if (!dir) return std::string(); std::string result(dir); // Since getcwd allocates memory with malloc, we need to free it. 
free(dir); return result; } bool is_absolute_path(strings::StringPiece path) { return !path.empty() && path[0] == '/'; } bool is_directory(const char* path) { struct stat st; if (stat(path, &st) < 0) return false; return S_ISDIR(st.st_mode); } bool create_directory(const char* path, int mode) { if (mkdir(path, mode) < 0) return false; return true; } bool remove_directory(const char* path) { if (rmdir(path) < 0) return false; return true; } bool list_files(const char* directory_path, std::vector<std::string>* files) { DIR* dir = opendir(directory_path); if (!dir) return false; while (true) { struct dirent* dent = readdir(dir); if (!dent) break; files->push_back(dent->d_name); } if (closedir(dir) < 0) return false; return true; } bool delete_tree(const char* path) { struct stat st; if (stat(path, &st) < 0) { return false; } if (!S_ISDIR(st.st_mode)) { if (unlink(path) < 0) { return false; } return true; } std::vector<std::string> files; if (!list_files(path, &files)) { return false; } for (const auto& file : files) { if (file == "." || file == "..") continue; std::string p = join_path(path, file); if (!delete_tree(p.c_str())) return false; } if (rmdir(path) < 0) { return false; } return true; } }
President Obama wants the federal government to do some belt-tightening during tough economic times. On Tuesday he signed an executive order directing agencies to cut back on everything from travel to cellphones, printing, and even promotional or commemorative items, such as T-shirts, plaques and mugs. Each executive-branch agency has a month and a half to identify areas to slash expenditures to 20 percent below fiscal 2010 levels, which will produce what the White House estimates will be $4 billion in annual savings. But don’t expect the Oval Office to start cutting back on the official White House M&Ms on Air Force One or curtailing pricey presidential travel. Mr. Obama is traveling to Hawaii on Friday to host the Asia Pacific Economic Cooperation summit, which aims to advance trade and other U.S. economic ties to the region, and then will move on to Australia to promote the two countries’ long-standing alliance. He will end his travel in Bali at another summit, this one aimed at boosting security and anti-terrorism initiatives with Pacific Rim nations. Air Force One will be flying an estimated 50 hours in 10 days, from Washington to San Diego to Honolulu to Canberra, Australia, to Darwin, Australia, to Bali, Indonesia, and then back to Washington. The 50 hours of flight time, multiplied by the $181,000 per hour to operate Air Force One, amounts to more than $9 million, not including the cost of staff time for those traveling with him. After the announcement, during a briefing with reporters about Mr. Obama’s trip, White House spokesman Jay Carney was asked whether the president planned to trim some fat from his travel schedule because he has asked others to do so. Mr. Carney shot down any such notion. “The president makes trips as part of his capacity as commander in chief and president of the United States. There are no plans to [change his travel],” Mr. Carney said. Although Mr. Obama is not leading by example - at least when it comes to travel - at Wednesday’s signing ceremony he commended two agency officials for taking the lead in cutting waste and abuse from their departments. A Commerce Department official, he said, found $2 million a year in savings in cellphone charges, and a Department of Homeland Security official managed to produce tens of millions of dollars in cost savings by changing the way the department obtains goods and services. Mr. Obama on Wednesday also thanked Sens. Tom Coburn, Oklahoma Republican, and Claire McCaskill, Missouri Democrat, for leading efforts to cut waste, fraud and abuse in Congress. The White House sold the move as a portion of his “We Can’t Wait” campaign to circumvent Republican opposition to his job-creation plan, but the executive order is also part of a broader push to reduce government waste and abuse and increase efficiency. In June, Mr. Obama announced an “oversight and accountability” task force, headed by Vice President Joseph R. Biden, aimed at helping agencies reduce waste, fraud and abuse. Reacting to Wednesday’s executive order, Mr. Coburn applauded the president for taking “common sense” and “obvious” steps toward fiscal sanity.
1. Field of the Invention The present invention relates generally to improvements in communication systems and services. More particularly, the present invention relates to techniques for ensuring payment of royalties for copyrighted data delivered over a communication network. 2. Description of Prior Art The recent expansion of wide area computer communication networks, such as the Internet, as well as the planned development of the so-called information superhighway, promise ready availability of an infinite array of data to users around the world. The data available over the network may include, for example, text, audio, video and other animation, still images and virtual reality sensations. A serious problem in implementing the information superhighway or other wide area communication network is the fundamental conflict between ready access to network data, and the need for the data creators to receive appropriate royalties. Failure to provide adequate royalties for creators may limit the amount and quality of available data. On the other hand, a strict requirement of royalty payments prior to data delivery would place an excessive burden on users, particularly those involved in education and research, and limit the effectiveness of the network as a widely-used communication medium. A prior art technique presently used to ensure royalty payments for data transferred by computer network involves encrypting the data prior to transfer. After a user has paid an appropriate royalty fee, the user receives a decryption key which allows the encrypted data to be converted to a usable form. Unfortunately, encryption often converts the data to a completely unrecognizable form, such that users unfamiliar with the data content will be unable to determine its usefulness without first paying the royalty. Users are placed at a significant disadvantage in conducting research, which often involves examining large amounts of unknown data. For example, a high school or college student using the network to research paintings from a particular period or artist will likely want to browse through a large number of still images, on the order of 100 or more. If a database provider charged the student to view each and every image, the cost would unduly limit the scope of the research. Similar problems are encountered by users interested in copyrighted music or lyrics, newspaper and magazine articles, published court decisions, U.S. and foreign patents, articles in scientific and technical journals, and a wide variety of other data. Although these types of data are currently available in a number of different databases which users may access over a network, prior art techniques generally do not allow users to access any useful portion of the data without first agreeing to pay for the delivered data. For example, U.S. Pat. No. 5,050,213 discloses a prior art system which allows users of an encrypted CD-ROM database to browse through the database on a browsing workstation containing proprietary computer and display components. However, the user typically must perform the browsing at the proprietary workstation, rather than over a network using a standard personal computer, and generally must pay a fee to gain access to data even for browsing. In addition, the user browses a full quality version of the data even though a lesser quality version may be sufficient to determine suitability of the data. This system is thus inefficient and not conducive to widespread data access over a network. 
The prior art data delivery systems also fail to recognize that a liberal access policy can be in the best interests of information creators. If people are not exposed to high quality information, people will not come to depend on it and seek it out. Providing widespread access to information can thus be considered a form of advertising or investment for creators. As noted above, however, the access should be provided in a way which enables the creators to recover the royalties they deserve. Prior art network data delivery techniques have failed to resolve this conflict satisfactorily and are generally incompatible with the liberal and widespread access goals of the much-publicized information superhighway. As is apparent from the above, a need exists for a method and system which ensure payment of royalties for high quality data delivered over a communication network, without unduly restricting widespread access to the data for browsing, education and other purposes. The present invention provides a method and system for ensuring payment of royalties for data delivered over a communication network. The present invention resolves the conflict between free access and payment of royalties by providing liberal access to partially-degraded data suitable for browsing or research, and charging users a royalty to receive a higher quality version of the data suitable for purposes such as entertainment. In accordance with one aspect of the present invention, a method of delivering data to a user terminal over a communication network is provided. The exemplary method includes the steps of providing a partially-degraded version of the data over the network, without payment of a royalty fee, to a customer at the user terminal; and providing a higher quality version of the data to the customer over the network if the customer is entitled to receive the higher quality version. The determination as to whether the customer is so entitled may be made by, for example, determining if a royalty fee payment has been received. The partially-degraded version of the data is substantially recognizable when displayed, printed, played, run or otherwise utilized at the user terminal, and may be generated by, for example, deleting or encrypting a portion of the undegraded data. The higher quality version may be provided by transmitting the complete higher quality version, or transmitting remaining data such as the deleted portion or a decryption key, to the user terminal. In accordance with another aspect of the present invention, a data delivery method is provided which includes the steps of receiving a request for the data from a customer at one of the user terminals; retrieving the requested data from the information database; partially degrading the data such that after the partial degradation the data remains substantially recognizable when utilized by the customer; transmitting the partially-degraded data to the customer over the communication network; determining whether the customer is entitled to receive a higher quality version of the data; and providing the higher quality version to the customer if the customer is entitled to receive it. In accordance with another aspect of the present invention, a system for delivering data over a network is provided. 
The system includes a provider database containing the data; and a digital data processor connected to the provider database, for processing a request for the data from a customer at the user terminal, such that a partially-degraded version of the data is provided to the customer over the network, without payment of a royalty fee, and a higher quality version of the data is provided to the customer over the network, for example, upon payment of the royalty fee. Again, the partially-degraded version, when displayed or otherwise utilized at the user terminal, is substantially recognizable to the customer and suitable for limited usage. In accordance with a further aspect of the present invention, a customer may be provided with an option of selecting a version of the desired data at one of a number of different data quality levels, and the amount of royalty payment required to receive a given version may vary depending on the quality level selected. In this manner, users need not pay for a higher quality version of the data than necessary for a given application. The present invention ensures payment of royalties for data received over a communication network in part because delivered data generally remains partially-degraded until an appropriate royalty is paid. At the same time, network users are provided with substantially free access to a wide array of data which is readily recognizable and usable for limited purposes such as research. The users can therefore browse through a variety of available data to decide which data to purchase in a higher quality or undegraded form.
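As an illustration of the delivery flow described above, the following is a minimal sketch in Python. It is not the patented system itself: the degradation function, the quality tiers, and the royalty check are all hypothetical stand-ins, chosen only to show how a server might return a recognizable preview for free and release higher-quality versions once a royalty entitlement is confirmed.

QUALITY_TIERS = {"preview": 0.25, "standard": 0.75, "full": 1.0}  # hypothetical tiers

def degrade(data: bytes, keep_fraction: float) -> bytes:
    # Crude stand-in for partial degradation: keep only a leading fraction
    # of the payload so the result is still recognizable but not full quality.
    keep = int(len(data) * keep_fraction)
    return data[:keep]

def deliver(data: bytes, customer_paid_tiers: set, requested_tier: str) -> bytes:
    # The preview tier is always free; higher tiers require a recorded royalty payment.
    if requested_tier == "preview" or requested_tier in customer_paid_tiers:
        return degrade(data, QUALITY_TIERS[requested_tier])
    raise PermissionError("royalty payment required for tier: " + requested_tier)

# Example: a customer who has paid for "standard" but not "full".
payload = b"x" * 1000
print(len(deliver(payload, {"standard"}, "preview")))   # 250 bytes, free
print(len(deliver(payload, {"standard"}, "standard")))  # 750 bytes, paid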
A leader in the development of BSD and Unix technologies for both Apple's iOS and OS X is now leaving the company, it was announced on Tuesday, in order to take the engineering reins at iXsystems. iXsystems revealed on Tuesday the hiring on of Jordan Hubbard, who lists himself as Director of Software Engineering at Apple on his LinkedIn profile. Hubbard, according to iXsystems' announcement, will lead engineering and development at iXsystems, taking up control of open source storage platform FreeNAS and looking to expand the user base for the TrueNAS Unified Storage Appliance. Hubbard will also work on other future projects for emerging enterprise and consumer markets. A software engineer for more than 25 years, Hubbard was a co-founder of the FreeBSD project before signing on with Apple in June of 2001. Since then, he has served as a director of software engineering, developing the BSD and Unix technologies underlying Mac OS X and iOS. Hubbard describes himself as a "long time advocate of and contributor to open source as a method for engineering collaboration and building new communities." He counts user interface design and the making of developer toolkits among his specialties. “I’m very excited to have this opportunity to help guide iXsystems through its next phase of professional and technological development,” Hubbard said in a statement. “This is not just a great opportunity for me to be part of a company known for its strong support of FreeBSD and other open source software, but I’m also looking forward to helping it achieve new levels of success with the TrueNAS storage appliance and having the opportunity to create future innovative products.”
/** * Condition that works with the {@link GenericFilterOperation} in order to match a single {@link * Row} against multiple database column values. * * @param <V> */ @Value.Immutable public abstract class GenericCondition<V> implements BaseCondition { /** @return Filter operation for the condition. */ @Value.Parameter public abstract GenericFilterOperation<V> getFilterOperation(); /** @return Filter query value. */ @Value.Parameter public abstract V getQueryValue(); /** @return If booleans should be considered as numeric values. */ @Value.Parameter public abstract boolean isNumericBooleans(); /** Validates the value against the predicate. */ @Value.Check protected void validate() { V queryValue = getQueryValue(); getFilterOperation().validateFilterInput(queryValue); } /** * {@inheritDoc} * * <p>This implementation always returns empty. Sub-class to override. */ @Override public Optional<BuiltCondition> getBuiltCondition() { return Optional.empty(); } /** {@inheritDoc} */ @Override public FilterOperationCode getFilterOperationCode() { return getFilterOperation().getOpCode(); } /** {@inheritDoc} */ @Override public boolean isEvaluateOnMissingFields() { return getFilterOperation().isEvaluateOnMissingFields(); } /** {@inheritDoc} */ @Override public boolean test(Row row) { Boolean dbValueBoolean = getBoolean(row, isNumericBooleans()); Double dbValueDouble = getDouble(row); String dbValueString = getString(row); GenericFilterOperation<V> filterOperation = getFilterOperation(); V queryValue = getQueryValue(); // compare against the non-null values, fallback to text compare even if null if (null != dbValueBoolean) { return filterOperation.test(dbValueBoolean, queryValue); } else if (null != dbValueDouble) { return filterOperation.test(dbValueDouble, queryValue); } else { return filterOperation.test(dbValueString, queryValue); } } }
/* The MIT License (MIT) Copyright (c) 2015-? suhetao Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #ifndef __STM32F4_SPI_H #define __STM32F4_SPI_H // Includes #include "stm32f4xx.h" #include "stm32f4_rcc.h" //#define SPIx_USE_DMA #define SPIx_BR_CLEAR_MASK ((uint16_t)(0xFFC7)) typedef struct SPI_DRIVER_T { SPI_TypeDef* SPI; RCC_AXXPeriphClockCmd SPI_CLK; uint32_t SPI_Func; GPIO_TypeDef* Gpio; RCC_AXXPeriphClockCmd GPIO_CLK; uint32_t GPIO_Func; GPIO_TypeDef* Gpio_CS; RCC_AXXPeriphClockCmd GPIO_CS_CLK; uint32_t CS_Func; uint16_t CS_Pin; uint16_t SCK_Pin; uint16_t MISO_Pin; uint16_t MOSI_Pin; uint16_t SCK_Src; uint16_t MISO_Src; uint16_t MOSI_Src; SPI_InitTypeDef SPI_Init; #ifdef SPIx_USE_DMA RCC_AXXPeriphClockCmd DMA_CLK; uint32_t DMA_Func; DMA_TypeDef* DMA_TX; DMA_Stream_TypeDef* DMA_TX_Stream; NVIC_InitTypeDef NVIC_DMA_TX; uint32_t DMA_TX_CH; uint32_t DMA_TX_Flag; DMA_TypeDef* DMA_RX; DMA_Stream_TypeDef* DMA_RX_Stream; NVIC_InitTypeDef NVIC_DMA_RX; uint32_t DMA_RX_CH; uint32_t DMA_RX_Flag; #endif uint8_t GPIO_AF_SPI; }SPI_Driver; __inline void Chip_Select(SPI_Driver* SPIx) { GPIO_ResetBits((SPIx)->Gpio_CS, (SPIx)->CS_Pin); } __inline void Chip_DeSelect(SPI_Driver* SPIx){ GPIO_SetBits((SPIx)->Gpio_CS, (SPIx)->CS_Pin); } void SPIx_Init(SPI_Driver* SPIx); void SPIx_DeInit(SPI_Driver* SPIx); uint8_t SPIx_Read_Reg(SPI_Driver* SPIx, uint8_t reg); void SPIx_Write_Reg(SPI_Driver* SPIx, uint8_t regAddr, uint8_t data); void SPIx_Read_Regs(SPI_Driver* SPIx, uint8_t regAddr, uint8_t length, uint8_t* buffer); #ifdef SPIx_USE_DMA void SPIx_DMA_Read_Regs(SPI_Driver* SPIx, uint8_t regAddr, uint8_t length, uint8_t* buffer); #endif uint8_t SPIx_SendByte(SPI_Driver* SPIx, uint8_t byte); uint16_t SPIx_SendWord(SPI_Driver* SPIx, uint16_t word); void SPIx_ReadBytes(SPI_Driver* SPIx, uint8_t length, uint8_t* buffer); void SPIx_SetDivisor(SPI_Driver* SPIx, uint16_t Prescaler); #endif
/* Generated by RuntimeBrowser
   Image: /System/Library/PrivateFrameworks/OnBoardingKit.framework/OnBoardingKit
 */

@interface OBPrivacyLinkController : UIViewController {
    bool _allowsOpeningSafari;
    NSArray * _bundleIdentifiers;
    NSArray * _bundles;
    bool _displayCaptionText;
    unsigned long long _displayDeviceType;
    bool _displayIcon;
    NSString * _displayLanguage;
    bool _displayLargeIcon;
    bool _presentedViewControllerShouldUseDarkMode;
}

@property bool allowsOpeningSafari;
@property (readonly) OBBundle *bundle;
@property (readonly) NSArray *bundles;
@property bool displayCaptionText;
@property unsigned long long displayDeviceType;
@property bool displayIcon;
@property (retain) NSString *displayLanguage;
@property bool displayLargeIcon;
@property (readonly) OBPrivacyFlow *flow;
@property (nonatomic) bool presentedViewControllerShouldUseDarkMode;

// Image: /System/Library/PrivateFrameworks/OnBoardingKit.framework/OnBoardingKit

+ (Class)_platformSpecificClass;
+ (id)linkWithBundleIdentifier:(id)arg1;
+ (id)linkWithBundleIdentifiers:(id)arg1;
- (void).cxx_destruct;
- (bool)allowsOpeningSafari;
- (id)bundle;
- (id)bundles;
- (bool)displayCaptionText;
- (unsigned long long)displayDeviceType;
- (bool)displayIcon;
- (id)displayLanguage;
- (bool)displayLargeIcon;
- (id)flow;
- (id)initWithBundleIdentifiers:(id)arg1;
- (id)initWithPrivacyBundle:(id)arg1;
- (void)linkPressed;
- (bool)presentedViewControllerShouldUseDarkMode;
- (void)setAllowsOpeningSafari:(bool)arg1;
- (void)setDisplayCaptionText:(bool)arg1;
- (void)setDisplayDeviceType:(unsigned long long)arg1;
- (void)setDisplayIcon:(bool)arg1;
- (void)setDisplayLanguage:(id)arg1;
- (void)setDisplayLargeIcon:(bool)arg1;
- (void)setLinkEnabled:(bool)arg1;
- (void)setPresentedViewControllerShouldUseDarkMode:(bool)arg1;
- (void)viewDidAppear:(bool)arg1;

// Image: /System/Library/PrivateFrameworks/PassKitUI.framework/PassKitUI

+ (id)pk_privacyLinkForContext:(unsigned long long)arg1;
- (void)pk_applyAppearance:(id)arg1;

@end
I didn’t know who Nicole Arbour was until this past weekend, when everyone I’ve ever met including my childhood dentist and your mom sent me Arbour’s Dear Fat People video , suggesting I write a rebuttal. Arbour, I quickly gathered, is a Canadian YouTuber whose popularity hinges on the supposed novelty of a woman being simultaneously opinionated, funny and conventionally attractive. (You might have come across her a month ago when her weird, slut-shaming excoriation of “Instagram models” went viral. Policing women’s bodies and self-expression under the guise of empowerment appears to be something of a signature move.) Arbour’s “funny” opinion this week, openly leveraged for attention – “Aaahhh, some people are already really mad at this video!” she chirps, four seconds in – is that fat people are lazy, disgusting, inconsiderate and smelly. It is six minutes of tired cruelty filed under “entertainment.” The only notable thing about Arbour’s video is, perhaps, how dated it feels: while fat people still face daily harassment and systemic discrimination, body-positive activists have gotten enough of a toehold in the public consciousness that, in 2015, most mainstream, non-anonymous media outlets at least have the decency to use coded language when they shame us. Arbour’s rhetoric, by contrast, feels positively 2009: “What are you going to do, fat people? What are you going to do? You going to chase me? I can get away from you by walking at a reasonable pace.” “Fat people parking spots should be at the back of the mall parking lot. Walk to the doors and burn some calories.” “They complain, and they smell like sausages, and I don’t even think they ate sausages, that’s just their aroma. They were so fat that they’re that ‘standing sweat’ fat. Crisco was coming out of their pores.” Broadly speaking, even comedy has moved away from fat jokes that obvious. (Who’s lazy again, by the way?) Arbour showed up late to a losing battle and declared victory. It is, frankly, embarrassing. “I’m over here putting my ass on the line,” she wrote ostentatiously on Twitter , “and being hella brave to try and change the world in a new way.” Indeed, it is “hella brave” and “new” to tell fat people to eat less and exercise more – much like the bravery of Braveheart, or the brave girl from Brave, or the weird old guy who used to come into my work when I was 17 and try to sell me pyramid scheme weight-loss pills that I’m 99% sure were tapeworm eggs mixed with Adderall. The bravery of thin people who exploit and abuse fat people for profit is truly unmatched. I used to spend time squabbling over health and calories and insurance premiums with bad-faith internet jerks like Arbour. I’ve wasted innumerable hours and tears trying to prove my humanity as a fat person. I’ve always thought that if I could just lay my life bare enough, find language visceral enough, write evocatively enough about the ways anti-fat stigma has made my world smaller and dimmer, that it would eventually connect with people, human being to human being. But with Dear Fat People, I just can’t. It’s too pedestrian. Too lazy. Too old. It has been covered ad nauseam, and it doesn’t deserve my vulnerability. So, instead, I want to pull back and say this, to all the Nicole Arbours of the world: You know what, Nicole? I fight for you. Whether you like it or not, whether you realise it or not, your life is tangibly better because of fat women who live unapologetically, who wedge the gates of acceptance open wider every day. 
I fight for you in your capacity as a woman who wants to be more than just a body. I fight for you in your capacity as a woman whose body is scrutinised and policed every moment of your life. I fight for you in your capacity as a woman who wants to be taken seriously in comedy. I fight for you in your capacity as a woman who wants to be heard, not blamed, when she reports a sexual assault. I fight for you in your capacity as a woman who will eventually age and be told you are without value. I fight for you in your capacity as a woman vulnerable to any number of emotional and physical maladies that could, to your surprise, make you as fat as me. I fight for you in your capacity as a complex, fully formed human being with the right to autonomy over your body, even if that body gets fat. I fight for you even when you are cruel, even when you are making money off the back of fat people’s pain, even when you refuse to fight for me. Because I know that it is hard to have a body, that insecurities make us mean, and that male approval can be a comfortable harbour while it lasts. But you will eventually be kicked out of the club, and when that happens, you may find yourself grateful to those of us who have built a new one. Safe journey.
Raspberry Pi gets launched into atmosphere to take near-space pictures

The Raspberry Pi is an impressive mini-computer we saw launch recently, and it’s already won the hearts of many. An individual from the UK named Dave Akerman was apparently looking to have a real adventure with his new Pi, so he strapped the gadget to a weather balloon for it to be sent up into Earth’s atmosphere to take pictures from near-space. And it actually did quite well. The photos can be viewed on Akerman’s Flickr account. Akerman saw that the Pi had a USB port offering fast and easy access to a webcam, so he could obtain live images sent down to him from the payload. Dubbed the “Raspberry Pi In The Sky” project, the weather balloon reached about 40 kilometers before bursting, which is not bad since most balloons carrying small loads like tracking devices or sensors for temperature and pressure can reach altitudes of about 30 kilometers or so. Akerman admitted that there were a lot of challenges faced along the way, including the obviously harsh environment from reaching near-space, which has less than one percent atmosphere with temperatures dropping down to -50 degrees Celsius. The Pi itself also had its own set of challenges to work with, which included an SD card, power requirements and operating system. You can check out Akerman’s launch video below and his blog post here. [via WebProNews]
import { APIError, Client, FetchQueue, RawStickerData, Sticker, StickerResolvable } from "../../../internal";
import getRoute from "../../../util/getRoute";

export default async function getSticker(client: Client, stickerResolvable: StickerResolvable): Promise<Sticker | undefined> {

    // Resolve objects
    const stickerID: string | undefined = Sticker.resolveID(stickerResolvable);
    if (!stickerID) throw new Error("Invalid sticker resolvable");

    // Define fetch data
    const path: string = `/stickers/${stickerID}`;
    const method: string = "GET";
    const route: string = getRoute(path, method);

    // Get fetch queue
    const fetchQueue: FetchQueue = client._getFetchQueue(route);

    // Add to fetch queue
    let unknownSticker: boolean = false;
    const result: RawStickerData = await fetchQueue.request({
        path,
        method
    }).catch((err: APIError) => {

        // Unknown sticker
        if (err.code === 10060) unknownSticker = true;

        // Throw error
        else throw err;
    });

    // Unknown sticker
    if (unknownSticker) return;

    // Parse sticker
    const sticker: Sticker = Sticker._fromRawData(client, result);

    // Return
    return sticker;
}
Escalation in Therapy Based on Intravenous Magnesium Sulfate Dosing in Pediatric Patients With Asthma Exacerbations. OBJECTIVE Our objective was to compare doses of intravenous magnesium sulfate and their association with escalations in therapy in children and adolescents presenting to the emergency department with an asthma exacerbation. METHODS This was a retrospective cohort study among children who received both magnesium sulfate and standard of care therapy for asthma exacerbations. A classification and regression tree (CART) analysis was performed to identify a breakpoint in dose in which a difference in the primary outcome was present. The primary endpoint was need for escalation in therapy within 24 hours of initial magnesium sulfate dose, defined as need for invasive or non-invasive mechanical ventilation or need for adjunctive therapy, that is, epinephrine, terbutaline, aminophylline, theophylline, ketamine, heliox, or additional doses of magnesium sulfate. RESULTS A total of 210 patients were included in the study. A CART analysis identified that a breakpoint of 27 mg/kg of magnesium was associated with a difference in the primary outcome of escalation in therapy in patients <40 kg. A subgroup analysis of patients <40 kg (n = 149) found patients who received magnesium doses >27 mg/kg had a higher incidence of the primary outcome of escalation in therapy, 15 patients (18.3%) versus 3 patients (4.5%) in the ≤27-mg/kg/dose group (p = 0.011). CONCLUSIONS Our results demonstrate larger doses of magnesium sulfate are associated with an increased need for invasive or non-invasive mechanical ventilation or need for adjunctive therapy(ies). Our findings are limited by confounding factors that may have influenced this outcome in our population.
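The breakpoint search described in this abstract can be illustrated with a depth-one classification tree. The sketch below is written in Python against entirely synthetic example data (not the study's dataset) and shows how a single-split CART yields a dose threshold analogous to the reported 27 mg/kg cut point.

import numpy as np
from sklearn.tree import DecisionTreeClassifier

rng = np.random.default_rng(0)

# Synthetic stand-in data: magnesium dose in mg/kg and a binary
# "escalation in therapy" outcome that becomes more likely above 27 mg/kg.
dose = rng.uniform(10, 50, size=300)
escalation = (rng.random(300) < np.where(dose > 27, 0.18, 0.05)).astype(int)

# A depth-one CART finds the single dose split that best separates outcomes.
tree = DecisionTreeClassifier(max_depth=1, random_state=0)
tree.fit(dose.reshape(-1, 1), escalation)

print("estimated breakpoint: %.1f mg/kg" % tree.tree_.threshold[0])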
def _map_network(self, ip_string, net_mask_string, network_mapping):
    ip = IpValidation.validate_ip_address(ip_string)
    net_mask = IpValidation.validate_ip_address(net_mask_string)
    net_address = next(
        (net_address for net_address in network_mapping
         if ip in net_address and net_address.netmask == net_mask),
        None
    )
    if net_address is None:
        net_address = next(
            (net_address for net_address in network_mapping if self.PUBLIC_IP_MATCHER in net_address),
            None
        )
    if net_address is None:
        raise NetworkMapper.NoMappingFoundException(
            'no matching network mapping was found, for the following ip: {ip}'.format(ip=str(ip))
        )
    return network_mapping[net_address]
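For readers unfamiliar with the lookup above, here is a self-contained sketch of the same idea using Python's standard ipaddress module. The mapping data and the public-IP fallback key are made up for illustration; the real IpValidation and NetworkMapper classes are not part of this snippet.

import ipaddress

PUBLIC_IP_MATCHER = ipaddress.ip_address("8.8.8.8")  # hypothetical "public" probe address

network_mapping = {
    ipaddress.ip_network("10.0.0.0/24"): "backend-vlan",
    ipaddress.ip_network("192.168.1.0/24"): "office-lan",
    ipaddress.ip_network("0.0.0.0/0"): "public-internet",  # catch-all containing the probe address
}

def map_network(ip_string, net_mask_string):
    ip = ipaddress.ip_address(ip_string)
    net_mask = ipaddress.ip_address(net_mask_string)
    # First try an exact match on both containment and netmask.
    match = next((net for net in network_mapping
                  if ip in net and net.netmask == net_mask), None)
    # Fall back to whichever network contains the public probe address.
    if match is None:
        match = next((net for net in network_mapping if PUBLIC_IP_MATCHER in net), None)
    if match is None:
        raise LookupError("no matching network mapping was found for ip: %s" % ip)
    return network_mapping[match]

print(map_network("10.0.0.17", "255.255.255.0"))   # backend-vlan
print(map_network("172.16.5.4", "255.255.0.0"))    # public-internet (fallback)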
<reponame>adamwalz/Jupyter-Notebooks<filename>machine_learning/tutorials/parallel_ml_tutorial/fetch_data.py import numpy as np import os try: from urllib import urlopen except ImportError: from urllib.request import urlopen import tarfile import zipfile import gzip from sklearn.datasets import load_files from sklearn.externals import joblib TWENTY_URL = ("http://people.csail.mit.edu/jrennie/" "20Newsgroups/20news-bydate.tar.gz") TWENTY_ARCHIVE_NAME = "20news-bydate.tar.gz" TWENTY_CACHE_NAME = "20news-bydate.pkz" TWENTY_TRAIN_FOLDER = "20news-bydate-train" TWENTY_TEST_FOLDER = "20news-bydate-test" SENTIMENT140_URL = ("http://cs.stanford.edu/people/alecmgo/" "trainingandtestdata.zip") SENTIMENT140_ARCHIVE_NAME = "trainingandtestdata.zip" COVERTYPE_URL = ('http://archive.ics.uci.edu/ml/' 'machine-learning-databases/covtype/covtype.data.gz') # Source: https://www.kaggle.com/c/titanic-gettingStarted/data TITANIC_URL = ("https://dl.dropboxusercontent.com/" "u/5743203/data/titanic/titanic_train.csv") def get_datasets_folder(): here = os.path.dirname(__file__) datasets_folder = os.path.abspath(os.path.join(here, 'datasets')) datasets_archive = os.path.abspath(os.path.join(here, 'datasets.zip')) if not os.path.exists(datasets_folder): if os.path.exists(datasets_archive): print("Extracting " + datasets_archive) zf = zipfile.ZipFile(datasets_archive) zf.extractall('.') assert os.path.exists(datasets_folder) else: print("Creating datasets folder: " + datasets_folder) os.makedirs(datasets_folder) else: print("Using existing dataset folder:" + datasets_folder) return datasets_folder def check_twenty_newsgroups(datasets_folder): print("Checking availability of the 20 newsgroups dataset") archive_path = os.path.join(datasets_folder, TWENTY_ARCHIVE_NAME) train_path = os.path.join(datasets_folder, TWENTY_TRAIN_FOLDER) test_path = os.path.join(datasets_folder, TWENTY_TEST_FOLDER) if not os.path.exists(archive_path): print("Downloading dataset from %s (14 MB)" % TWENTY_URL) opener = urlopen(TWENTY_URL) open(archive_path, 'wb').write(opener.read()) else: print("Found archive: " + archive_path) if not os.path.exists(train_path) or not os.path.exists(test_path): print("Decompressing %s" % archive_path) tarfile.open(archive_path, "r:gz").extractall(path=datasets_folder) print("Checking that the 20 newsgroups files exist...") assert os.path.exists(train_path) assert os.path.exists(test_path) print("=> Success!") def check_sentiment140(datasets_folder): print("Checking availability of the sentiment 140 dataset") archive_path = os.path.join(datasets_folder, SENTIMENT140_ARCHIVE_NAME) sentiment140_path = os.path.join(datasets_folder, 'sentiment140') train_path = os.path.join(sentiment140_path, 'training.1600000.processed.noemoticon.csv') test_path = os.path.join(sentiment140_path, 'testdata.manual.2009.06.14.csv') if not os.path.exists(archive_path): print("Downloading dataset from %s (77MB)" % SENTIMENT140_URL) opener = urlopen(SENTIMENT140_URL) open(archive_path, 'wb').write(opener.read()) else: print("Found archive: " + archive_path) if not os.path.exists(sentiment140_path): print("Extracting %s to %s" % (archive_path, sentiment140_path)) zf = zipfile.ZipFile(archive_path) zf.extractall(sentiment140_path) print("Checking that the sentiment 140 CSV files exist...") assert os.path.exists(train_path) assert os.path.exists(test_path) print("=> Success!") def check_covertype(datasets_folder): print("Checking availability of the covertype dataset") archive_path = os.path.join(datasets_folder, 'covtype.data.gz') 
covtype_dir = os.path.join(datasets_folder, "covertype") samples_path = os.path.join(covtype_dir, "samples.pkl") targets_path = os.path.join(covtype_dir, "targets.pkl") if not os.path.exists(covtype_dir): os.makedirs(covtype_dir) if not os.path.exists(archive_path): print("Downloading dataset from %s (10.7MB)" % COVERTYPE_URL) open(archive_path, 'wb').write(urlopen(COVERTYPE_URL).read()) else: print("Found archive: " + archive_path) if not os.path.exists(samples_path) or not os.path.exists(targets_path): print("Parsing the data and splitting input and labels...") f = open(archive_path, 'rb') Xy = np.genfromtxt(gzip.GzipFile(fileobj=f), delimiter=',') X = Xy[:, :-1] y = Xy[:, -1].astype(np.int32) joblib.dump(X, samples_path) joblib.dump(y, targets_path ) print("=> Success!") def check_titanic(datasets_folder): print("Checking availability of the titanic dataset") csv_filename = os.path.join(datasets_folder, 'titanic_train.csv') if not os.path.exists(csv_filename): print("Downloading titanic data from %s" % TITANIC_URL) open(csv_filename, 'wb').write(urlopen(TITANIC_URL).read()) print("=> Success!") if __name__ == "__main__": import sys datasets_folder = get_datasets_folder() check_twenty_newsgroups(datasets_folder) check_titanic(datasets_folder) if 'sentiment140' in sys.argv: check_sentiment140(datasets_folder) if 'covertype' in sys.argv: check_covertype(datasets_folder)
/**
 * Called when callable.call() returns without throwing an exception.
 *
 * @param operation     the name of what is being retried; usually the name of a b2 operation.
 * @param attemptsSoFar how many times we have called callable.call() so far.
 * @param tookMillis    how long this attempt took, in milliseconds.
 */
@SuppressWarnings("unused")
default void succeeded(String operation, int attemptsSoFar, long tookMillis) {
}
Ryo Ishikawa Amateur career On 20 May 2007, Ishikawa became the youngest winner ever of a men's regular tournament on the Japan Golf Tour by winning the Munsingwear Open KSB Cup at the age 15 years and 8 months. He competed as an amateur and it was Ishikawa's first tour appearance. He finished one shot ahead of Japan's 9th top ranked player at the time, Katsumasa Miyamoto. The highest ranked player on the Official World Golf Ranking who took part in the event was Toru Taniguchi who finished T13, 6 shots shy of Ishikawa. Taniguchi ranked number 86 in the world after the event. Professional career Ishikawa turned professional in 2008 and won another Japan Golf Tour tournament, the mynavi ABC Championship. By the close of 2008 he had become the youngest ever player to reach the top 100 of the Official World Golf Rankings. Ishikawa played in PGA Tour tournaments for the first time in 2009. He was cut from the Northern Trust Open, the Arnold Palmer Invitational and the 2009 Masters Tournament. He finished 71st at the Transitions Championship. On 28 June 2009, Ishikawa won the Gateway to the Open Mizuno Open Yomiuri Classic on the Japan Golf Tour to qualify for the 2009 Open Championship, the first major event he has qualified for without receiving a special exemption. With four wins on the Japan Golf Tour in 2009, in September, Ishikawa became the youngest golfer ever to reach the top 50 of the Official World Golf Rankings. Ishikawa dominated the Japan Golf Tour for much of the 2009 season and has been the top-ranked Japanese player in the World Rankings. On 18 October, he tied for second at the Japan Open, losing to Ryuichi Oda on the second hole of a playoff. He finished the season as the money list leader on the Japan Golf Tour with ¥ 183.52 million. At the Japan GTO awards, held in December 2009, Ishikawa earned 9 titles. In addition to top money earner, he was named MVP, best scoring average (69.93), best putting average (1.724), highest birdie haul (4.42), etc. On 2 May 2010, in the final round of The Crowns, he shot a 12-under-par 58 to win the tournament by five strokes. The 58 was the lowest score ever carded in a Japan Golf Tour event, eclipsing a 59 achieved in the first round of 2003 Acom International by Masahiro Kuramoto, and lowest ever on any major golf tour. His round consisted of 12 birdies and six pars. However, because the course was a par-70 (versus the par-72 courses where some players shot 59), the record is not the lowest in relation to par. Ishikawa caught the attention of American golf fans at the 2010 U.S. Open. Wearing a bright bubblegum pink outfit, he played under par on the first day and was tied for second after the second day before falling back over the weekend. On 30 March 2011 Ishikawa announced that he will be donating all of his 2011 tour earnings, plus an additional ¥ 100,000 for every birdie he makes during the year, to the Japan earthquake relief efforts. On 11 March 2012, the one-year anniversary of the Japan earthquake, Ishikawa finished runner-up to George McNeill in the Puerto Rico Open, his highest PGA Tour finish thus far. Just over a week later, Ishikawa became a member of the PGA Tour. The second-place finish earned Special Temporary Membership by exceeding $411,943, or 150th on the PGA Tour's 2011 money list. Ishikawa played on the PGA Tour in 2013. He made 13 cuts in 23 events, finishing 149th on the money list and missing the FedEx Cup playoffs (ranked 141st). 
He played the Web.com Tour Finals and finished 13th to retain his PGA Tour card for 2014. Ishikawa got nine top-25s and made 14 cuts during the 2014 season, including a second-place finish at the Shriners Hospitals for Children Open and a T-5 at the unofficial ISPS Handa World Cup of Golf.
Cytospectrophotometric measurement of the DNA level of interphase nuclei versus their cross-reaction area was carried out in mucosal cells of the stomach. The material included gastric biopsy specimens from healthy subjects and from patients with single and multiple adenomatous polyps of the stomach as well as gastric cancer. Tumor cells and, to a lesser degree, those of multiple polyps exhibited certain changes in DNA content: histograms showed several peaks, and variation in DNA level increased without a matching rise in DNA content per nucleus. A correlation was established between DNA content and the cross-reaction area of interphase nuclei. Therefore, both parameters may serve for diagnosis.
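The reported link between DNA content and the cross-reaction area of interphase nuclei is, at heart, a correlation between two per-nucleus measurements. As a rough illustration only (the study does not specify its correlation method, and all numbers below are synthetic), such a relationship could be quantified like this:

# Illustrative sketch only: computes the kind of correlation reported above
# between per-nucleus DNA content and nuclear area, using made-up numbers.
import numpy as np
from scipy.stats import pearsonr

rng = np.random.default_rng(0)
dna_content = rng.normal(loc=2.0, scale=0.4, size=50)        # hypothetical DNA values (arbitrary units)
nuclear_area = 30 + 12 * dna_content + rng.normal(0, 2, 50)  # hypothetical cross-reaction area

r, p_value = pearsonr(dna_content, nuclear_area)
print(f"Pearson r = {r:.2f}, p = {p_value:.3g}")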
Retrieval of Daily PM2.5 Concentrations Using Nonlinear Methods: A Case Study of the Beijing-Tianjin-Hebei Region, China Exposure to fine particulate matter (PM2.5) is associated with adverse health impacts on the population. Satellite observations and machine learning algorithms have been applied to improve the accuracy of the prediction of PM2.5 concentrations. In this study, we developed a PM2.5 retrieval approach using machine-learning methods, based on aerosol products from the Moderate Resolution Imaging Spectroradiometer (MODIS) aboard the NASA Earth Observation System (EOS) Terra and Aqua polar-orbiting satellites, near-ground meteorological variables from the NASA Goddard Earth Observing System (GEOS), and ground-based PM2.5 observation data. Four models, which are orthogonal regression (OR), regression tree (Rpart), random forests (RF), and support vector machine (SVM), were tested and compared in the Beijing-Tianjin-Hebei (BTH) region of China in 2015. Aerosol products derived from the Terra and Aqua satellite sensors were also compared. The 10-repeat 5-fold cross-validation (10 × 5 CV) method was subsequently used to evaluate the performance of the different aerosol products and the four models. The results show that the performance of the Aqua dataset was better than that of the Terra dataset, and that the RF algorithm has the best predictive performance (Terra: R = 0.77, RMSE = 43.51 µg/m3; Aqua: R = 0.85, RMSE = 33.90 µg/m3). This study shows promise for predicting the spatiotemporal distribution of PM2.5 using the RF model and Aqua aerosol product with the assistance of PM2.5 site data.
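The model comparison described above hinges on repeated k-fold cross-validation of a random forest against station-level PM2.5 measurements. The sketch below is not the authors' pipeline (their MODIS and GEOS predictors are not available here); it only shows the general shape of a 10-repeat 5-fold CV evaluation of a random forest regressor in scikit-learn, with synthetic placeholder features standing in for the aerosol and meteorological inputs.

# Minimal sketch: 10-repeat 5-fold CV of a random forest regressor.
# The feature matrix here is synthetic; in the study it would hold MODIS AOD
# and GEOS meteorological variables matched to ground PM2.5 observations.
import numpy as np
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import RepeatedKFold, cross_val_score

rng = np.random.default_rng(42)
X = rng.normal(size=(500, 6))                                         # placeholder predictors
y = 50 + 10 * X[:, 0] - 5 * X[:, 1] + rng.normal(scale=8, size=500)   # placeholder PM2.5

model = RandomForestRegressor(n_estimators=200, random_state=0)
cv = RepeatedKFold(n_splits=5, n_repeats=10, random_state=0)

rmse_scores = -cross_val_score(model, X, y, cv=cv,
                               scoring="neg_root_mean_squared_error")
print(f"Mean RMSE over 10 x 5 CV: {rmse_scores.mean():.2f}")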
Field Embodiments of the present disclosure generally relate to devices for controlled release of a supplement or a medicine and/or storage of animal management information. Description of the Related Art A large number of grazing species of animals, including cattle, sheep, goats and deer are classified as ruminant animals. Such animals possess four stomach compartments as part of their digestive system. These animals rely largely on the digestion of grass and other native vegetation for nutrients and sustenance. However, there are large tracts of grasslands throughout the world that are deficient in one or more of the mineral elements required by grazing animals. A convenient way of supplying these animals with minerals, vitamins or other dietary or medicinal needs is by means of a bolus. A bolus is an object containing and releasing the required supplement or medicine at the required rate to improve or maintain the health of the animal. Such a device is administered to the animal by mouth and lodges naturally (by means of being sufficiently dense or by being fitted with tags or wings which deploy after administration) in either of the first two stomach compartments of the subject animal. Thereafter, the supplement or medicament is released over a period of time influenced by the size, shape and constituent ingredients of the bolus. Many different bolus designs have been utilized to satisfy the particular needs of animals, especially sheep and cattle under different grazing conditions. The use of boluses in the treatment of ruminants is well known in the veterinary field. Such products are often weighted by a heavy density substance, such as iron or sand, in order to remain in the rumen to release a medicament. If sustained release coatings are present, the release is gradual until the source of medicine is exhausted. However, such bolus designs are limited to sustained release and not time controlled release. Thus, the supplement or medicine is administered as required or at a generally constant rate over a limited period of time. Further, the use of multiple drugs simultaneously, which are not part of an approved combination, in a standard bolus would require significant testing and regulatory approval. As such, the creation of certain combination drugs would require immense cost and time for regulatory approval. Additionally, the locations and other pertinent data of the ruminant animals need to be tracked and stored. Conventional ways of tracking these animals is with ear identification tags, RFID tags, or ruminal boluses. However, ear identification tags are only readable over a small range and require expensive readers, and RFID tags and ruminal boluses are expensive. Thus, there is a need in the art for a supplement or medicine delivery system and an animal management information storage device which overcome the above described limitations.
Yeovil Town entered the fourth round of the FA Cup following a dramatic draw with fellow League Two side Carlisle United. The game took place at Blackpool’s Bloomfield Road because of the flooding in the Carlisle area, meaning a shorter trip for Town and their loyal fans. Goals from Danny Grainger and Mark Ellis were both cancelled out by Francois Zoko and sub Shaun Jeffers.

Manager Darren Way made two changes from the side that beat York City the previous weekend. Josh Sheehan returned to parent club Swansea City and was replaced by Everton loanee Liam Walsh. The other change for Town was up front, with Shaun Jeffers making way for West Bromwich Albion loanee Tahvon Campbell.

The Glovers started brightly and enjoyed the majority of play in the opening stages. But despite their bright start it was Carlisle who nearly opened the scoring just before the 15-minute mark. A long ball over the back line was chested down well by Jabo Ibehre inside the 18-yard box, but the big striker fired wide whilst under pressure from Dickson.

The ‘home’ side did open the scoring after 25 minutes. A strong Glovers shout for handball against Ibehre was waved away by the referee, only for the man in the middle to blow for a handball by Tozer on the edge of the Town area, an incident that looked exactly the same as the one he had waved away just seconds before. Carlisle’s skipper Grainger stepped up to take the resulting free-kick and his effort took a slight deflection off the wall and nestled in the corner of the net.

Five minutes later, at the other end, Town nearly levelled. Campbell’s clever run into the area saw him find enough space to fire a shot at goal, only for his effort to be parried by Hanford. From the rebound Zoko couldn’t quite get high enough to head home into an empty net.

Just before the half-time whistle the home side had two clear-cut chances but found keeper Krysiak in fine form. The Polish shot-stopper first denied Charlie Wyke when the striker was through on goal and followed that up with a point-blank save to deny Raynes from close range.

Ten minutes into the second half the home side nearly doubled their lead, and would have done had it not been for the woodwork. A superb strike from Grainger from 25 yards came back off the bar into the path of Wyke, who somehow hit the post, and with the ball rolling along the line Town managed to regather and clear away any danger. At the other end Carlisle keeper Hanford had to be on his toes when he palmed away a Sokolik header to keep the score at 1-0.

Town did level the scores, and deservedly so, on 71 minutes. Patient build-up play saw Tozer and Dolan exchange passes before Tozer found Ward in space. The captain’s cross was pinpoint, landing on the head of Zoko, who looped his header over Hanford and into the corner of the net.

The Glovers were only level for five minutes before Carlisle regained the lead. A free-kick on the left was whipped into the box by Gilliead and, with a Town defender slipping, the ball was met by Ellis, who headed powerfully home from close range.

With just under five minutes remaining Town could have drawn level when Darren Ward met a Roberts cross, only to see his header go over the bar. Deep into injury time Town did draw level when sub Shaun Jeffers turned in the area to fire past Hanford, much to the delight of the travelling faithful behind the goal at the opposite end.
// C++ includes
#include <algorithm> // std::shuffle
#include <cstdlib>   // atoi
#include <iostream>  // for io related stuff
#include <random>    // std::mt19937, std::random_device
#include <sstream>   // for std::stringstream
#include <string>    // for std::string
#include <vector>    // std::vector

// External library includes
#include "../danlib/danlib.h"

using namespace std;

int main()
{
    cout << "IFile: reads AFiles, shuffles them and prints to I-Files" << endl;

    DanLib::FileTool ft;

    // Seed the random number generator once. There is no need to sleep and
    // re-seed rand() for every line, and std::random_shuffle is deprecated
    // (removed in C++17), so std::shuffle with a std::mt19937 engine is used.
    std::random_device rd;
    std::mt19937 rng(rd());

    for (int x = 1; x <= 11; x++) // x <= 11
    {
        for (int y = 1; y <= 5; y++) // y <= 5
        {
            // calculate the names of the files
            stringstream iFileName, oFileName;
            iFileName << "bin/output/afile/A-File-" << x << "-" << y;
            oFileName << "bin/output/ifile/I-File-" << x << "-" << y;
            cout << "Opening " << iFileName.str() << " and " << oFileName.str() << endl;

            // open the input file (A-FILE)
            bool success = ft.InitIFile(iFileName.str());
            if (success)
            {
                // open the output file (I-FILE)
                ft.InitOFile(oFileName.str());

                // read each line into a placeholder string
                string currentLine;
                while (ft.ReadLineFromFile(currentLine))
                {
                    // break each line into a vector of tokens based on the delimiter
                    vector<string> stringsFromLine;
                    DanLib::TokenizeString(currentLine, "|", true, stringsFromLine);

                    // convert the vector of strings to a vector of ints
                    vector<int> currentData;
                    for (const auto& elem : stringsFromLine)
                    {
                        currentData.push_back(atoi(elem.c_str()));
                    }

                    // shuffle the vector (Fisher-Yates, via std::shuffle)
                    std::shuffle(currentData.begin(), currentData.end(), rng);

                    ft.WriteVectorToFile(currentData, "|");
                }
            }

            // finally, close the filetool and start over
            ft.Close();
        }
    }

    cout << "\nPress Return key to exit";
    cin.get();
    return 0;
}
Evidence for an action of heptaminol hydrochloride (Heptamyl®) on the central nervous system based on an increase in intracranial self-stimulation behavior in the mouse The effects of intraperitoneal injections of heptaminol hydrochloride (Heptamyl®) on intracranial self-stimulation (ICSS) in the lateral hypothalamus (LH) were studied in BALB/c mice. In a first experiment, performed in a lever-press box, we observed that ICSS induced by brief (0.2 sec) electrical stimulation was enhanced by heptaminol. However, at the two doses administered (34.6 and 60 mg/kg), increases of the lever-pressing rate were observed only in animals showing high levels of performance before the treatment. In a second experiment, mice were trained in a shuttle box to initiate and terminate a continuous stimulation of the LH. In animals showing a weak approach response before the treatment, heptaminol induced a significant reduction of approach latency but only at the dose of 60 mg/kg. In contrast, animals showing a strong approach response appeared more sensitive to heptaminol since a reduction of approach latency was observed at the two doses administered. No concomitant modification of escape latency was observed. These results suggest that heptaminol modulates activity of certain neuronal mechanisms involved in ICSS regulation and that a relationship exists between the reactivity to heptaminol and the sensitivity of hypothalamic sites to electrical stimulation.
<reponame>pengxiaotian/naive-ui<gh_stars>1-10 import { h, defineComponent, computed, PropType, renderSlot, CSSProperties } from 'vue' import { useConfig, useTheme } from '../../_mixins' import type { ThemeProps } from '../../_mixins' import { createKey } from '../../_utils' import type { ExtractPublicPropTypes } from '../../_utils' import { InfoIcon, SuccessIcon, WarningIcon, ErrorIcon } from '../../_internal/icons' import { NBaseIcon } from '../../_internal' import { resultLight } from '../styles' import type { ResultTheme } from '../styles' import image404 from './404' import image500 from './500' import image418 from './418' import image403 from './403' import style from './styles/index.cssr' const imgMap = { 403: image403, 404: image404, 418: image418, 500: image500 } const iconMap = { info: <InfoIcon />, success: <SuccessIcon />, warning: <WarningIcon />, error: <ErrorIcon /> } const resultProps = { ...(useTheme.props as ThemeProps<ResultTheme>), size: { type: String as PropType<'small' | 'medium' | 'large' | 'huge'>, default: 'medium' }, status: { type: String as PropType< 'info' | 'success' | 'warning' | 'error' | '404' | '403' | '500' | '418' >, default: 'info' }, title: String, description: String } export type ResultProps = ExtractPublicPropTypes<typeof resultProps> export default defineComponent({ name: 'Result', props: resultProps, setup (props) { const { mergedClsPrefixRef } = useConfig(props) const themeRef = useTheme( 'Result', 'Result', style, resultLight, props, mergedClsPrefixRef ) return { mergedClsPrefix: mergedClsPrefixRef, cssVars: computed(() => { const { size, status } = props const { common: { cubicBezierEaseInOut }, self: { textColor, lineHeight, titleTextColor, titleFontWeight, [createKey('iconColor', status)]: iconColor, [createKey('fontSize', size)]: fontSize, [createKey('titleFontSize', size)]: titleFontSize, [createKey('iconSize', size)]: iconSize } } = themeRef.value return { '--n-bezier': cubicBezierEaseInOut, '--n-font-size': fontSize, '--n-icon-size': iconSize, '--n-line-height': lineHeight, '--n-text-color': textColor, '--n-title-font-size': titleFontSize, '--n-title-font-weight': titleFontWeight, '--n-title-text-color': titleTextColor, '--n-icon-color': iconColor } }) } }, render () { const { status, $slots, mergedClsPrefix } = this return ( <div class={`${mergedClsPrefix}-result`} style={this.cssVars as CSSProperties} > <div class={`${mergedClsPrefix}-result-icon`}> {status in imgMap ? ( imgMap[status as unknown as keyof typeof imgMap] ) : ( <NBaseIcon clsPrefix={mergedClsPrefix}> {{ default: () => iconMap[status as keyof typeof iconMap] }} </NBaseIcon> )} </div> <div class={`${mergedClsPrefix}-result-header`}> <div class={`${mergedClsPrefix}-result-header__title`}> {this.title} </div> <div class={`${mergedClsPrefix}-result-header__description`}> {this.description} </div> </div> {$slots.default ? ( <div class={`${mergedClsPrefix}-result-content`}>{$slots}</div> ) : null} <div class={`${mergedClsPrefix}-result-footer`}> {renderSlot($slots, 'footer')} </div> </div> ) } })
package rms.demo.config.mybatis;

import org.mybatis.spring.mapper.MapperScannerConfigurer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * @author : Meredith
 * @date : 2019-05-18 23:39
 * @description : Registers a MapperScannerConfigurer so that MyBatis mapper
 *                interfaces under rms.demo.dao are discovered automatically.
 */
@Configuration
public class MyBatisConfig {

    @Bean
    public MapperScannerConfigurer mapperScannerConfigurer() {
        MapperScannerConfigurer mapperScannerConfigurer = new MapperScannerConfigurer();
        // Package to scan for mapper interfaces
        mapperScannerConfigurer.setBasePackage("rms.demo.dao");
        // Name of the SqlSessionFactory bean defined elsewhere in the context
        mapperScannerConfigurer.setSqlSessionFactoryBeanName("sqlSessionFactory");
        return mapperScannerConfigurer;
    }
}
package org.gearvrf.widgetlib.widget.basic; import org.gearvrf.widgetlib.widget.NodeEntry; import org.gearvrf.widgetlib.widget.Widget; import org.gearvrf.GVRContext; import org.gearvrf.GVRMesh; import org.gearvrf.GVRRenderData; import org.gearvrf.GVRSceneObject; import org.json.JSONObject; /** * A radio button is a two-state button that can be either checked or unchecked. When the radio * button is unchecked, the user can press or click it to check it. However, contrary to a * CheckBox, a radio button cannot be unchecked by the user once checked. * Radio buttons are normally used together in a RadioGroup. */ public class RadioButton extends CheckableButton { /** * Create new instance of RadioButton with specified size * @param context * @param width button width * @param height button height */ public RadioButton(GVRContext context, float width, float height) { super(context, width, height); } /** * Create new instance of RadioButton with specified size * @param context */ public RadioButton(GVRContext context) { super(context); } /** * Create new instance of RadioButton with specified size * @param context */ public RadioButton(GVRContext context, JSONObject properties) { super(context, properties); } /** * Create new instance of RadioButton wrapping around GVRF sceneObject; parsed from the model * * @param context * @param sceneObject * @param attributes * @throws InstantiationException */ @Deprecated public RadioButton(GVRContext context, GVRSceneObject sceneObject, NodeEntry attributes) throws InstantiationException { super(context, sceneObject, attributes); } /** * Create new instance of RadioButton wrapping around GVRF sceneObject * * @param context * @param sceneObject * @throws InstantiationException */ public RadioButton(GVRContext context, GVRSceneObject sceneObject) { super(context, sceneObject); } protected RadioButton(GVRContext context, GVRMesh mesh) { super(context, mesh); } /** * Change the checked state of the button to the inverse of its current state. * If the radio button is already checked, this method will not toggle the radio button. */ @Override public void toggle() { if (!isChecked()) { super.toggle(); } } @Override protected Widget createGraphicWidget() { return new Graphic(getGVRContext(), getHeight()); } static private class Graphic extends Widget { Graphic(GVRContext context, float size) { super(context, size, size); setRenderingOrder(GVRRenderData.GVRRenderingOrder.TRANSPARENT); } } }
/*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. ===================================================================*/ // MITK - DataCollection #include <mitkDiffusionCollectionReader.h> #include <mitkDiffusionCollectionWriter.h> #include <mitkDataCollection.h> #include <mitkImageCast.h> #include "mitkDataCollectionImageIterator.h" #include <mitkCollectionStatistic.h> #include <mitkTumorInvasionClassification.h> // CTK #include "mitkCommandLineParser.h" // ITK #include <itkImageRegionIterator.h> using namespace std; int main(int argc, char *argv[]) { // Setup CLI Module parsable interface mitkCommandLineParser parser; parser.setTitle("Tumor Invasion Analysis"); parser.setCategory("Tumor Analysis"); parser.setDescription("Learns and predicts Invasion behavior"); parser.setContributor("MBI"); parser.setArgumentPrefix("--", "-"); // Add command line argument names parser.addArgument("help", "h", mitkCommandLineParser::Bool, "Show options"); parser.addArgument("loadFile", "l", mitkCommandLineParser::InputFile, "DataCollection File"); parser.addArgument( "colIds", "c", mitkCommandLineParser::String, "Patient Identifiers from DataCollection used for training"); parser.addArgument( "testId", "t", mitkCommandLineParser::String, "Patient Identifier from DataCollection used for testing"); parser.addArgument("features", "b", mitkCommandLineParser::String, "Features"); parser.addArgument("stats", "s", mitkCommandLineParser::String, "Output file for stats"); parser.addArgument("ratio", "q", mitkCommandLineParser::Float, "ratio of tumor to healthy"); parser.addArgument("treeDepth", "d", mitkCommandLineParser::Int, "limits tree depth"); parser.addArgument("forestSize", "f", mitkCommandLineParser::Int, "number of trees"); parser.addArgument("samplingMode", "m", mitkCommandLineParser::Int, "mode of sample selection"); parser.addArgument("configName", "n", mitkCommandLineParser::String, "human readable name for configuration"); parser.addArgument("output", "o", mitkCommandLineParser::OutputDirectory, "output folder for results"); parser.addArgument("forest", "t", mitkCommandLineParser::OutputFile, "store trained forest to file"); map<string, us::Any> parsedArgs = parser.parseArguments(argc, argv); // Show a help message if (parsedArgs.size() == 0) return EXIT_SUCCESS; if (parsedArgs.count("help") || parsedArgs.count("h")) { std::cout << parser.helpText(); return EXIT_SUCCESS; } // Default values float ratio = 1.0; bool useStatsFile = false; unsigned int forestSize = 250; unsigned int treeDepth = 0; unsigned int samplingMode = 1; std::string configName = ""; std::string outputFolder = ""; std::string forestFile = ""; std::vector<std::string> features; std::vector<std::string> trainingIds; std::vector<std::string> testingIds; std::vector<std::string> loadIds; // features + masks needed for training and evaluation std::string outputFile; std::string xmlFile; std::ofstream experimentFS; // Parse input parameters { if (parsedArgs.count("colIds") || parsedArgs.count("c")) { std::istringstream ss(us::any_cast<string>(parsedArgs["colIds"])); std::string token; while (std::getline(ss, token, ',')) trainingIds.push_back(token); } if 
(parsedArgs.count("output") || parsedArgs.count("o")) { outputFolder = us::any_cast<string>(parsedArgs["output"]); } if (parsedArgs.count("configName") || parsedArgs.count("n")) { configName = us::any_cast<string>(parsedArgs["configName"]); } if (parsedArgs.count("features") || parsedArgs.count("b")) { std::istringstream ss(us::any_cast<string>(parsedArgs["features"])); std::string token; while (std::getline(ss, token, ',')) features.push_back(token); } if (parsedArgs.count("treeDepth") || parsedArgs.count("d")) { treeDepth = us::any_cast<int>(parsedArgs["treeDepth"]); } if (parsedArgs.count("ratio") || parsedArgs.count("q")) { ratio = us::any_cast<float>(parsedArgs["ratio"]); } if (parsedArgs.count("forestSize") || parsedArgs.count("f")) { forestSize = us::any_cast<int>(parsedArgs["forestSize"]); } if (parsedArgs.count("samplingMode") || parsedArgs.count("m")) { samplingMode = us::any_cast<int>(parsedArgs["samplingMode"]); } if (parsedArgs.count("stats") || parsedArgs.count("s")) { useStatsFile = true; experimentFS.open(us::any_cast<string>(parsedArgs["stats"]).c_str(), std::ios_base::app); } if (parsedArgs.count("forest") || parsedArgs.count("t")) { forestFile = us::any_cast<string>(parsedArgs["stats"]); } if (parsedArgs.count("testId") || parsedArgs.count("t")) { std::istringstream ss(us::any_cast<string>(parsedArgs["testId"])); std::string token; while (std::getline(ss, token, ',')) testingIds.push_back(token); } for (unsigned int i = 0; i < features.size(); i++) { loadIds.push_back(features.at(i)); } loadIds.push_back("GTV"); loadIds.push_back("BRAINMASK"); loadIds.push_back("TARGET"); if (parsedArgs.count("stats") || parsedArgs.count("s")) { outputFile = us::any_cast<string>(parsedArgs["stats"]); } if (parsedArgs.count("loadFile") || parsedArgs.count("l")) { xmlFile = us::any_cast<string>(parsedArgs["loadFile"]); } else { MITK_ERROR << parser.helpText(); return EXIT_FAILURE; } } mitk::DataCollection::Pointer trainCollection; mitk::DataCollection::Pointer testCollection; { mitk::DiffusionCollectionReader colReader; // Load only relevant images colReader.SetDataItemNames(loadIds); colReader.AddSubColIds(testingIds); testCollection = colReader.LoadCollection(xmlFile); colReader.ClearDataElementIds(); colReader.ClearSubColIds(); colReader.SetDataItemNames(loadIds); colReader.AddSubColIds(trainingIds); trainCollection = colReader.LoadCollection(xmlFile); } std::cout << "Setup Training" << std::endl; mitk::TumorInvasionClassification classifier; classifier.SetClassRatio(ratio); classifier.SetTrainMargin(7, 1); classifier.SamplesWeightingActivated(true); classifier.SelectTrainingSamples(trainCollection, samplingMode); // Learning stage std::cout << "Start Training" << std::endl; classifier.LearnProgressionFeatures(trainCollection, features, forestSize, treeDepth); if (forestFile != "") classifier.SaveRandomForest(forestFile); std::cout << "Start Predict" << std::endl; classifier.PredictInvasion(testCollection, features); if (false && outputFolder != "") { std::cout << "Saving files to " << outputFolder << std::endl; mitk::DiffusionCollectionWriter::ExportCollectionToFolder(trainCollection, "/tmp/dumple"); } classifier.SanitizeResults(testCollection); { mitk::DataCollectionImageIterator<unsigned char, 3> gtvIt(testCollection, "GTV"); mitk::DataCollectionImageIterator<unsigned char, 3> result(testCollection, "RESULTOPEN"); while (!gtvIt.IsAtEnd()) { if (gtvIt.GetVoxel() != 0) { result.SetVoxel(2); } result++; gtvIt++; } } mitk::CollectionStatistic stat2; mitk::ProgressionValueToIndexMapper 
progressionValueToIndexMapper; mitk::BinaryValueToIndexMapper binaryValueToIndexMapper; stat2.SetCollection(testCollection); stat2.SetClassCount(2); stat2.SetGoldName("TARGET"); stat2.SetTestName("RESULTOPEN"); stat2.SetMaskName("BRAINMASK"); stat2.SetGroundTruthValueToIndexMapper(&binaryValueToIndexMapper); stat2.SetTestValueToIndexMapper(&progressionValueToIndexMapper); stat2.Update(); stat2.ComputeRMSD(); // FIXME: DICE value available after calling Print method std::ostringstream out2; stat2.Print(out2, std::cout, true); std::cout << std::endl << std::endl << out2.str() << std::endl; // Exclude GTV from Statistics by removing it from brain mask, // insert GTV as tumor region, since it is known before, in the result. { mitk::DataCollectionImageIterator<unsigned char, 3> gtvIt(testCollection, "GTV"); mitk::DataCollectionImageIterator<unsigned char, 3> brainMaskIter(testCollection, "BRAINMASK"); mitk::DataCollectionImageIterator<unsigned char, 3> result(testCollection, "RESULTOPEN"); while (!gtvIt.IsAtEnd()) { if (gtvIt.GetVoxel() != 0) { brainMaskIter.SetVoxel(0); result.SetVoxel(2); } result++; gtvIt++; brainMaskIter++; } } mitk::CollectionStatistic stat; stat.SetCollection(testCollection); stat.SetClassCount(2); stat.SetGoldName("TARGET"); stat.SetTestName("RESULTOPEN"); stat.SetMaskName("BRAINMASK"); stat.SetGroundTruthValueToIndexMapper(&binaryValueToIndexMapper); stat.SetTestValueToIndexMapper(&progressionValueToIndexMapper); stat.Update(); stat.ComputeRMSD(); // WARN: DICE value computed within Print method, so values are only available // after // calling Print() std::ostringstream out; stat.Print(out, std::cout, true); std::cout << std::endl << std::endl << out.str() << std::endl; // Statistics for original GTV excluded (Dice,Sensitivity) and for Gold // Standard vs prediction (RMSE) mitk::StatisticData statData = stat.GetStatisticData(1).at(0); mitk::StatisticData statData2 = stat2.GetStatisticData(1).at(0); std::cout << "Writing Stats to file" << std::endl; // one line output if (useStatsFile) { experimentFS << "Tree_Depth " << treeDepth << ','; experimentFS << "Forest_Size " << forestSize << ','; experimentFS << "Tumor/healthy_ratio " << ratio << ','; experimentFS << "Sample_Selection " << samplingMode << ','; experimentFS << "Trainined_on: " << ','; for (unsigned int i = 0; i < trainingIds.size(); i++) { experimentFS << trainingIds.at(i) << "/"; } experimentFS << ','; experimentFS << "Tested_on: " << ','; for (unsigned int i = 0; i < testingIds.size(); i++) { experimentFS << testingIds.at(i) << "/"; } experimentFS << ','; experimentFS << "Features_used: " << ','; if (configName == "") { for (unsigned int i = 0; i < features.size(); i++) { experimentFS << features.at(i) << "/"; } } else experimentFS << configName; experimentFS << ','; experimentFS << "---- STATS ---" << ','; experimentFS << " Sensitivity " << statData.m_Sensitivity << ','; experimentFS << " DICE " << statData.m_DICE << ','; experimentFS << " RMSE " << statData2.m_RMSD << ','; experimentFS << std::endl; } if (outputFolder != "") { std::cout << "Saving files to " << outputFolder << std::endl; mitk::DiffusionCollectionWriter::ExportCollectionToFolder(testCollection, outputFolder); } return EXIT_SUCCESS; }
def adjusted_classes(y_probs, cnn=False, combined=None, cnn_thresh=None, voting_thresh=None):
    """Map predicted probabilities to class labels using a decision threshold.

    The threshold is chosen in order of precedence: the CNN threshold (when
    cnn=True), an explicit combined-model threshold, or the voting threshold.
    Probabilities at or above the threshold are labelled "MEDLINE", the rest
    "PubMed-not-MEDLINE".
    """
    if cnn:
        p = cnn_thresh
    elif combined is not None:
        p = combined
    else:
        p = voting_thresh
    return ["MEDLINE" if y >= p else "PubMed-not-MEDLINE" for y in y_probs]
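A quick usage sketch of the function above; the probabilities and thresholds below are made up, purely to show the call shape.

# Hypothetical probabilities and thresholds, for illustration only.
probs = [0.12, 0.48, 0.73, 0.91]

print(adjusted_classes(probs, cnn=True, cnn_thresh=0.5))
# -> ['PubMed-not-MEDLINE', 'PubMed-not-MEDLINE', 'MEDLINE', 'MEDLINE']

print(adjusted_classes(probs, combined=0.75))
# -> ['PubMed-not-MEDLINE', 'PubMed-not-MEDLINE', 'PubMed-not-MEDLINE', 'MEDLINE']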
Research on the Relationship Between Government Subsidies and High-quality Development of Enterprises This article takes China's listed manufacturing companies from 2014 to 2019 as its research object, studies the relationship between government subsidies and the high-quality development of enterprises, and introduces ownership structure through interaction terms to explore how ownership structure affects that relationship. The research indicates that there is a negative correlation between government subsidies and the high-quality development of enterprises, that equity concentration positively moderates (i.e., strengthens) the inhibitory effect of government subsidies on high-quality development, and that the degree of equity checks and balances weakens this negative relationship.
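The moderation analysis described here is typically estimated as a regression with an interaction term between the subsidy variable and the ownership-structure variable. The sketch below is only a generic illustration of that setup, not the paper's actual specification; the variable names and the synthetic data are assumptions.

# Generic moderation-regression sketch with an interaction term.
# Column names (subsidy, equity_concentration, quality) are hypothetical.
import numpy as np
import pandas as pd
import statsmodels.formula.api as smf

rng = np.random.default_rng(1)
n = 300
df = pd.DataFrame({
    "subsidy": rng.normal(size=n),
    "equity_concentration": rng.normal(size=n),
})
# Synthetic outcome with a negative subsidy effect and an interaction term.
df["quality"] = (-0.4 * df["subsidy"]
                 - 0.2 * df["subsidy"] * df["equity_concentration"]
                 + rng.normal(scale=0.5, size=n))

model = smf.ols("quality ~ subsidy * equity_concentration", data=df).fit()
print(model.summary().tables[1])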
Tumor Necrosis Factor-Alpha and Interleukin 6 in Human Periapical Lesions Aim. The aim of this study was to evaluate the presence of the cytokines tumor necrosis factor-alpha (TNF-) and interleukin-6 (IL-6) in human periapical lesions. Subjects and methods. Samples were obtained from three groups of teeth: symptomatic teeth, asymptomatic lesions, and uninflamed periradicular tissues as a control. Results. TNF-alpha levels were significantly increased in symptomatic lesions compared to control. Group with asymptomatic lesions had significantly higher concentrations compared to control. There were no significant differences in TNF-alpha levels between symptomatic and asymptomatic lesions. In group with symptomatic lesions, IL-6 levels were significantly higher than in group with asymptomatic lesions. The IL-6 levels in symptomatic group also showed significantly higher concentration in comparison with control group. In asymptomatic group, the IL-6 level had significantly higher concentrations compared to control. Conclusion. These results indicate that symptomatic lesions represent an immunologically active stage of disease, and asymptomatic lesions are the point from which the process advances toward healing. INTRODUCTION Periapical inflammatory lesions are a frequent pathology and, in most cases, a consequence of dental caries. This type of lesion develops as an immune reaction triggered by the presence of bacteria in the root canal and bacterial toxins in the periapical region. After microbial invasion of periapical tissues, both nonspecific and specific immunologic responses persist in the host tissues. This inflammatory process ultimately results in destruction of the alveolar bone surrounding the tooth. It is characterized by the presence of immunocompetent cells producing a wide variety of inflammatory mediators. TNF-alpha is a soluble mediator and is released from immunocompetent cells in inflammatory processes. TNF-alpha plays an important role in initiating and coordinating the cellular events that make up the immune system's response to infection. The biological effects of TNF-alpha include activation of leukocytes such as lymphocytes (T and B cells), macrophages, and natural killer cells; fever induction; acute-phase protein release; cytokine and chemokine gene expression; and endothelial cell activation. IL-6 is a pleotropic cytokine that influences the antigen-specific immune responses and inflammatory reactions. It stimulates the formation of osteoclast precursors from colony-forming unit-granulocyte-macrophage and increases number of osteoclasts in vivo, leading to systemic increase in bone resorption. Emerging data suggests that IL-6 also has significant anti-inflammatory activities. Together with IL-1 and TNF (which also stimulate IL-6 secretion), it belongs to the group of main proinflammatory cytokines. The inflammatory response in the persisting apical lesion protects the host from further microbial invasion. The pathogenic pathways linking infection with development of a periapical lesion and concomitant bone resorption are not fully understood. Large numbers of immunocompetent cells such as macrophages, activated T and B cells and plasma cells synthesizing all classes of immunoglobulins are present in periapical lesions. The various activities of IL-6 and TNF-alpha suggest that these factors could play a major role in mediation of the inflammatory and immune responses initiated by infection or injury. 
The aim of this study was to determine the TNFalpha and IL-6 levels in symptomatic and asymptomatic 2 Mediators of Inflammation periapical lesions using an enzyme-linked immunosorbent assay (ELISA) in surgically removed human periapical lesions. SUBJECTS AND METHODS A total number of 45 teeth were included in this study. The teeth were divided into three groups. Group 1 consisted of lesions from 15 teeth that had been diagnosed as symptomatic. Teeth were put in this group based on the following criteria: clinical and radiographic examination that determined the existence of periradicular pathosis involving destruction of cortical bone and painful sensitivity to percussion and/or palpation. Group 2 consisted of lesions from 15 teeth that had been diagnosed as asymptomatic. A diagnosis was made based on the following criteria: clinical and radiographic examination that determined the existence of periradicular pathosis involving destruction of cortical bone, no or slight sensitivity to percussion. Group 3 consisted of uninflamed periradicular tissues that were obtained from periapical regions of 15 unerupted and incompletely formed third molars. The teeth included in this group had to meet the following criteria: a verbal history confirming no history of pulpal pain, clinical and radiographic examination after extraction assuring that these teeth had no caries. Clinical examination was performed according to the standard clinical criteria. After informed consent had been obtained and medical, dental, and social histories collected, tissues were obtained by apicoectomy. The diameter of the lesions, determined on the radiographs, ranged from 2 mm to 16 mm. The surgery was performed with the patients under local anesthesia. The patients involved in this study had not suffered from any diseases requiring any form of medical treatment except for dental surgery. These patients did not receive any medications including salicylates, nonsteroid antiinflammatory drugs, or antibiotics for about 1 month prior to surgery. After excision of the lesion, each specimen was divided into two. One section was taken for histopathological evaluation and was stained with hematoxylin and eosin. Histological examination showed that 25 tissue samples were granulomas comprising of connective tissue with variable collagen density, inflammatory infiltrate predominantly of macrophages, lymphocytes, and groups of plasmocytes, polymorphonucleocytes and giant cells, as well as the presence of fibroangioblastic proliferation in variable degrees. Three lesions were diagnosed as scar tissue. Two lesions presented connective tissue with variable diffuse inflammatory infiltrate and cavity formation limited by continuous or discontinuous stratified squamous epithelium, and thus were considered inflammatory cysts. In the samples of symptomatic lesions, there was a presence of polymorphonuclear cells, more than in asymptomatic lesions. Before homogenization every sample was weighed. For cytokine analysis, the tissue was cut up finely with scissors and homogenized in a glass tissue grinder with a Teflon plunge. The elutions were performed at 4 C over a 30 minute period with mixing before centrifugation for 2 minutes at 9880 g. The concentrations of TNF-alpha and IL-6 were analyzed with a commercial enzyme-linked immunosorbent assay kit (ELISA; R&D, Minneapolis, Minn, USA). The assay was performed according to the manufacturer's instructions and the results are expressed in pg/mL. 
The detection limit for TNF-alpha was 4.4 pg/mL and 1.4 pg/mL for IL-2, respectively. Results of the protein content were expressed in log 10 pg/mL. All subjects were informed of the aims and procedures of research, as well as of the fact that their medical data would be used in research. Within the research they were guaranteed respect of their basic ethical and bioethical principlespersonal integrity (independence, righteousness, well-being, and safety) as regulated by Nrnberg codex and the most recent version of Helsinki declaration. Only those subjects who have given a written permission in form of informed consent were included. STATISTICAL ANALYSIS Data are presented as median values, interquartile range (IQR) and on a logarithmic scale. The results obtained were compared using the nonparametric Kruskal-Wallis test and Mann-Whitney as a post-hoc test. All statistical values were considered significant at the P level of.05. Statistical analysis of data was performed by using Statistica for Windows, release 6.1 (StaSoft Inc., Tulsa, Okla, USA). RESULTS This study quantified the levels of TNF-alpha and IL-6 in symptomatic and asymptomatic human periapical lesions. Lesions were also categorized by the size and histological findings. The levels of TNF-alpha and IL-6 were measured in the symptomatic and asymptomatic human periapical lesions as well as in the control group, and are presented as the median (IQR) on a logarithmic scale in Figures 1 and 2. Median value for TNF-alpha in the symptomatic group: 4.47 (IQR: 4.44-4.61) pg/mL was significantly higher (P <.001) compared to 3.96 (IQR: 3.86-4.10) pg/mL in control group. Median value of TNF-alpha in the asymptomatic group: 4.46 (IQR: 4.28-4.60) pg/m was also significantly higher (P <.001), compared to 3.96 (IQR: 3.86-4.10) pg/mL in control group. There was no significant difference in TNF-alpha level between symptomatic and asymptomatic lesions: median 4.48 pg/mL versus 3.91 pg/mL (P =.418). (Figure 1). DISCUSSION Periapical lesions usually result from a persistent inflammatory response induced by prolonged exposure of periapical tissues to various microbial agents, evoking an immunological reaction. In this local defense mechanism, various inflammatory mediators, in particular inflammatory cytokines IL-6 and TNF-alpha, play a complex and central role in regulation of the immune response. The immune complex is formed by cells whose main function is to recognize antigens that penetrate the organism and to neutralize and/or destroy them. IL-6 has many molecular forms and each molecule has a different function when secreted by different cells in distinct situations (activated through diverse stimuli). Several studies have shown that both humoral and cellular immune responses play important roles in the pathogenesis of periapical lesions. The cytokine expression has been investigated in periapical lesions, however, the role that these molecules may play in the pathogenesis of the disease has not been well established. The inflammatory cytokines IL-6 and TNF-alpha have been demonstrated to have the capacity to activate osteoclastic bone resorption. The mediators involved in the inflammatory process and bone resorption appear to be more complex. Thus, human and animal studies have demonstrated the active participation of other cytokines, such as TNF-alpha, IL-6, IL-3, GM-CSF, IL-11, IL-17, and IL-18, which have shown their potential role in the pathogenesis of osteolytic diseases. 
These cytokines might be acting synergistically with IL-1, promoting activation/differentiation of osteoclasts and production/secretion of prostaglandins by many cell types, including fibroblasts and osteoblasts. IL-6 has traditionally been considered to be a proinflammatory mediator, since it is induced by IL-1 and TNF-alpha early in the inflammatory cascade, and because it stimulates expression of acutephase proteins. Our results demonstrate the presence of IL-6 in the vast majority of tissue samples and are in agreement with those from previous studies. It has been reported that cystic growth may be due to the autocrine stimulation of cyst epithelial cell proliferation by TNF-alpha and IL-6, and the osteolytic activity of these cytokines, causing local bone loss. In this study, the concentrations of IL-6 in the symptomatic lesions were statistically significantly in correlation with asymptomatic lesions and control group. These results suggest that these lesions may represent an active state of inflammatory periradicular disease which has already been confirmed. The plasma concentration of IL-6 has also been reported to correlate with severity of infection in certain clinical pathologic conditions. The levels of IL-6 have also been measured in the patients with atypical painful disorders in orofacial region, and their levels were significantly greater than those in control group. Results from the previous studies confirm the results obtained in this study, since it has been proved at different levels that IL-6 is significantly increased in certain infections and painful conditions. In researches performed to the present date, it has been proved that both IL-6 and TNF-alpha are produced in response to infectious organisms, in vitro and in vivo conditions. Once produced, they could exert a beneficial or deleterious effect, depending on the quantity in which they are produced and the time period over which production is sustained. In our study, TNF-alpha and IL-6 were detected in all of the periapical samples. Highest concentration of TNFalpha was detected in symptomatic and asymptomatic lesions, while lowest TNF-alpha concentration was found in healthy samples. Somewhat greater concentration was found in symptomatic lesions, but there was no statistically significant difference between symptomatic and asymptomatic groups. Statistically significant difference was found in symptomatic and asymptomatic groups in comparison with the control group shows clearly that TNF-alpha is an important bone-resorptive mediator and its elevated levels have far-reaching systemic consequences. Inflammatory cytokines also play a part in the modulation of pain by interfering with nociceptive transduction, conduction, and transmission. This modulation may result from alteration of the transcription rate and post-translational changes in proteins involved in the pain pathway. In the previous study, an important role was assigned to IL-6 in the physiology of nociception and the pathophysiology of pain. Because of this fact, we wanted to analyze the correlation between tissue cytokine levels and characteristic features of the lesions, such as symptoms. Samples in Group 1 represent a symptomatic group. The elevated IL-6 and TNF-alpha levels in this group suggest that these lesions could represent an active state of inflammatory periradicular disease. In Group 2 which is the asymptomatic group, we found significantly elevated levels of both cytokines, compared to the control group. 
However, the difference in IL-6 level between this group and both symptomatic and control groups was statistically significant. These results suggest that inflammatory reaction is less intense in tissues categorized in Group 2. The present results suggest that a chronic bacterial challenge from infected root canal causes the expression of two important cytokines, TNF-alpha and IL-6, which play a key role in periapical pathogenesis as potent bone resorptionstimulating mediators.
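For readers who want to reproduce this style of analysis, the group comparison described above (Kruskal-Wallis across the three groups, Mann-Whitney as the post-hoc test, significance at p < .05) can be run with standard tools. The snippet below is a generic sketch on synthetic cytokine values, not the study's data.

# Sketch of the nonparametric comparison described above, on synthetic data.
import numpy as np
from scipy.stats import kruskal, mannwhitneyu

rng = np.random.default_rng(7)
# Hypothetical log10 cytokine concentrations (pg/mL) per group of 15 samples.
symptomatic = rng.normal(4.5, 0.15, 15)
asymptomatic = rng.normal(4.4, 0.20, 15)
control = rng.normal(4.0, 0.10, 15)

h, p = kruskal(symptomatic, asymptomatic, control)
print(f"Kruskal-Wallis: H = {h:.2f}, p = {p:.4f}")

# Post-hoc pairwise Mann-Whitney tests (no multiplicity correction shown).
for name, a, b in [("symptomatic vs control", symptomatic, control),
                   ("asymptomatic vs control", asymptomatic, control),
                   ("symptomatic vs asymptomatic", symptomatic, asymptomatic)]:
    u, p_pair = mannwhitneyu(a, b, alternative="two-sided")
    print(f"{name}: U = {u:.1f}, p = {p_pair:.4f}")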
North Carolina whistleblower Mary Willingham and former football player Deunta Williams discuss the fake classes that student-athletes were allegedly encouraged to take in order to maintain eligibility to play. (4:21) A report commissioned by the University of North Carolina says school academic advisers steered athletes into sham classes over an 18-year period but does not directly implicate coaches or athletic administrators in the scheme. The report, released Wednesday, says academic advisers in North Carolina's athletic department colluded with a manager in the African and Afro-American Studies department for student-athletes to take classes to boost their GPAs and keep them eligible in their respective sports. University of North Carolina president Tom Ross, left, and chancellor Carol Folt talk at Wednesday's meeting. AP Photo/Gerry Broome The classes, in place from 1993 to 2011, were overseen by Debby Crowder, the longtime manager in the African and Afro-American Studies department, and later by the department chairman. They allowed a student to write a paper of at least 10 pages rather than attend lectures or meet with professors. The papers were graded by Crowder, who was not a professor. They typically earned an A or B-plus grade. The report, the third and most comprehensive produced in the matter, said some academic advisers in the school's Academic Support Program for Student Athletes had ties to Crowder and let her know how high a student's grade needed to be to maintain a 2.0 GPA to be eligible to play. It also said that those advisers pushed Crowder to make exceptions for athletes, including allowing them to enroll in classes after the registration period had ended. The ASPSA is not part of the athletic department but is located in the same offices. The report says it clearly steered players to the sham classes. When Crowder retired in 2009, Julius Nyang'oro, the former chairman of the African and Afro-American Studies department, was urged to maintain the program. He was forced to retire in 2012 and was charged with fraud for holding summer classes that didn't exist. Those charges were dropped when he agreed to cooperate with the investigation. The report was conducted over eight months and included 126 interviews, with Kenneth Wainstein, a former federal prosecutor and FBI counsel who now works for a prestigious Washington, D.C., law firm, leading the investigation. The report did not find involvement at the highest levels of the university's administration, but it does fault the school for missing a number of red flags. Editor's Picks McCants says he took sham classes at UNC From June 1014: Former North Carolina star Rashad McCants told Steve Delsohn of "Outside the Lines" that he could have been academically ineligible to play during the '04-05 national title season had he not been provided fraudulent academic assistance. "The Crowder/Nyang'oro scheme marked a horrible chapter in the history of this great university," North Carolina president Thomas W. Ross said Wednesday. Investigators found a number of academic advisers saw these classes as "GPA boosters," according to Wainstein. "Coaches knew there were easy classes," said Wainstein, who added that there was no evidence that coaches or administrators, other than those in APSPA, knew Crowder was grading the course rather than a professor. School officials said Wednesday that they consider the matter an academic issue as well as an athletic one. 
"From the beginning, the university has taken the position that these classes started in an academic department by a person employed by academic side of university ... and the athletic department took advantage of it," Ross said. The university is not in the clear just yet. The NCAA re-opened its investigation in June after determining "additional people with information and others who were previously uncooperative might be willing to speak." "The intent was really to get to the bottom of what occurred, and I think we did," UNC athletic director Bubba Cunningham said, "but we are in the middle of a joint review and investigation with the NCAA. So this is just one piece of that process, but it was helpful to bring closure to the campus issue." Cunningham did not want to speculate on how long that investigation would continue, but added that the findings did not prompt a new round of self-sanctions by the school. The report also detailed a 2009 meeting that academic advisers held with the North Carolina football staff. The meeting, which came as Crowder was retiring, included a slide that noted that the classes were "part of the solution in the past" and allowed athletes not to go to class, not to take notes, not to meet with professors and not to engage with the material. Butch Davis, the UNC football coach at the time, said he did not remember the presentation and said that while he was aware there were classes that were easier, he did not know that the seminar courses were graded by an administrator. The classes, which no longer exist, were available to all students. More than 3,000 participated; student-athletes accounted for 48 percent of the people who took them. The report says Crowder and Nyang'oro told investigators that they believed the UNC administration "wanted them to provide this assistance to the student-athletes." Crowder and Nyang'oro cited the administration's inaction over the years, and Nyang'oro cited comments he received from administrators and faculty suggesting their approval. But the investigators said they found no evidence of that. "Like everyone who reads it, I feel shocked and disappointed," UNC-Chapel Hill chancellor Carol Folt said in a statement. She blamed what she called "the actions of a small number and inactions of many people" for the problem and said the university had implemented more than 70 improvements to its academic oversight, including personnel changes within the school. Four individuals who were implicated in the report were fired and others were under disciplinary review. "When we find people who are accountable, we will take decisive action," she said. Folt said the school will cooperate with the NCAA, which has not issued any sanctions. "I can't preempt their investigation or speculate about it," she said. The NCAA issued a joint statement with UNC later Wednesday: "The University of North Carolina-Chapel Hill and the NCAA enforcement staff continue to engage in an independent and cooperative effort to review information of possible NCAA rules violations as announced earlier this year. The university provided the enforcement staff with a copy of the Wainstein Report for its consideration. The information included in the Wainstein Report will be reviewed by the university and the enforcement staff under the same standards that are applied in all NCAA infractions cases. 
Due to rules put in place by the NCAA membership, neither the university nor the enforcement staff will comment on the substance of the report as it relates to possible NCAA rules violations." Investigators said they talked once to former UNC academic adviser Mary Willingham, who questioned the literacy level of Tar Heels athletes and said UNC had committed academic misconduct before leaving the job in 2010. A report that men's basketball coach Roy Williams told Willingham her only job was to keep his players eligible was not verified; Williams said he didn't believe he had met Willingham, and Willingham, who filed a civil suit against the university in June, did not talk to investigators for a second time to answer that question. The report listed Wayne Walden -- the associate director of ASPSA and academic counselor for a number of sports, including men's basketball from 2003 to 2009, and who has worked closely with Williams at both Kansas and North Carolina -- as one of the counselors who "steered players into these paper classes." It said Walden and his predecessor, Burgess McSwain, "routinely called Crowder to arrange classes for their players." The report also said Walden later played a role in the basketball players' move away from the paper-class system. The report said Walden acknowledged knowing about irregular aspects of the paper classes, including that Crowder was doing at least some of the grading. It added that, when asked whether he shared this information with former UNC assistant and then director of basketball operations Joe Holladay or Williams, Walden could not recall doing so. Both coaches told investigators that they never learned from Walden or anyone else that there was a question about faculty involvement in the classes or that Crowder was doing the grading. "You had them [Williams and Holladay] trying to pull back on independent studies, because they wanted lecture classes. You had them pull back on Afam because he [Williams] didn't like the clustering," Wainstein said. "Those are actions that are inconsistent with being complicit or really trying to promote that scheme." The report said it was unable to corroborate allegations made by former basketball player Rashad McCants to ESPN's "Outside the Lines" that tutors wrote papers for him and his teammates. McCants did not agree to be interviewed for the investigation or offer details to support the claims, the report said. Seven players who played with McCants told investigators that they drafted their own papers and that tutors' involvement was limited to general suggestions and corrections. They said they took the classes because they were easy but that they did the work themselves. "This place is built on integrity," Cunningham said. "We need to provide a great education to students and I think we do that. We've lost trust and now we have to build back that trust." Information from "Outside The Lines" reporter Steve Delsohn is included in this report
Pamidronate and zoledronate effects in the increment of bone mineral density and histomorphometry in rats. PURPOSE To compare the increment of bone mineral density (BMD) achieved with pamidronate and zoledronate, and the isolated effect of a proteinous diet, in undernourished oophorectomized and non-oophorectomized female rats, and to validate BMD indexes. METHODS Sixty young female Lewis rats were divided into five experimental groups and a control group, oophorectomized and non-oophorectomized. Drug administration was combined with proteinous and aproteinous diets. The variables analyzed were weight, bone densitometry, histomorphometry and biochemical evolution. RESULTS In the weight evaluation, the first interval showed a statistically significant increase in the oophorectomized sample. In the densitometry evaluation, the first interval showed a statistically significant decrease in the four medicated groups, and the third interval showed a statistically significant increase in two non-oophorectomized groups. In the laboratory evaluation, the four medicated groups showed an increase in total proteins and globulin and a decrease in alkaline phosphatase, phosphorus and calcium (except for the oophorectomized), while the two non-medicated groups showed an increase in phosphorus and calcium. In the histomorphometric evaluation, the oophorectomized groups had a smaller increment of BMD. CONCLUSIONS Pamidronate and zoledronate were effective in increasing BMD. The proteinous diet by itself has a therapeutic effect on BMD, although not significant compared with the medicated animals. The histomorphometry results allow validation of BMD indexes in this experimental model.
The Fitbit Surge smartwatch was leaked on a pre-order page Fitbit's smartwatch, known as the Surge, has already leaked a couple of times, and although it has yet to be officially announced, it was essentially confirmed today by a pre-order page on Brookstone. We already had most of the details about the smartwatch lined out for people from all around the world to look at, but the information that appeared today reveals just about everything there is to know about the Surge. As analysts have already noted, the smartwatch aims to create a strong link between regular smartwatches and fitness-tracking devices. With this in mind, while it has the design of a watch, it also looks like a sporting accessory. It does have a touchscreen big enough to display notifications, incoming calls, and music information and controls, and these make up most of the actual smartwatch features the device will provide; reports say that instead of running Android Wear, as was first expected, it runs software created by Fitbit's own team. Since it is meant to double as a fitness accessory, people will also be able to use the Surge as a running companion without needing to keep a smartphone around. The smartwatch also offers a heartbeat monitor and the ability to track calories burned, steps taken and so on, although nothing too fancy. What's more interesting is the price of this device. The pre-order page mentioned earlier lists the smartwatch at around $250, which may be quite high for what it currently offers. As no actual review of the smartwatch has been published yet, it will take some more time before the 'is it worth it' question can really be answered, but so far I would have expected a little bit more! With all of these factors in mind, the Fitbit Surge does seem like an interesting smartwatch to have around. What do you personally make of it? Would you consider purchasing it or another smartwatch? Why or why not?
def configure_trigger(self, cam):
    """Configure the camera's frame-start trigger via the PySpin QuickSpin API.

    Assumes PySpin is imported at module level and that CHOSEN_TRIGGER and the
    TriggerType enum are defined there as well.
    """
    print('*** CONFIGURING TRIGGER ***\n')

    if CHOSEN_TRIGGER == TriggerType.SOFTWARE:
        print('Software trigger chosen...')
    elif CHOSEN_TRIGGER == TriggerType.HARDWARE:
        print('Hardware trigger chosen...')

    # The trigger must be disabled before its selector/source can be changed.
    cam.TriggerMode.SetValue(PySpin.TriggerMode_Off)

    # Trigger the start of each frame.
    cam.TriggerSelector.SetValue(PySpin.TriggerSelector_FrameStart)

    # Route the trigger either to the software command node or to hardware Line0.
    if CHOSEN_TRIGGER == TriggerType.SOFTWARE:
        cam.TriggerSource.SetValue(PySpin.TriggerSource_Software)
    elif CHOSEN_TRIGGER == TriggerType.HARDWARE:
        cam.TriggerSource.SetValue(PySpin.TriggerSource_Line0)

    # Optional (left disabled here): allow triggering during sensor readout.
    # cam.TriggerOverlap.SetValue(PySpin.TriggerOverlap_ReadOut)

    # Re-enable the trigger with the new settings.
    cam.TriggerMode.SetValue(PySpin.TriggerMode_On)
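The snippet above configures the trigger but never fires it. As a rough, illustrative companion (not part of the original code), the sketch below shows how such a function might be exercised with standard PySpin QuickSpin calls: begin acquisition, execute the software trigger when CHOSEN_TRIGGER is TriggerType.SOFTWARE, and retrieve one image. The function name grab_one_triggered_frame and the 1000 ms timeout are illustrative choices, not from the original.

import PySpin

def grab_one_triggered_frame(cam):
    """Fire the configured trigger (software case) and retrieve a single image.

    Assumes configure_trigger() above has already run and that CHOSEN_TRIGGER /
    TriggerType are the module-level settings used there.
    """
    cam.BeginAcquisition()
    try:
        if CHOSEN_TRIGGER == TriggerType.SOFTWARE:
            # Execute the TriggerSoftware command node to request one frame.
            cam.TriggerSoftware.Execute()
        # For a hardware trigger, the frame arrives when the Line0 signal fires.
        image = cam.GetNextImage(1000)  # timeout in milliseconds (illustrative)
        if image.IsIncomplete():
            print('Image incomplete with status %d' % image.GetImageStatus())
        else:
            print('Grabbed frame %dx%d' % (image.GetWidth(), image.GetHeight()))
        image.Release()
    finally:
        cam.EndAcquisition()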
Conference registration includes all the sessions, the business meeting, and refreshments at breaks from Sunday, June 2 through Tuesday, June 4. The price for registration is the same whether or not you attend the tutorials and workshops on Saturday, June 1, but you will need to specify when you register if you will be attending the Saturday tutorials and workshops. All prices are in US dollars. Early registration ends Saturday, May 3, 2013. After this the higher registration fees will apply. Payment is accepted by credit card (Visa, Mastercard, American Express). Note that after registering, you can return to the registration page and update your registration information or cancel your registration. If you have questions about the use of the registration system or experience any problems, please send email to the conference organizers. You can find their contact information under the Contacts tab. Please make sure to also reserve a hotel room.
(Reuters) - A tornado killed a seven-day-old baby and injured more than two dozen people when it ripped through a trailer park in North Dakota and forecasters warned that parts of the Midwestern United States could face more twisters on Wednesday. The tornado, with wind speeds around 127 miles per hour (204 kph), hit a trailer home park on Tuesday in the southwest part of Watford City, North Dakota, about 180 miles (290 km) northwest of Bismarck, destroying many mobile homes, the National Weather Service said. A male baby was severely injured when the storm hit his family’s home and later died in hospital, the McKenzie County Sheriff’s Office said in a statement late on Tuesday. The office did not identify the baby. NWS weather forecaster Marc Chenard warned that tornadoes could hit portions of central and northern Minnesota and portions of western Wisconsin on Wednesday. “There’s a threat of a few tornadoes and potential of large hail and a threat of flash flooding for the same areas mainly from this evening into early Thursday,” Chenard said. About 28 trailer park residents were also injured when the storm hit Watford City. They were taken to McKenzie County Hospital, with at least three being transported by aircraft and six listed in critical condition, the sheriff’s office said in a statement. A representative from the McKenzie County Sheriff’s office did not immediately respond to requests for comment on Wednesday. Severe wind threats will shift south by Thursday and threats of storms will then impact portions of southern Minnesota, northern Iowa and central Wisconsin. Chenard said that the storm has moved out of the North Dakota area. North Dakota Governor Doug Burgum visited Watford City on Tuesday to survey areas hit by the tornado. He met with local officials and people who were displaced by the storm and were staying in local shelters, the governor’s office said in a statement. The NWS rated the North Dakota tornado an EF-2, the second-strongest on the five-step Enhanced Fujita scale.
package handlers import ( "encoding/json" "fmt" "net/http" "strconv" "strings" "time" "github.com/go-chi/chi" "github.com/enesanbar/workspace/projects/bookings/internal/driver" "github.com/enesanbar/workspace/projects/bookings/internal/repository" "github.com/enesanbar/workspace/projects/bookings/internal/repository/dbrepo" "github.com/enesanbar/workspace/projects/bookings/internal/helpers" "github.com/enesanbar/workspace/projects/bookings/internal/config" "github.com/enesanbar/workspace/projects/bookings/internal/forms" "github.com/enesanbar/workspace/projects/bookings/internal/models" "github.com/enesanbar/workspace/projects/bookings/internal/render" ) // Repo the repository used by the handlers var Repo *Repository // Repository is the repository type type Repository struct { App *config.AppConfig DB repository.DatabaseRepo } // NewRepo creates a new repository func NewRepo(a *config.AppConfig, db *driver.DB) *Repository { return &Repository{ App: a, DB: dbrepo.NewPostgresRepo(db.SQL, a), } } // NewTestRepo creates a new test repository func NewTestRepo(a *config.AppConfig) *Repository { return &Repository{ App: a, DB: dbrepo.NewTestDBRepo(a), } } // NewHandlers sets the repository for the handlers func NewHandlers(r *Repository) { Repo = r } // Home is the handler for the home page func (m *Repository) Home(w http.ResponseWriter, r *http.Request) { render.Template(w, r, "home.page.tmpl", &models.TemplateData{}) } // About is the handler for the about page func (m *Repository) About(w http.ResponseWriter, r *http.Request) { // send data to the template render.Template(w, r, "about.page.tmpl", &models.TemplateData{}) } // Reservation renders the make a reservation page and displays form func (m *Repository) Reservation(w http.ResponseWriter, r *http.Request) { reservation, ok := m.App.Session.Get(r.Context(), "reservation").(models.Reservation) if !ok { m.App.Session.Put(r.Context(), "error", "cannot get reservation from user") http.Redirect(w, r, "/", http.StatusSeeOther) return } room, err := m.DB.GetRoomByID(reservation.RoomID) if err != nil { m.App.Session.Put(r.Context(), "error", "cannot find room") http.Redirect(w, r, "/", http.StatusSeeOther) return } reservation.Room.RoomName = room.RoomName m.App.Session.Put(r.Context(), "reservation", reservation) sd := reservation.StartDate.Format("2006-01-02") ed := reservation.EndDate.Format("2006-01-02") render.Template(w, r, "make-reservation.page.tmpl", &models.TemplateData{ Form: forms.New(nil), StringMap: map[string]string{ "start_date": sd, "end_date": ed, }, Data: map[string]interface{}{ "reservation": reservation, }, }) } // PostReservation handles the posting of a reservation form func (m *Repository) PostReservation(w http.ResponseWriter, r *http.Request) { err := r.ParseForm() if err != nil { m.App.Session.Put(r.Context(), "error", "can't parse form!") http.Redirect(w, r, "/", http.StatusSeeOther) return } sd := r.Form.Get("start_date") ed := r.Form.Get("end_date") // 2020-01-01 -- 01/02 03:04:05PM '06 -0700 layout := "2006-01-02" startDate, err := time.Parse(layout, sd) if err != nil { m.App.Session.Put(r.Context(), "error", "can't parse start date") http.Redirect(w, r, "/", http.StatusSeeOther) return } endDate, err := time.Parse(layout, ed) if err != nil { m.App.Session.Put(r.Context(), "error", "can't get parse end date") http.Redirect(w, r, "/", http.StatusSeeOther) return } roomID, err := strconv.Atoi(r.Form.Get("room_id")) if err != nil { m.App.Session.Put(r.Context(), "error", "invalid data!") http.Redirect(w, r, "/", 
http.StatusSeeOther) return } room, err := m.DB.GetRoomByID(roomID) if err != nil { m.App.Session.Put(r.Context(), "error", "could not get room info") http.Redirect(w, r, "/", http.StatusSeeOther) return } reservation := models.Reservation{ FirstName: r.Form.Get("first_name"), LastName: r.Form.Get("last_name"), Phone: r.Form.Get("phone"), Email: r.Form.Get("email"), StartDate: startDate, EndDate: endDate, RoomID: roomID, Room: room, } form := forms.New(r.PostForm) form.Required("first_name", "last_name", "email") form.MinLength("first_name", 3) form.IsEmail("email") if !form.Valid() { data := make(map[string]interface{}) data["reservation"] = reservation render.Template(w, r, "make-reservation.page.tmpl", &models.TemplateData{ Form: form, Data: data, StringMap: map[string]string{ "start_date": sd, "end_date": ed, }, }) return } newReservationID, err := m.DB.InsertReservation(reservation) if err != nil { m.App.Session.Put(r.Context(), "error", "can't insert reservation into database!") http.Redirect(w, r, "/", http.StatusSeeOther) return } restriction := models.RoomRestriction{ StartDate: startDate, EndDate: endDate, RoomID: roomID, ReservationID: newReservationID, RestrictionID: 1, } err = m.DB.InsertRoomRestriction(restriction) if err != nil { m.App.Session.Put(r.Context(), "error", "can't insert room restriction!") http.Redirect(w, r, "/", http.StatusSeeOther) return } htmlMessage := fmt.Sprintf(` <strong>Reservation Confirmation</strong><br> Dear %s:, <br> This confirm your reservation from %s to %s. `, reservation.FirstName, reservation.StartDate.Format("2006-01-02"), reservation.EndDate.Format("2006-01-02"), ) // send notification msg := models.MailData{ To: reservation.Email, From: "<EMAIL>", Subject: "Reservation confirmation", Content: htmlMessage, Template: "basic.html", } m.App.MailChan <- msg m.App.Session.Put(r.Context(), "reservation", reservation) http.Redirect(w, r, "/reservation-summary", http.StatusSeeOther) } // Generals renders the room page func (m *Repository) Generals(w http.ResponseWriter, r *http.Request) { render.Template(w, r, "generals.page.tmpl", &models.TemplateData{}) } // Majors renders the room page func (m *Repository) Majors(w http.ResponseWriter, r *http.Request) { render.Template(w, r, "majors.page.tmpl", &models.TemplateData{}) } // Availability renders the search availability page func (m *Repository) Availability(w http.ResponseWriter, r *http.Request) { render.Template(w, r, "search-availability.page.tmpl", &models.TemplateData{}) } // PostAvailability renders the search availability page func (m *Repository) PostAvailability(w http.ResponseWriter, r *http.Request) { err := r.ParseForm() if err != nil { m.App.ErrorLog.Println("can't parse form!", err) m.App.Session.Put(r.Context(), "error", "can't parse form!") http.Redirect(w, r, "/", http.StatusSeeOther) return } sd := r.Form.Get("start_date") ed := r.Form.Get("end_date") // 01/02 03:04:05PM '06 -0700 dateLayout := "2006-01-02" startDate, err := time.Parse(dateLayout, sd) if err != nil { m.App.ErrorLog.Println("can't parse start date!", err) m.App.Session.Put(r.Context(), "error", "can't parse start date!") http.Redirect(w, r, "/", http.StatusSeeOther) return } endDate, err := time.Parse(dateLayout, ed) if err != nil { m.App.ErrorLog.Println("can't parse end date!", err) m.App.Session.Put(r.Context(), "error", "can't parse end date!") http.Redirect(w, r, "/", http.StatusSeeOther) return } rooms, err := m.DB.SearchAvailabilityForAllRooms(startDate, endDate) if err != nil { 
m.App.ErrorLog.Println("can't get availability for rooms", err) m.App.Session.Put(r.Context(), "error", "can't get availability for rooms") http.Redirect(w, r, "/", http.StatusSeeOther) return } // availability if len(rooms) == 0 { m.App.ErrorLog.Println("No availability") m.App.Session.Put(r.Context(), "error", "No availability") http.Redirect(w, r, "/search-availability", http.StatusSeeOther) return } reservation := models.Reservation{ StartDate: startDate, EndDate: endDate, } m.App.Session.Put(r.Context(), "reservation", reservation) render.Template(w, r, "choose-rooms.page.tmpl", &models.TemplateData{ Data: map[string]interface{}{ "rooms": rooms, }, }) } type jsonResponse struct { OK bool `json:"ok,omitempty"` Message string `json:"message,omitempty"` RoomId string `json:"room_id"` StartDate string `json:"start_date"` EndDate string `json:"end_date"` } // AvailabilityJSON handles request for availability and send JSON response func (m *Repository) AvailabilityJSON(w http.ResponseWriter, r *http.Request) { // need to parse request body err := r.ParseForm() if err != nil { // can't parse form, so return appropriate json resp := jsonResponse{ OK: false, Message: "Internal server error", } out, _ := json.Marshal(resp) w.Header().Set("Content-Type", "application/json") w.Write(out) return } sd := r.Form.Get("start_date") ed := r.Form.Get("end_date") layout := "2006-01-02" startDate, _ := time.Parse(layout, sd) endDate, _ := time.Parse(layout, ed) roomID, _ := strconv.Atoi(r.Form.Get("room_id")) available, err := m.DB.SearchAvailabilityByDatesByRoomId(startDate, endDate, roomID) if err != nil { // got a database error, so return appropriate json resp := jsonResponse{ OK: false, Message: "Error querying database", } out, _ := json.Marshal(resp) w.Header().Set("Content-Type", "application/json") w.Write(out) return } resp := jsonResponse{ OK: available, Message: "", StartDate: sd, EndDate: ed, RoomId: strconv.Itoa(roomID), } // I removed the error check, since we handle all aspects of // the json right here out, _ := json.MarshalIndent(resp, "", " ") w.Header().Set("Content-Type", "application/json") w.Write(out) } // ChooseRoom renders choose room page func (m *Repository) ChooseRoom(w http.ResponseWriter, r *http.Request) { // used to have next 6 lines //roomID, err := strconv.Atoi(chi.URLParam(r, "id")) //if err != nil { // log.Println(err) // m.App.Session.Put(r.Context(), "error", "missing url parameter") // http.Redirect(w, r, "/", http.StatusSeeOther) // return //} // changed to this, so we can test it more easily // split the URL up by /, and grab the 3rd element exploded := strings.Split(r.RequestURI, "/") roomID, err := strconv.Atoi(exploded[2]) if err != nil { m.App.Session.Put(r.Context(), "error", "missing url parameter") http.Redirect(w, r, "/", http.StatusSeeOther) return } res, ok := m.App.Session.Get(r.Context(), "reservation").(models.Reservation) if !ok { m.App.Session.Put(r.Context(), "error", "Can't get reservation from session") http.Redirect(w, r, "/", http.StatusSeeOther) return } res.RoomID = roomID m.App.Session.Put(r.Context(), "reservation", res) http.Redirect(w, r, "/make-reservation", http.StatusSeeOther) } // BookRoom renders book room page func (m *Repository) BookRoom(w http.ResponseWriter, r *http.Request) { roomID, err := strconv.Atoi(r.URL.Query().Get("id")) if err != nil { helpers.ServerError(w, err) return } sd := r.URL.Query().Get("s") ed := r.URL.Query().Get("e") layout := "2006-01-02" startDate, err := time.Parse(layout, sd) if err != nil { 
m.App.Session.Put(r.Context(), "error", "Can't parse start date!") http.Redirect(w, r, "/", http.StatusSeeOther) return } endDate, err := time.Parse(layout, ed) if err != nil { m.App.Session.Put(r.Context(), "error", "Can't parse end date!") http.Redirect(w, r, "/", http.StatusSeeOther) return } room, err := m.DB.GetRoomByID(roomID) if err != nil { m.App.Session.Put(r.Context(), "error", "Can't get room from db!") http.Redirect(w, r, "/", http.StatusSeeOther) return } reservation := models.Reservation{ StartDate: startDate, EndDate: endDate, RoomID: roomID, Room: models.Room{ RoomName: room.RoomName, }, } m.App.Session.Put(r.Context(), "reservation", reservation) http.Redirect(w, r, "/make-reservation", http.StatusSeeOther) } // Contact renders the contact page func (m *Repository) Contact(w http.ResponseWriter, r *http.Request) { render.Template(w, r, "contact.page.tmpl", &models.TemplateData{}) } // ReservationSummary renders the reservation summary page func (m *Repository) ReservationSummary(w http.ResponseWriter, r *http.Request) { reservation, ok := m.App.Session.Pop(r.Context(), "reservation").(models.Reservation) if !ok { m.App.ErrorLog.Println("cannot get item from session") m.App.Session.Put(r.Context(), "error", "Cannot get reservation from session") http.Redirect(w, r, "/", http.StatusSeeOther) return } m.App.Session.Remove(r.Context(), "reservation") sd := reservation.StartDate.Format("2006-01-02") ed := reservation.EndDate.Format("2006-01-02") render.Template(w, r, "reservation-summary.page.tmpl", &models.TemplateData{ StringMap: map[string]string{ "start_date": sd, "end_date": ed, }, Data: map[string]interface{}{ "reservation": reservation, }, }) } func (m *Repository) ShowLogin(w http.ResponseWriter, r *http.Request) { render.Template(w, r, "login.page.tmpl", &models.TemplateData{ Form: forms.New(nil), }) } func (m *Repository) PostLogin(w http.ResponseWriter, r *http.Request) { err := m.App.Session.RenewToken(r.Context()) if err != nil { m.App.ErrorLog.Println("cannot renew session token, ", err) return } err = r.ParseForm() if err != nil { m.App.ErrorLog.Println("cannot parse form ", err) return } form := forms.New(r.PostForm) form.Required("email", "password") form.IsEmail("email") if !form.Valid() { render.Template(w, r, "login.page.tmpl", &models.TemplateData{ Form: form, }) return } email := r.Form.Get("email") password := r.Form.Get("password") id, _, err := m.DB.Authenticate(email, password) if err != nil { m.App.ErrorLog.Println("cannot authenticate the user ", err) m.App.Session.Put(r.Context(), "error", "invalid login credentials") http.Redirect(w, r, "/user/login", http.StatusSeeOther) return } m.App.Session.Put(r.Context(), "user_id", id) m.App.Session.Put(r.Context(), "flash", "logged in successfully") http.Redirect(w, r, "/", http.StatusSeeOther) } func (m *Repository) Logout(writer http.ResponseWriter, request *http.Request) { _ = m.App.Session.Destroy(request.Context()) _ = m.App.Session.RenewToken(request.Context()) m.App.Session.Put(request.Context(), "flash", "successfully logged out") http.Redirect(writer, request, "/user/login", http.StatusSeeOther) } // AdminDashboard renders admin dashboard func (m *Repository) AdminDashboard(w http.ResponseWriter, r *http.Request) { render.Template(w, r, "admin-dashboard.page.tmpl", &models.TemplateData{}) } // AdminNewReservations renders all new reservations in the admin dashboard func (m *Repository) AdminNewReservations(w http.ResponseWriter, r *http.Request) { reservations, err := m.DB.AllNewReservations() if 
err != nil { helpers.ServerError(w, err) return } render.Template(w, r, "admin-new-reservations.page.tmpl", &models.TemplateData{ Data: map[string]interface{}{ "reservations": reservations, }, }) } // AdminAllReservations renders all reservations in the admin dashboard func (m *Repository) AdminAllReservations(w http.ResponseWriter, r *http.Request) { reservations, err := m.DB.AllReservations() if err != nil { helpers.ServerError(w, err) return } render.Template(w, r, "admin-all-reservations.page.tmpl", &models.TemplateData{ Data: map[string]interface{}{ "reservations": reservations, }, }) } // AdminShowReservation renders a single reservation in the admin dashboard func (m *Repository) AdminShowReservation(w http.ResponseWriter, r *http.Request) { src := chi.URLParam(r, "src") id, err := strconv.Atoi(chi.URLParam(r, "id")) if err != nil { helpers.ServerError(w, err) return } year := r.URL.Query().Get("y") month := r.URL.Query().Get("m") reservation, err := m.DB.GetReservationByID(id) if err != nil { helpers.ServerError(w, err) return } render.Template(w, r, "admin-reservation-show.page.tmpl", &models.TemplateData{ Data: map[string]interface{}{ "reservation": reservation, }, StringMap: map[string]string{ "src": src, "year": year, "month": month, }, Form: forms.New(nil), }) } func (m *Repository) AdminPostShowReservation(w http.ResponseWriter, r *http.Request) { err := r.ParseForm() if err != nil { m.App.Session.Put(r.Context(), "error", "can't parse form!") http.Redirect(w, r, "/", http.StatusSeeOther) return } src := chi.URLParam(r, "src") id, err := strconv.Atoi(chi.URLParam(r, "id")) if err != nil { helpers.ServerError(w, err) return } reservation, err := m.DB.GetReservationByID(id) if err != nil { helpers.ServerError(w, err) return } reservation.FirstName = r.Form.Get("first_name") reservation.LastName = r.Form.Get("last_name") reservation.Email = r.Form.Get("email") reservation.Phone = r.Form.Get("phone") err = m.DB.UpdateReservation(reservation) if err != nil { helpers.ServerError(w, err) return } year := r.Form.Get("year") month := r.Form.Get("month") m.App.Session.Put(r.Context(), "flash", "changes saved") if year == "" { http.Redirect(w, r, fmt.Sprintf("/admin/reservations-%s", src), http.StatusSeeOther) } else { http.Redirect(w, r, fmt.Sprintf("/admin/reservations-calendar?y=%s&m=%s", year, month), http.StatusSeeOther) } } func (m *Repository) AdminReservationCalendar(w http.ResponseWriter, r *http.Request) { now := time.Now() if r.URL.Query().Get("y") != "" { year, _ := strconv.Atoi(r.URL.Query().Get("y")) month, _ := strconv.Atoi(r.URL.Query().Get("m")) now = time.Date(year, time.Month(month), 1, 0, 0, 0, 0, time.UTC) } next := now.AddDate(0, 1, 0) last := now.AddDate(0, -1, 0) nextMonth := next.Format("01") nextMonthYear := next.Format("2006") lastMonth := last.Format("01") lastMonthYear := last.Format("2006") // get first and last day of the month currentYear, currentMonth, _ := now.Date() currentLocation := now.Location() firstDayOfMonth := time.Date(currentYear, currentMonth, 1, 0, 0, 0, 0, currentLocation) lastDayOfMonth := firstDayOfMonth.AddDate(0, 1, -1) // get rooms rooms, err := m.DB.GetRooms() if err != nil { helpers.ServerError(w, err) return } data := map[string]interface{}{ "now": now, "rooms": rooms, } for _, room := range rooms { // initialize map for the month reservationMap := make(map[string]int) blockMap := make(map[string]int) for d := firstDayOfMonth; d.After(lastDayOfMonth) == false; d = d.AddDate(0, 0, 1) { reservationMap[d.Format("2006-01-2")] = 0 
blockMap[d.Format("2006-01-2")] = 0 } restrictions, err := m.DB.GetRestrictionsForRoomByDate(room.ID, firstDayOfMonth, lastDayOfMonth) if err != nil { helpers.ServerError(w, err) return } for _, restriction := range restrictions { if restriction.ReservationID > 0 { for d := restriction.StartDate; !d.After(restriction.EndDate); d = d.AddDate(0, 0, 1) { reservationMap[d.Format("2006-01-2")] = restriction.ReservationID } } else { blockMap[restriction.StartDate.Format("2006-01-2")] = restriction.ID } } data[fmt.Sprintf("reservation_map_%d", room.ID)] = reservationMap data[fmt.Sprintf("block_map_%d", room.ID)] = blockMap m.App.Session.Put(r.Context(), fmt.Sprintf("block_map_%d", room.ID), blockMap) } render.Template(w, r, "admin-reservations-calendar.page.tmpl", &models.TemplateData{ StringMap: map[string]string{ "next_month": nextMonth, "next_month_year": nextMonthYear, "last_month": lastMonth, "last_month_year": lastMonthYear, "this_month": now.Format("01"), "this_month_year": now.Format("2006"), }, IntMap: map[string]int{ "days_in_month": lastDayOfMonth.Day(), }, Data: data, }) } func (m *Repository) AdminProcessReservation(w http.ResponseWriter, r *http.Request) { src := chi.URLParam(r, "src") id, err := strconv.Atoi(chi.URLParam(r, "id")) if err != nil { helpers.ServerError(w, err) return } err = m.DB.UpdateProcessedForReservation(id, 1) if err != nil { helpers.ServerError(w, err) return } year := r.URL.Query().Get("y") month := r.URL.Query().Get("m") m.App.Session.Put(r.Context(), "flash", "reservation marked as processed") if year == "" { http.Redirect(w, r, fmt.Sprintf("/admin/reservations-%s", src), http.StatusSeeOther) } else { http.Redirect(w, r, fmt.Sprintf("/admin/reservations-calendar?y=%s&m=%s", year, month), http.StatusSeeOther) } } func (m *Repository) AdminDeleteReservation(w http.ResponseWriter, r *http.Request) { src := chi.URLParam(r, "src") id, err := strconv.Atoi(chi.URLParam(r, "id")) if err != nil { helpers.ServerError(w, err) return } err = m.DB.DeleteReservationByID(id) if err != nil { helpers.ServerError(w, err) return } year := r.URL.Query().Get("y") month := r.URL.Query().Get("m") m.App.Session.Put(r.Context(), "flash", "reservation deleted") if year == "" { http.Redirect(w, r, fmt.Sprintf("/admin/reservations-%s", src), http.StatusSeeOther) } else { http.Redirect(w, r, fmt.Sprintf("/admin/reservations-calendar?y=%s&m=%s", year, month), http.StatusSeeOther) } } func (m *Repository) AdminPostReservationCalendar(w http.ResponseWriter, r *http.Request) { err := r.ParseForm() if err != nil { m.App.Session.Put(r.Context(), "error", "can't parse form!") http.Redirect(w, r, "/", http.StatusSeeOther) return } // to redirect user to the same page year := r.Form.Get("y") month := r.Form.Get("m") // process blocks rooms, err := m.DB.GetRooms() if err != nil { helpers.ServerError(w, err) return } form := forms.New(r.PostForm) for _, room := range rooms { // get block map from the session before user make any changes. 
// if we have an entry in the map that does not exist in the posted data,and if restriction_id > 0 // then it is a block we need to remove currentMap := m.App.Session.Get(r.Context(), fmt.Sprintf("block_map_%d", room.ID)).(map[string]int) for name, value := range currentMap { if value > 0 && !form.Has(fmt.Sprintf("remove_block_%d_%s", room.ID, name)) { // delete restriction by id fmt.Println("would delete ", value) err := m.DB.RemoveBlockByID(value) if err != nil { m.App.ErrorLog.Println(err) } } } } // handle new blocks for name, _ := range r.PostForm { if strings.HasPrefix(name, "add_block") { // add_block_1_2006-01-2 split := strings.Split(name, "_") roomID, _ := strconv.Atoi(split[2]) startDate, _ := time.Parse("2006-01-2", split[3]) fmt.Println("would insert block to room", roomID, "for date", split[3]) err := m.DB.InsertBlockForRoom(roomID, startDate) if err != nil { m.App.ErrorLog.Println(err) } } } m.App.Session.Put(r.Context(), "flash", "changes saved") http.Redirect(w, r, fmt.Sprintf("/admin/reservations-calendar?y=%s&m=%s", year, month), http.StatusSeeOther) }
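The handlers above repeatedly parse and format dates with the layout string "2006-01-02", which trips up many Go newcomers because the layout is the reference time "Mon Jan 2 15:04:05 MST 2006" rather than a format pattern. The short, self-contained sketch below (not part of the bookings repository) shows the same time.Parse, Format and AddDate idioms the handlers rely on.

package main

import (
	"fmt"
	"time"
)

func main() {
	// "2006-01-02" means YYYY-MM-DD because Go layouts use the reference time.
	const layout = "2006-01-02"

	// Parsing a form value such as r.Form.Get("start_date").
	startDate, err := time.Parse(layout, "2021-07-15")
	if err != nil {
		fmt.Println("can't parse start date:", err)
		return
	}

	// Formatting back for templates, as ReservationSummary does.
	fmt.Println(startDate.Format(layout)) // 2021-07-15

	// The calendar handler uses AddDate to find the first and last day of a month.
	firstOfMonth := time.Date(startDate.Year(), startDate.Month(), 1, 0, 0, 0, 0, time.UTC)
	lastOfMonth := firstOfMonth.AddDate(0, 1, -1)
	fmt.Println(firstOfMonth.Format(layout), "->", lastOfMonth.Format(layout))
}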
/* Test that the "test1" NSS module (selected with __nss_configure_lookup)
   returns the expected passwd entries, both by enumeration and by
   name/uid lookup.  */
#include <nss.h>
#include <pwd.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static int
do_test (void)
{
  int retval = 0;

  /* Route passwd lookups to the "test1" NSS module for this process.  */
  __nss_configure_lookup ("passwd", "test1");

  static const unsigned int pwdids[] = { 100, 30, 200, 60, 20000 };
#define npwdids (sizeof (pwdids) / sizeof (pwdids[0]))

  setpwent ();

  /* Enumerate all entries and check the name/uid pairs in order.  */
  const unsigned int *np = pwdids;
  for (struct passwd *p = getpwent (); p != NULL; ++np, p = getpwent ())
    if (p->pw_uid != *np || strncmp (p->pw_name, "name", 4) != 0
        || atol (p->pw_name + 4) != *np)
      {
        printf ("passwd entry %ju wrong (%s, %u)\n",
                (uintmax_t) (np - pwdids), p->pw_name, p->pw_uid);
        retval = 1;
        break;
      }

  endpwent ();

  /* Look up each entry by name and by uid, and make sure neighboring
     ids that should not exist are not found.  */
  for (int i = npwdids - 1; i >= 0; --i)
    {
      char buf[30];
      snprintf (buf, sizeof (buf), "name%u", pwdids[i]);

      struct passwd *p = getpwnam (buf);
      if (p == NULL || p->pw_uid != pwdids[i] || strcmp (buf, p->pw_name) != 0)
        {
          printf ("passwd entry \"%s\" wrong\n", buf);
          retval = 1;
        }

      p = getpwuid (pwdids[i]);
      if (p == NULL || p->pw_uid != pwdids[i] || strcmp (buf, p->pw_name) != 0)
        {
          printf ("passwd entry %u wrong\n", pwdids[i]);
          retval = 1;
        }

      snprintf (buf, sizeof (buf), "name%u", pwdids[i] + 1);
      p = getpwnam (buf);
      if (p != NULL)
        {
          printf ("passwd entry \"%s\" wrong\n", buf);
          retval = 1;
        }

      p = getpwuid (pwdids[i] + 1);
      if (p != NULL)
        {
          printf ("passwd entry %u wrong\n", pwdids[i] + 1);
          retval = 1;
        }
    }

  return retval;
}

#define TEST_FUNCTION do_test ()
#include "../test-skeleton.c"
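For readers unfamiliar with the NSS enumeration API being tested, here is a minimal stand-alone sketch (not part of the glibc test above) that uses the same setpwent/getpwent/endpwent loop against the system's normal passwd database, without the test-only __nss_configure_lookup redirection.

/* Illustrative stand-alone example: enumerate the real passwd database.  */
#include <pwd.h>
#include <stdio.h>

int
main (void)
{
  setpwent ();
  for (struct passwd *p = getpwent (); p != NULL; p = getpwent ())
    printf ("%s uid=%u gid=%u\n", p->pw_name,
            (unsigned int) p->pw_uid, (unsigned int) p->pw_gid);
  endpwent ();
  return 0;
}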
<reponame>adevore/rudy use std::marker::PhantomData; use std::mem; use std::ptr; use util::locksteparray; use util::SliceExt; use super::jpm::jpm_root::Jpm; use ::Key; use ::rudymap::results::{InsertResult, RemoveResult}; use std::iter; use super::rootptr::RootPtr; pub trait RootLeaf<K: Key, V> { fn get(&self, key: K) -> Option<&V>; fn get_mut(&mut self, key: K) -> Option<&mut V>; fn insert(&mut self, key: K, value: V) -> InsertResult<V>; fn expand(self, key: K, value: V) -> RootPtr<K, V>; fn remove(&mut self, key: K) -> RemoveResult<V>; fn shrink_remove(self, key: K) -> (RootPtr<K, V>, V); fn len(&self) -> usize; fn memory_usage(&self) -> usize { mem::size_of_val(self) } } pub struct Empty<K: Key, V>(PhantomData<(K, V)>); impl<K: Key, V> Empty<K, V> { pub fn new() -> Empty<K, V> { Empty(PhantomData) } } impl<K: Key, V> RootLeaf<K, V> for Empty<K, V> { fn get(&self, key: K) -> Option<&V> { None } fn get_mut(&mut self, key: K) -> Option<&mut V> { None } fn insert(&mut self, key: K, value: V) -> InsertResult<V> { InsertResult::Resize(value) } fn remove(&mut self, key: K) -> RemoveResult<V> { RemoveResult::Success(None) } fn shrink_remove(self, key: K) -> (RootPtr<K, V>, V){ unreachable!(); } fn expand(self, key: K, value: V) -> RootPtr<K, V> { Box::new(Leaf1::new(key, value)).into() } fn len(&self) -> usize { 0 } } impl<K: Key, V> Default for Empty<K, V> { fn default() -> Empty<K, V> { Empty::new() } } impl<'a, K: Key + 'a, V: 'a> IntoIterator for &'a Empty<K, V> { type Item = (K, &'a V); type IntoIter = iter::Empty<Self::Item>; fn into_iter(self) -> Self::IntoIter { iter::empty() } } pub struct Leaf1<K: Key, V> { key: K, value: V } impl<K: Key, V> Leaf1<K, V> { pub fn new(key: K, value: V) -> Leaf1<K, V> { Leaf1 { key, value } } } impl<'a, K: Key + 'a, V: 'a> IntoIterator for &'a Leaf1<K, V> { type Item = (K, &'a V); type IntoIter = iter::Once<Self::Item>; fn into_iter(self) -> Self::IntoIter { iter::once((self.key, &self.value)) } } /// A leaf root with one item. 
impl<K: Key, V> RootLeaf<K, V> for Leaf1<K, V> { fn get(&self, key: K) -> Option<&V> { if self.key == key { Some(&self.value) } else { None } } fn get_mut(&mut self, key: K) -> Option<&mut V> { if self.key == key { Some(&mut self.value) } else { None } } fn insert(&mut self, key: K, value: V) -> InsertResult<V> { if self.key == key { InsertResult::replace(&mut self.value, value) } else { InsertResult::Resize(value) } } fn expand(self, key: K, value: V) -> RootPtr<K, V> { Box::new(Leaf2::new(self.key, self.value, key, value)).into() } fn remove(&mut self, key: K) -> RemoveResult<V> { if self.key == key { RemoveResult::Downsize } else { RemoveResult::Success(None) } } fn shrink_remove(self, key: K) -> (RootPtr<K, V>, V) { let Leaf1 { key: node_key, value } = self; let ptr = RootPtr::empty(); debug_assert_eq!(node_key, key); (ptr, value) } fn len(&self) -> usize { 1 } } pub struct Leaf2<K: Key, V> { keys: [K; 2], values: [V; 2] } impl<K: Key, V> Leaf2<K, V> { pub fn new(key1: K, value1: V, key2: K, value2: V) -> Leaf2<K, V> { if key1 < key2 { Leaf2 { keys: [key1, key2], values: [value1, value2] } } else { Leaf2 { keys: [key2, key1], values: [value2, value1] } } } } impl<K: Key, V> RootLeaf<K, V> for Leaf2<K, V> { fn get(&self, key: K) -> Option<&V> { self.keys.iter() .zip(self.values.iter()) .find(|&(&leaf_key, _)| leaf_key == key) .map(|(key, value)| value) } fn get_mut(&mut self, key: K) -> Option<&mut V> { self.keys.iter() .zip(self.values.iter_mut()) .find(|&(&leaf_key, _)| leaf_key == key) .map(|(key, value)| value) } /// Attempt to insert, fail if we didn't find a key to replace fn insert(&mut self, key: K, value: V) -> InsertResult<V> { for (i, leaf_key) in self.keys.iter().enumerate() { if key == *leaf_key { return InsertResult::replace(&mut self.values[i], value); } } InsertResult::Resize(value) } fn expand(self, key: K, value: V) -> RootPtr<K, V> { let Leaf2 { keys, values } = self; let mut leaf = Box::new(VecLeaf::from_arrays(keys, values)); leaf.insert(key, value).success(); leaf.into() } fn remove(&mut self, key: K) -> RemoveResult<V> { self.keys.iter() .find(|&&k| k == key) .map(|_| RemoveResult::Downsize) .unwrap_or(RemoveResult::Success(None)) } fn shrink_remove(self, key: K) -> (RootPtr<K, V>, V) { let Leaf2 { keys, mut values } = self; let key1 = keys[0]; let key2 = keys[1]; let (value1, value2); unsafe { value1 = ptr::read(&mut values[0]); value2 = ptr::read(&mut values[1]); mem::forget(values); } if key1 == key { let ptr = Box::new(Leaf1::new(key2, value2)).into(); (ptr, value1) } else { let ptr = Box::new(Leaf1::new(key1, value1)).into(); (ptr, value2) } } fn len(&self) -> usize { 2 } } pub struct VecLeaf<K: Key, V> { array: locksteparray::LockstepArray<[K; 31], [V; 31]> } impl<K: Key, V> VecLeaf<K, V> { fn new() -> VecLeaf<K, V> { // TODO Copy memory from values VecLeaf { array: locksteparray::LockstepArray::new() } } fn from_arrays(keys: [K; 2], values: [V; 2]) -> VecLeaf<K, V> { VecLeaf { array: locksteparray::LockstepArray::from_arrays(keys, values) } } } impl<K: Key, V> IntoIterator for VecLeaf<K, V> { type Item = (K, V); type IntoIter = locksteparray::IntoIter<[K; 31], [V; 31]>; fn into_iter(self) -> Self::IntoIter { self.array.into_iter() } } impl<K: Key, V> RootLeaf<K, V> for VecLeaf<K, V> { fn get(&self, key: K) -> Option<&V> { self.array.array1() .iter() .position(|&k| k == key) .map(|index| &self.array.array2()[index]) } fn get_mut(&mut self, key: K) -> Option<&mut V> { self.array.array1_mut() .iter() .position(|&k| k == key) .map(move |index| &mut 
self.array.array2_mut()[index]) } fn insert(&mut self, key: K, value: V) -> InsertResult<V> { match self.array.array1().linear_search(&key) { Ok(replace) => { InsertResult::replace(&mut self.array.array2_mut()[replace], value) }, Err(insert) => match self.array.insert(insert, key, value) { Ok(()) => InsertResult::Success(None), Err(locksteparray::InsertError::Overflow(key, value)) => { InsertResult::Resize(value) }, Err(locksteparray::InsertError::OutOfBounds(..)) => { unreachable!() } } } } fn expand(self, key: K, value: V) -> RootPtr<K, V> { let mut jpm: Jpm<K, V> = self.into_iter().collect(); jpm.insert(key, value).success(); Box::new(jpm).into() } fn remove(&mut self, key: K) -> RemoveResult<V> { let evicted = self.array.array1_mut() .iter() .position(|&k| k == key) .and_then(|index| self.array.remove(index)) .map(|(_, value)| value); RemoveResult::Success(evicted) } fn shrink_remove(self, key: K) -> (RootPtr<K, V>, V) { unreachable!() } fn len(&self) -> usize { self.array.len() } }
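The root leaves above all follow the same protocol: a fixed-capacity node reports InsertResult::Resize when it cannot take another key, and the caller is expected to expand it into the next larger representation and retry. The following stand-alone sketch (simplified, and not the rudy crate's actual types or API) illustrates that promote-on-overflow pattern with an ordinary Vec and BTreeMap standing in for the leaf and Jpm nodes.

// Simplified, illustrative version of the "resize then expand" pattern.
#[derive(Debug)]
enum Node {
    Small(Vec<(u32, String)>),                    // stands in for Leaf1/Leaf2/VecLeaf
    Big(std::collections::BTreeMap<u32, String>), // stands in for Jpm
}

enum InsertResult {
    Success(Option<String>), // replaced value, if any
    Resize(String),          // node is full; caller must expand and retry
}

const SMALL_CAP: usize = 2;

impl Node {
    fn insert(&mut self, key: u32, value: String) -> InsertResult {
        match self {
            Node::Small(pairs) => {
                // Replace in place if the key already exists.
                if let Some(slot) = pairs.iter_mut().find(|(k, _)| *k == key) {
                    return InsertResult::Success(Some(std::mem::replace(&mut slot.1, value)));
                }
                if pairs.len() < SMALL_CAP {
                    pairs.push((key, value));
                    InsertResult::Success(None)
                } else {
                    // Hand the value back, mirroring InsertResult::Resize above.
                    InsertResult::Resize(value)
                }
            }
            Node::Big(map) => InsertResult::Success(map.insert(key, value)),
        }
    }

    fn expand(&mut self) {
        // Promote the small node into the larger representation.
        if let Node::Small(pairs) = self {
            let map = pairs.drain(..).collect();
            *self = Node::Big(map);
        }
    }
}

fn main() {
    let mut node = Node::Small(Vec::new());
    for (k, v) in [(1, "a"), (2, "b"), (3, "c")] {
        match node.insert(k, v.to_string()) {
            InsertResult::Success(prev) => {
                if let Some(old) = prev {
                    println!("replaced {}", old);
                }
            }
            InsertResult::Resize(v) => {
                node.expand();
                node.insert(k, v); // retry after promotion
            }
        }
    }
    println!("{:?}", node);
}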
/*! Presentation. ## Lifecycle Whenever a submission detects the use of any surface texture, it adds it to the device tracker for the duration of the submission (temporarily, while recording). It's added with `UNINITIALIZED` state and transitioned into `empty()` state. When this texture is presented, we remove it from the device tracker as well as extract it from the hub. !*/ #[cfg(feature = "trace")] use crate::device::trace::Action; use crate::{ conv, device::DeviceError, hub::{Global, GlobalIdentityHandlerFactory, HalApi, Input, Token}, id::{DeviceId, SurfaceId, TextureId, Valid}, resource, track::TextureSelector, LifeGuard, Stored, }; use hal::{Queue as _, Surface as _}; use thiserror::Error; use wgt::SurfaceStatus as Status; const FRAME_TIMEOUT_MS: u32 = 1000; pub const DESIRED_NUM_FRAMES: u32 = 3; #[derive(Debug)] pub(crate) struct Presentation { pub(crate) device_id: Stored<DeviceId>, pub(crate) config: wgt::SurfaceConfiguration, #[allow(unused)] pub(crate) num_frames: u32, pub(crate) acquired_texture: Option<Stored<TextureId>>, } impl Presentation { pub(crate) fn backend(&self) -> wgt::Backend { crate::id::TypedId::unzip(self.device_id.value.0).2 } } #[derive(Clone, Debug, Error)] pub enum SurfaceError { #[error("surface is invalid")] Invalid, #[error("surface is not configured for presentation")] NotConfigured, #[error(transparent)] Device(#[from] DeviceError), #[error("surface image is already acquired")] AlreadyAcquired, #[error("acquired frame is still referenced")] StillReferenced, } #[derive(Clone, Debug, Error)] pub enum ConfigureSurfaceError { #[error(transparent)] Device(#[from] DeviceError), #[error("invalid surface")] InvalidSurface, #[error("`SurfaceOutput` must be dropped before a new `Surface` is made")] PreviousOutputExists, #[error("Both `Surface` width and height must be non-zero. 
Wait to recreate the `Surface` until the window has non-zero area.")] ZeroArea, #[error("surface does not support the adapter's queue family")] UnsupportedQueueFamily, #[error("requested format {requested:?} is not in list of supported formats: {available:?}")] UnsupportedFormat { requested: wgt::TextureFormat, available: Vec<wgt::TextureFormat>, }, #[error("requested usage is not supported")] UnsupportedUsage, } #[repr(C)] #[derive(Debug)] pub struct SurfaceOutput { pub status: Status, pub texture_id: Option<TextureId>, } impl<G: GlobalIdentityHandlerFactory> Global<G> { pub fn surface_get_current_texture<A: HalApi>( &self, surface_id: SurfaceId, texture_id_in: Input<G, TextureId>, ) -> Result<SurfaceOutput, SurfaceError> { profiling::scope!("get_next_texture", "SwapChain"); let hub = A::hub(self); let mut token = Token::root(); let fid = hub.textures.prepare(texture_id_in); let (mut surface_guard, mut token) = self.surfaces.write(&mut token); let surface = surface_guard .get_mut(surface_id) .map_err(|_| SurfaceError::Invalid)?; let (device_guard, mut token) = hub.devices.read(&mut token); let (device, config) = match surface.presentation { Some(ref present) => { let device = &device_guard[present.device_id.value]; (device, present.config.clone()) } None => return Err(SurfaceError::NotConfigured), }; #[cfg(feature = "trace")] if let Some(ref trace) = device.trace { trace.lock().add(Action::GetSurfaceTexture { id: fid.id(), parent_id: surface_id, }); } #[cfg(not(feature = "trace"))] let _ = device; let suf = A::get_surface_mut(surface); let (texture_id, status) = match unsafe { suf.raw.acquire_texture(FRAME_TIMEOUT_MS) } { Ok(Some(ast)) => { let present = surface.presentation.as_mut().unwrap(); let texture = resource::Texture { inner: resource::TextureInner::Surface { raw: ast.texture, parent_id: Valid(surface_id), has_work: false, }, device_id: present.device_id.clone(), desc: wgt::TextureDescriptor { label: (), size: wgt::Extent3d { width: config.width, height: config.height, depth_or_array_layers: 1, }, sample_count: 1, mip_level_count: 1, format: config.format, dimension: wgt::TextureDimension::D2, usage: config.usage, }, hal_usage: conv::map_texture_usage(config.usage, config.format.into()), format_features: wgt::TextureFormatFeatures { allowed_usages: wgt::TextureUsages::RENDER_ATTACHMENT, flags: wgt::TextureFormatFeatureFlags::empty(), filterable: false, }, full_range: TextureSelector { layers: 0..1, levels: 0..1, }, life_guard: LifeGuard::new("<Surface>"), }; let ref_count = texture.life_guard.add_ref(); let id = fid.assign(texture, &mut token); //suf.acquired_texture = Some(suf_texture); if present.acquired_texture.is_some() { return Err(SurfaceError::AlreadyAcquired); } present.acquired_texture = Some(Stored { value: id, ref_count, }); let status = if ast.suboptimal { Status::Suboptimal } else { Status::Good }; (Some(id.0), status) } Ok(None) => (None, Status::Timeout), Err(err) => ( None, match err { hal::SurfaceError::Lost => Status::Lost, hal::SurfaceError::Device(err) => { return Err(DeviceError::from(err).into()); } hal::SurfaceError::Outdated => Status::Outdated, hal::SurfaceError::Other(msg) => { log::error!("acquire error: {}", msg); Status::Lost } }, ), }; Ok(SurfaceOutput { status, texture_id }) } pub fn surface_present<A: HalApi>( &self, surface_id: SurfaceId, ) -> Result<Status, SurfaceError> { profiling::scope!("present", "SwapChain"); let hub = A::hub(self); let mut token = Token::root(); let (mut surface_guard, mut token) = self.surfaces.write(&mut token); let 
surface = surface_guard .get_mut(surface_id) .map_err(|_| SurfaceError::Invalid)?; let (mut device_guard, mut token) = hub.devices.write(&mut token); let present = match surface.presentation { Some(ref mut present) => present, None => return Err(SurfaceError::NotConfigured), }; let device = &mut device_guard[present.device_id.value]; #[cfg(feature = "trace")] if let Some(ref trace) = device.trace { trace.lock().add(Action::Present(surface_id)); } let result = { let texture_id = present .acquired_texture .take() .ok_or(SurfaceError::AlreadyAcquired)?; // The texture ID got added to the device tracker by `submit()`, // and now we are moving it away. device.trackers.lock().textures.remove(texture_id.value); let (texture, _) = hub.textures.unregister(texture_id.value.0, &mut token); if let Some(texture) = texture { let suf = A::get_surface_mut(surface); match texture.inner { resource::TextureInner::Surface { raw, parent_id, has_work, } => { if surface_id != parent_id.0 { log::error!("Presented frame is from a different surface"); Err(hal::SurfaceError::Lost) } else if !has_work { log::error!("No work has been submitted for this frame"); unsafe { suf.raw.discard_texture(raw) }; Err(hal::SurfaceError::Outdated) } else { unsafe { device.queue.present(&mut suf.raw, raw) } } } resource::TextureInner::Native { .. } => unreachable!(), } } else { Err(hal::SurfaceError::Outdated) //TODO? } }; log::debug!("Presented. End of Frame"); match result { Ok(()) => Ok(Status::Good), Err(err) => match err { hal::SurfaceError::Lost => Ok(Status::Lost), hal::SurfaceError::Device(err) => Err(SurfaceError::from(DeviceError::from(err))), hal::SurfaceError::Outdated => Ok(Status::Outdated), hal::SurfaceError::Other(msg) => { log::error!("acquire error: {}", msg); Err(SurfaceError::Invalid) } }, } } }
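For context, the two entry points above back the user-facing wgpu surface API. The sketch below is an illustrative render-loop fragment written against a wgpu version contemporary with this wgpu-core code (roughly the 0.11-0.12 era); the function name render_frame and the clear-to-black pass are assumptions for illustration, not part of wgpu-core.

// Illustrative user-side counterpart of surface_get_current_texture / surface_present.
fn render_frame(
    surface: &wgpu::Surface,
    device: &wgpu::Device,
    queue: &wgpu::Queue,
    config: &wgpu::SurfaceConfiguration,
) {
    let frame = match surface.get_current_texture() {
        Ok(frame) => frame,
        // Outdated/Lost correspond to the Status values handled above;
        // the usual recovery is to reconfigure the surface and skip this frame.
        Err(wgpu::SurfaceError::Outdated) | Err(wgpu::SurfaceError::Lost) => {
            surface.configure(device, config);
            return;
        }
        Err(wgpu::SurfaceError::Timeout) => return, // FRAME_TIMEOUT_MS elapsed
        Err(e) => panic!("failed to acquire frame: {:?}", e),
    };

    let view = frame
        .texture
        .create_view(&wgpu::TextureViewDescriptor::default());
    let mut encoder =
        device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });
    {
        // A render pass targeting the surface texture counts as submitted work,
        // so the has_work check in surface_present will not discard the frame.
        let _pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
            label: None,
            color_attachments: &[wgpu::RenderPassColorAttachment {
                view: &view,
                resolve_target: None,
                ops: wgpu::Operations {
                    load: wgpu::LoadOp::Clear(wgpu::Color::BLACK),
                    store: true,
                },
            }],
            depth_stencil_attachment: None,
        });
    }
    queue.submit(Some(encoder.finish()));
    frame.present();
}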
Histoire Naturelle Approach Each group is introduced with a general essay. This is followed by an article, sometimes of many pages, on each animal (or other item). The article on the wolf begins with the claim that it is one of the animals with a specially strong appetite for flesh; it asserts that the animal is naturally coarse and cowardly (grossier et poltron), but becoming crafty at need, and hardy by necessity, driven by hunger. The language, as in this instance, is elegant and elaborate, even "flowery and ornate". Buffon was roundly criticised by his fellow academics for writing a "purely popularizing work, empty and puffed up, with little real scientific value". The species is named in Greek, Latin, Italian, Spanish, German, English, Swedish, and Polish. The zoological descriptions of the species by Gessner, Ray, Linnaeus, Klein and Buffon himself ("Canis ex griseo flavescens. Lupus vulgaris. Buffon. Reg. animal. pag. 235") are cited. The text is written as a continuous essay, without the sections on identification, distribution and behaviour that might have been expected from other natural histories. Parts concern human responses rather than the animal itself, as for example that the wolf likes human flesh, and the strongest wolves sometimes eat nothing else. Measurements may be included; in the case of the wolf, 41 separate measurements are tabulated, in pre-revolutionary French feet and inches starting with the "Length of the whole body measured in a straight line from the end of the muzzle to the anus........3 feet. 7 inches." (1.2 m); the "Length of the largest claws" is given as "10 lines" (2.2 cm). The wolf is illustrated standing in farmland, and as a complete skeleton standing on a stone plinth in a landscape. The account of the species occupies 32 pages including illustrations. Buffon's original edition continued by Lacépède The original edition of the Histoire Naturelle by Buffon comprised 36 volumes in quarto, divided into the following series: Histoire de la Terre et de l'Homme, Quadrupèdes, Oiseaux, Minéraux, Suppléments. Buffon edited 35 volumes in his lifetime. Soon after his death, the fifth and final volume of l’Histoire des minéraux appeared in 1788 at the Imprimerie des Bâtiments du Roi. The seventh and final volume of Suppléments by Buffon was published posthumously in 1789 through Lacépède's hands. Lacépède continued the part of the Histoire Naturelle which dealt with animals. A few months before Buffon's death, en 1788, Lacépède published, as a continuation, the first volume of his Histoire des Reptiles, on egg-laying quadrupeds. The next year, he wrote a second volume on snakes, published during the French Revolution. Between 1798 and 1803, he brought out the volume Histoire des Poissons. Lacépède made use of the notes and collections left by Philibert Commerson (1727–1773). He wrote Histoire des Cétacés which was printed in 1804. At that point, the Histoire Naturelle, by Buffon and Lacépède, thus contained 44 quarto volumes forming the definitive edition. Variations in the editions by Buffon and Lacépède Another edition in quarto format was printed by the Imprimerie royale in 36 volumes (1774–1804). It consisted of 28 volumes par Buffon, and 8 volumes by Lacépède. The part containing anatomical articles by Louis Jean-Marie Daubenton was dropped. The supplements were merged into the relevant articles in the main volumes. 
The Imprimerie royale also published two editions of the Histoire Naturelle in duodecimo format (1752–1805), occupying 90 or 71 volumes, depending on whether or not they included the part on anatomy. In this print format, the original work by Buffon occupied 73 volumes with the part on anatomy, or 54 volumes without the part on anatomy. The continuation by Lacépède took up 17 duodecimo volumes. A de luxe edition of Histoire Naturelle des Oiseaux (Birds) (1771–1786) was produced by the Imprimerie royale in 10 folio and quarto volumes, with 1008 engraved and hand-coloured plates, executed under Buffon's personal supervision by Edme-Louis Daubenton, cousin and brother-in-law of Buffon's principal collaborator. Translations The Histoire Naturelle was translated into languages including English, German, Swedish, Russian and Italian. Many translations, often partial (single volumes, or all volumes to a certain date), abridged, reprinted in the same translation by different printers, or with additional text (for example on insects) and new illustrations, were made at the end of the eighteenth century and the start of the nineteenth century, presenting a complicated publication history. Early translations were necessarily only of the earlier volumes. Given the complexity, all catalogue dates other than of single volumes should be taken as approximate. R. Griffith published an early translation of the volume on The Horse in London in 1762. T. Bell published a translation of the first six volumes in London between 1775 and 1776. William Creech published an edition in Edinburgh between 1780 and 1785. T. Cadell and W. Davies published another edition in London in 1812. An abridged edition was published by Wogan, Byrne et al. in Dublin in 1791; that same year R. Morison and Son of Perth, J. and J. Fairbairn of Edinburgh and T. Kay and C. Forster of London published their edition. W. Strahan and T. Cadell published a translation with notes by the encyclopaedist William Smellie in London around 1785. Barr's Buffon in ten volumes was published in London between 1797 and 1807. W. Davidson published an abridged version including the natural history of insects taken from Swammerdam, Brookes, Goldsmith et al., with "elegant engravings on wood"; its four volumes appeared in Alnwick in 1814. German translations include those published by Joseph Georg Trassler 1784–1785; by Pauli, 1772–1829; Grund and Holle, 1750–1775; and Johann Samuel Heinsius, 1756–1782. Italian translations include those published by Fratelle Bassaglia around 1788 and Boringherieri in 1959. Per Olof Gravander translated an 1802–1803 French abridgement into Swedish, publishing it in Örebro in 1806–1807. A Russian version (The General and Particular Natural History by Count Buffon; "Всеобщая и частная естественная история графа Бюффона") was brought out by The Imperial Academy of Sciences (Императорской Академией Наук) in St. Petersburg between 1789 and 1808. Children's An abridged edition for children was published by Frederick Warne in London and Scribner, Welford and Co. c. 1870. Contemporary The Histoire Naturelle had a distinctly mixed reception in the eighteenth century. Wealthy homes in both England and France purchased copies, and the first edition was sold out within six weeks. But Buffon was criticised by some priests for suggesting (in the essay Les Epoques de Nature, Volume XXXIV) that the earth was more than 6,000 years old and that mountains had arisen in geological time. 
Buffon cites as evidence that fossil sea-shells had been found at the tops of mountains; but the claim was seen as contradicting the biblical account in the Book of Genesis. Buffon also disagreed with Linnaeus's system of classifying plants as described in Systema Naturae (1735). In Buffon's view, expounded in the "Premier Discours" of the Histoire Naturelle (1749), the concept of species was entirely artificial, the only real entity in nature being the individual; as for a taxonomy based on the number of stamens or pistils in a flower, mere counting (despite Buffon's own training in mathematics) had no bearing on nature. The Paris faculty of theology, acting as the official censor, wrote to Buffon with a list of statements in the Histoire Naturelle that were contradictory to Roman Catholic Church teaching. Hypocritically, Buffon replied that he believed firmly in the biblical account of creation, and was able to continue printing his book, and remain in position as the leader of the 'old school', complete with his job as director of the royal botanical garden. On Buffon's death, the 19-year-old Georges Cuvier celebrated with the words "This time, the Comte de Buffon is dead and buried". Soon afterwards, the French revolution went much further in sweeping away old attitudes to natural history, along with much else. Philosophy The Stanford Encyclopedia of Philosophy calls the Histoire Naturelle "Buffon's major work", observing that "In addressing the history of the earth, Buffon also broke with the 'counter-factual' tradition of Descartes, and presented a secular and realist account of the origins of the earth and its life forms." In its view, the work created an "age of Buffon", defining what natural history itself was, while Buffon's "Discourse on Method" (unlike that of Descartes) at the start of the work argued that repeated observation could lead to a greater certainty of knowledge even than "mathematical analysis of nature". Buffon also led natural history away from the natural theology of British parson-naturalists such as John Ray. He thus offered both a new methodology and an empirical style of enquiry. Buffon's position on evolution is complex; he noted in Volume 4 from Daubenton's comparative anatomy of the horse and the donkey that species might "transform", but initially (1753) rejected the possibility. However, in doing so he changed the definition of a species from a fixed or universal class (which could not change, by definition) to "the historical succession of ancestor and descendant linked by material connection through generation", identified by the ability to mate and produce fertile offspring. Thus the horse and donkey, which produce only sterile hybrids, are seen empirically not to be the same species, even though they have similar anatomy. That empirical fact leaves open the possibility of evolution. Style The botanist Sandra Knapp writes that "Buffon's prose was so purple that the ideas themselves are almost hidden", observing that this was also the contemporary academic opinion. She notes that some quite radical ideas are to be found in his work, but they are almost invisible, given the language they are cloaked in. She quotes Buffon's dramatic description of the lion, which along with the engraving in her view "emphasized both the lion's regal bearing and personality not only in his text but also in the illustration... A reader was left in no doubt as to the importance and character of the animal." 
She concludes "No wonder the cultured aristocratic public lapped it up – the text reads more like a romantic novel than a dry scientific treatise". Evolutionary thought The evolutionary biologist Ernst Mayr comments that "In this monumental and fascinating Histoire naturelle, Buffon dealt in a stimulating manner with almost all the problems that would subsequently be raised by evolutionists. Written in a brilliant style, this work was read in French or in one of the numerous translations by every educated person in Europe". Mayr argued that "virtually all the well-known writers of the Enlightenment" were "Buffonians", and calls Buffon "the father of all thought in natural history in the second half of the eighteenth century". Mayr notes that Buffon was not an "evolutionist", but was certainly responsible for creating the great amount of interest in natural history in France. He agrees that Buffon's thought is hard to classify and even self-contradictory, and that the theologians forced him to avoid writing some of his opinions openly. Mayr argues however that Buffon was "fully aware of the possibility of 'common descent', and was perhaps the first author ever to articulate it clearly", quoting Buffon at length, starting with "Not only the ass and the horse, but also man, the apes, the quadrupeds, and all the animals might be regarded as constituting but a single family", and later "that man and ape have a common origin", and that "the power of nature...with sufficient time, she has been able from a single being to derive all the other organized beings". Mayr notes, however, that Buffon immediately rejects the suggestion and offers three arguments against it, namely that no new species have arisen in historical times; that hybrid infertility firmly separates species; and that animals intermediate between, say, the horse and the donkey are not seen (in the fossil record).
Media Literacy Among Adolescents: Preventive to the Impact of Pornography on YouTube Social Media in South Tangerang A social problem that is rife today is the impact of pornography on social media among children and adolescents. Pornography poses a very large danger, especially to teenagers. Adolescent psychology is still unstable and sexual hormones are still developing, which makes the danger (negative impact) of pornography on adolescents very large. It is not only drugs that are addictive; pornography also makes its viewers addicted. For teens, addiction to porn sites (cybersex) disrupts the rhythm of their learning. In general, addiction to porn sites has a negative impact on one's character. Based on research by Bingham and Piotrowski in the Psychological Report entitled On-line Sexual Addiction, the characteristics of people who are addicted to cybersex are inadequate social skills, a preference for wrestling with sexual fantasy, absorption in communicating with figures created by their own imagination, and an inability to keep themselves from accessing porn sites and losing track of time. The results of a Child Protection Commission (KPA) survey of 4,500 teenagers revealed that 97 percent of teens had watched or accessed pornography and 93 percent had kissed on the lips. As with chemical addiction, pornography addicts tend to replace something important with sex or other forms of pornography. People who are addicted to pornography usually use media such as magazines, porn videos or, most often, the internet. Adolescents generally still live with their parents, so parental supervision is needed to provide an understanding of how to use social media wisely and to direct adolescents toward positive uses of it. Parents may provide gadgets that give their children easy access to YouTube, but they often lack understanding of the medium themselves, so there is still little awareness among parents of the need to control the content their children see and consume on YouTube. Media literacy training among adolescents is therefore needed as a form of prevention against the impact of pornography on YouTube social media in Kembangan Utara. BACKGROUND South Tangerang is a city in the province of Banten, Indonesia. Located 30 km (19 mi) on the southwestern border of Jakarta, the city forms part of the Greater Jakarta metropolitan area. It was administratively separated from Tangerang Regency on October 29, 2008. South Tangerang City lies in the eastern part of Banten Province, at coordinates 106°38'-106°47' East Longitude and 06°13'30"-06°22'30" South Latitude, and administratively consists of 7 districts, 49 sub-districts and 5 villages with an area of 147.19 km2, or 14,719 hectares. The municipality of South Tangerang is divided into seven districts (kecamatan), tabulated with their areas and population totals from the 2010 Census and the official estimates for mid-2019. The centre of South Tangerang is the Ciputat district. The table also includes the number of administrative villages (rural desa and urban kelurahan) in each district, and their postal codes. The districts are sub-divided into 49 urban villages (kelurahan) and five rural villages (desa).
The population is an asset for a region and plays a considerable role in determining the pace of regional development, provided it is of good quality. The population plays two roles in the economy, as producers and as consumers, and the population development of an area is determined by the rates of birth, death, and migration. Based on the results of the 2010 Population Census by the BPS of South Tangerang City, the population of South Tangerang City was 1,290,322 people, and the population density reached 8,856 people/km2 in 2010. The male population was 652,281 and the female population 638,041, giving a sex ratio of 102.23, which indicates that there are slightly more males than females. South Tangerang City carries the motto "Intelligent, Modern and Religious", noble qualities that pose both a challenge and a hope for all parties. Hoping for a bright future absolutely requires good design, including measured stages, that at least refers to the concept of life to be realized: intelligent, modern, religious. A bright future in the context of "Intelligent" concerns the world of education in all its aspects: physical infrastructure (school buildings, laboratories, libraries, and the like), software, curriculum design and content, administrative systems and procedures, the welfare of employees and educators, and the quality standards of students. A bright future in the context of "Modern" involves many interrelated factors of life; it cannot be denied that formal education is structured predominantly to shape human behavior. A person or a community can generally be said to be modern when the group concerned maintains manners of mutual respect, ethics, and culture, and rarely gets caught up in open and prolonged conflict. A bright future in the context of "Religious" is the pinnacle of perfection in life; it is almost certain that when a group of people, or the majority of the people of a region, have reached the intelligent and modern phases of life, that society can also be said to have entered the religious phase. The critical problem afflicting teenagers in South Tangerang is the impact of pornography on social media. Pornography is a huge danger, especially for teenagers: adolescent psychology is still unstable, and the growth of sexual hormones in adolescence makes the negative impact of pornography on teenagers very large. It is not only drugs that are addictive; pornography also makes its consumers addicted. For teenagers, addiction to porn sites (cybersex) disrupts the rhythm of learning, and in general such addiction damages a person's character. According to research by Bingham and Piotrowski in the Psychological Report entitled "On-line Sexual Addiction", people who are addicted to cybersex show inadequate social skills, prefer to dwell on sexual fantasies, enjoy communicating with figures created by their own imagination, and are unable to stop themselves from accessing porn sites and losing track of time. The results of the Child Protection Commission (KPA) survey of 4,500 adolescents revealed that 97 percent of teens had watched or accessed pornography and 93 percent had engaged in lip kissing. As with chemical addiction, pornography addicts tend to replace something important with sex or other forms of pornography.
People who are addicted to pornography usually use media such as magazines, pornographic videos or, most often, the internet.

Research Problem
Until now, many parties have not bothered to provide healthy information about sex to children and adolescents. Many parents still consider sex a taboo subject, so they never give healthy information about sex to their children, especially those in their teens. This is partly because the parents themselves lack information about sex and do not know how to communicate about it well with their children. To satisfy their great curiosity about sex, teenagers therefore often turn to pornography in secret, either alone or with their friends. Even more alarming is when a child becomes addicted to pornography on the internet. In a single week more than 4,000 porn sites are created, a worrying number. Initially a child may not intend to view pornography at all and may use the internet for good purposes, but a porn site can appear suddenly while the child is looking for material for schoolwork or for other purposes. A child who is still innocent cannot judge whether something is good or bad, so children aged 8-12 years are often the target. It is not surprising that sexual crimes committed by adolescents and minors have become increasingly prevalent lately. The adverse effects of watching porn films on minors include mental and moral damage, which leads to sexual crimes such as rape, pregnancy out of wedlock, venereal disease, and various other harms. From the habit of watching comes the desire to try, then addiction, and a few even go on to make their own obscene videos in order to perpetuate sexual activity with a partner. In dating relationships, the relationship that develops becomes unhealthy: people who are involved in pornography treat their partners as objects of the sexual acts they watch on porn sites. In addition, in many cases pornography causes a person to lose potency, and it can damage sexual relations with a partner because the person becomes accustomed to imagining others during sex. Imagination is one of the strongest effects of pornography. Sexual values and purity become corrupted. Once a person sees pornography, all the images and scenes are embedded in the subconscious mind and draw the person in deeper. True moral values fade away, giving rise to a confusing double standard. As a result, the person drifts away from spiritual activities; prayer loses concentration because the mind is filled with sexual images and scenes. It is therefore necessary to carry out media literacy among adolescents to prevent the impact of pornography on social media, especially YouTube.

Solution
Currently, pornography spreads mostly through social media. Social media are online media in which users can easily participate and share information. Social media technology takes various forms, including magazines, internet forums, weblogs, social blogs, microblogging, wikis, podcasts, photos or images, videos, social ratings and bookmarks.
Social media have the following characteristics: messages are delivered not just to one person but can reach many people (for example messages via SMS or the internet); messages are delivered freely, without having to pass through a gatekeeper; messages tend to be delivered faster than through other media; and the recipient of the message determines the time of interaction. Users of such websites share media content such as videos, e-books, images, and so on. YouTube is a popular video-sharing website where users can upload, watch and share video clips for free. The videos on YouTube are generally music clips, movies, TV material, and videos made by the users themselves. The format used by videos on YouTube is .flv, which can be played in web browsers that have the Flash Player plugin. Most content on YouTube is uploaded by individuals, although media companies such as CBS, BBC, Vevo, Hulu, and other organizations have uploaded their material to the site as part of YouTube's partnership program. Unregistered users can watch videos, while registered users can upload an unlimited number of videos. Under the rules that formally apply, videos deemed to contain offensive content can only be watched by registered users aged 18 years or over; in reality, however, many children and adolescents under 18 can access pornographic videos on YouTube. Pornographic images found on pornographic websites usually become lodged in children's minds and are difficult to remove for a long period of time. Technology is a medium that can help humans live better, but if it is misused it can destroy its users. The internet is a part of technology considered very important for accessing information and for communication, but at the same time it can backfire on its users if it is not used wisely. Based on survey results, Indonesia ranks as the seventh-largest downloader of porn films in the world, and downloaders of porn sites in Indonesia are dominated by young people, adolescents and even minors. Most porn sites are downloaded via internet cafes, because users are afraid of being found out by their parents if they do it at home. Adolescents therefore can no longer be indifferent to the current situation and conditions. Adolescents' understanding of pornography is very important in preventing the negative effects of pornography on social media, especially YouTube. Adolescents, especially those in Kembangan Utara Village, need to be more aware and alert. Attitude is a reaction or process within a person that remains closed toward a stimulus or object; it cannot be observed directly but can only be inferred from that closed behavior. In general, attitude refers to the feelings, thoughts, and more or less permanent tendencies of a person in perceiving certain aspects of the environment. The components of attitude are knowledge, feelings, and the inclination to act. In another sense, attitude is an evaluative inclination toward an object or subject that has consequences, namely how a person deals with the object of the attitude. The attitudes held by an individual give color or style to that individual's behavior or actions. By understanding or knowing an individual's attitude, one can predict the response or behavior that the individual will take. The internet has become an inseparable part of modern life.
Prohibiting adolescents from using the internet would be like prohibiting children from using electricity and returning to the Stone Age; the answer should not be oriented toward the past or toward anti-modernization. The question that then needs to be answered is: how can teenagers be protected from pornography on social media, especially YouTube? In overcoming this problem, adolescents' understanding of pornography is crucial. Many adolescents are still unaware of the dangers and negative impacts of social media, especially those related to pornography. The dangers of the internet and the problem of internet addiction, however, are not insurmountable: by knowing the negative effects of the internet, adolescents can protect themselves. Pornography on YouTube has become a regular spectacle for young people and teenagers, a phenomenon that also appears among adolescents in Kembangan Utara, who can easily access social media, including YouTube. To avoid and anticipate the negative effects of pornography on YouTube, media literacy activities among teenagers in Kembangan Utara are needed. Several points will be conveyed in these media literacy activities to help teenagers avoid and anticipate the negative effects of pornography on YouTube:
1. Be more selective about what to watch, especially on YouTube, because the increasingly diverse content displayed there can be either positive or negative.
2. Do not be quick to try the trends circulating on YouTube, especially if a trend does not accord with the norms of the community.
3. At the same time, do not rule out using YouTube positively, both as a means to seek information and as a means to create and develop creativity.

Output Target
Technology is like two sides of a coin: one side is positive, while the other is negative and can threaten its users. This also applies to teenagers, who can access all kinds of information through social media. The threats posed by social media, especially YouTube, include pornography, and this can be countered by preventive measures in the form of media literacy activities. Media literacy is a person's ability to understand, analyze, and deconstruct media images. The point of this ability is that audiences, as media consumers (including children and adolescents), become aware of how media are constructed and accessed. The activity is intended to give adolescents the ability to understand, analyze, and critique content on YouTube so as to avoid the negative effects of the pornography found there. After participating in the media literacy activities, adolescents are expected to know and understand the dangers and negative effects of pornography on social media, especially YouTube. The activity is also expected to empower teenagers to understand, analyze, and critique YouTube content in order to avoid those negative effects. The output of this media literacy activity is an improvement of values in society. The target of the activity is adolescents who use digital media, namely YouTube and social media. Adolescents' understanding of pornography is very important in preventing the negative effects of pornography on social media, especially YouTube.
Adolescents, especially those in Kembangan Utara, need to be more aware and alert. After completing this media literacy activity, adolescents are expected to have the ability to consider, analyze, and criticize content on YouTube so as to avoid the negative effects of the pornography found there. In addition, media literacy should make them "media literate" individuals, that is, individuals who are knowledgeable, able to analyze, assess, and debate the information or media messages they receive, so that they can take a position on a particular issue and are not easily swept along by negative things.

METHODS
This media literacy activity covers seven skills: analysis (the ability to understand content and to unpack and review a message or piece of information from a medium), evaluation (the ability to assess an information message that the media convey), grouping (the ability to classify the various pieces of information obtained from a medium according to their similarities and differences), induction (the ability to analyze and review information, moving from specific details in a small scope toward a general characterization of the whole), deduction (the ability to analyze and review information of a general nature and then break it down into specific information), synthesis (the ability to rearrange a message or piece of information from a medium into a new structure different from the original), and abstraction (the complete set of abilities and skills, from analyzing and describing accurately, to identifying the key points or issues, to summarizing the message and presenting it again in language that is easier to understand). The training uses several methods: discussion, presentation and lecture. Focus group discussions (FGDs) are conducted by dividing participants into several small groups to discuss the themes or issues raised by the speakers, especially those related to the negative impacts of pornography on social media, particularly YouTube. After the discussion, representatives of each group take turns presenting the results of their group's discussion, which are then responded to by the other discussion groups. The results of the focus group discussions are then reviewed by the speakers, who provide direction and explanation through lecture techniques. In this discussion the speakers convey the main material about pornography and its negative impact on social media, especially YouTube, and also discuss empowering parents' attitudes to prevent the negative impact of pornography on YouTube. To further clarify the material and support a good understanding of it, the speakers give examples relating to pornography on social media.

RESULTS AND DISCUSSION
The community service program entitled "Media Literacy among Adolescents: Preventive to the Impact of Pornography on YouTube Social Media in South Tangerang" was held online on Monday, March 8, 2021 through Zoom meetings, because the pandemic was still ongoing this year. The program was run in collaboration with one of the international schools in the South Tangerang City area, the Global Islamic School. The activity took the form of a webinar attended by teachers and students from the Global Islamic School, with speakers from Mercu Buana University and Universiti Sains Malaysia.
The event was hosted by students from Mercu Buana University, who greeted the audience of Global Islamic School students and livened up the atmosphere of the webinar; even though the event was held online through digital media, the participants remained enthusiastic about this community service activity. Participants were coordinated to receive a Zoom Meeting link for access to the webinar of this community service program. The series of events began with opening remarks delivered by Mrs. Dwi Kurniawati, S.Pd, Deputy Head of Curriculum at the Global Islamic School. The material was then presented to the participants in sequence, first by Mercu Buana University and then by Universiti Sains Malaysia. The material was prepared with not too much text and was delivered more in the style of storytelling and information sharing, so that it could be easily accepted and understood by the teenage participants; especially when an event is carried out online, the presentation has to be made as interesting as possible so that participants do not become bored. The material presented was education in media literacy, particularly for Global Islamic School students who are still teenagers, the younger generation, so that they understand the importance of preventing the impacts of the pornography that is often found on social media. As is well known, teenagers are very close to today's technology, above all through the free use of smartphones, which function not only as a medium of communication but also as a medium for seeking information, whether school knowledge or simple entertainment. One example is the use of social media, which is inseparable from teenagers' daily lives: almost all teenagers have social media accounts and use them actively. This should be a common concern, because there is still a lack of protection and supervision over the content that is published, so teenagers may find their way to pornographic content on social media; if this is allowed to continue, it will affect adolescents who keep consuming such content, both internally for themselves and in their environment. The most popular social media platform for searching and viewing audio-visual content is YouTube, where users can easily look for information by typing keywords into the search field, after which a great deal of content relevant to those keywords appears. Given this ease of access and the lack of filters ensuring that YouTube users get age-appropriate content, teenagers who deliberately want to find pornographic content have the opportunity to do so. It is therefore important for social media users, especially teenagers, to have a good understanding and a form of self-respect, and to know the limits of the content they consume on social media. Adolescence is a stage in which a person tries various things in order to explore themselves; their curiosity about many things also drives their search for all kinds of information, supported by current conditions and by the sophistication of today's technology, especially digital media.
It is therefore important for every teenager to be equipped with an understanding and knowledge of digital media literacy, so that they can use social media positively and gain positive benefits from it, and so that they do not misuse social media for negative things, which would also have a bad impact, including the continuous consumption of pornographic content. The material of this community service program included information on the applicable regulations relating to information technology and pornography, followed by an explanation of what counts as pornography and porno-action, its types and forms, and the impacts pornography can cause. The material also covered ways of addressing the problem of pornography, from the security systems of social media platforms to the settings of the digital devices that are frequently used, including ways of helping those who are addicted to pornography through consistent efforts to keep them away from it. The material was presented not only from the perspective of the situation in Indonesia but was supplemented with material on the impact of pornography on social media from the point of view of the situation in Malaysia. During the presentation it became apparent that the teenage participants in this community service webinar already knew and understood the presence of social media in their lives, including its benefits and its other possible impacts, negative as well as positive, which come back to how users make use of social media. Regarding the pornography that is often found on social media, they also felt that it appears in many types and forms, some of which, without their realizing it, fall into the category of pornographic content. When the presentation was finished, the students were therefore given the opportunity to ask questions about anything they had not understood in the material or about the impact of pornography on social media. The students were quite active in asking the speakers questions; in particular, there were some foreign terms relating to pornographic content that they wanted to confirm, as well as questions about the psychological state of people addicted to pornographic content, the forms of disorder a person with a pornography addiction may show, and the wider future impact if the addiction is not treated immediately. The hope behind this media literacy educational webinar is that teenagers can control themselves in their use of social media, use it more wisely and according to their needs, and understand the limits they must observe at their current age. In addition, given the large amount of pornographic content on social media, especially YouTube, teenagers must be equipped with good knowledge and understanding in order to filter the content they consume and to take precautions so that they do not become addicted to pornographic content. With the education provided, it is hoped that teenagers can be selective about the information they receive from social media so that they are not adversely affected by it, including by pornographic content.
CONCLUSION AND SUGGESTIONS
This Mercu Buana University community service program, with the theme "Media Literacy among Adolescents: Preventive to the Impact of Pornography on YouTube Social Media in South Tangerang", was implemented well. The conclusion of the activities carried out in this program is that adolescence is essentially a stage in which a person tries various things in order to explore themselves; their curiosity about many things also drives their search for all kinds of information, especially in the current situation, supported by sophisticated technology, above all in digital media. The results of the webinar also show that the teenage participants already know and understand the presence of social media in their lives, including its benefits and its other possible impacts, negative as well as positive, which come back to how users make use of social media. Regarding the pornography that is often found on social media, they also feel that it appears in many types and forms, some of which, without their realizing it, fall into the category of pornographic content. It is therefore important for every teenager to be equipped with an understanding and knowledge of digital media literacy, so that they can use social media positively and benefit from it, and so that they do not misuse social media for negative things, which would also have a bad impact, including the continuous consumption of pornographic content. Based on the results of this community service activity, carried out in collaboration with the Global Islamic School located in the Serpong area of South Tangerang City, the suggestion that can be made is that this activity should continue to be carried out in the future, so that it can provide sustainable education for the teenagers who are the next generation of the nation, in this case education related to understanding and knowledge in the field of digital media literacy.
//
//  ZGOneScrollViewController.h
//  ZGTextProj
//
//  Created by 徐宗根 on 2018/1/11.
//  Copyright © 2018 XuZonggen. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface ZGOneScrollViewController : UIViewController

@end
/* * Copyright (c) 2016-2021 <NAME> <<EMAIL>> * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated * documentation files (the "Software"), to deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the * Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package net.devh.boot.grpc.test.util; import static net.devh.boot.grpc.test.util.FutureAssertions.assertFutureEquals; import static net.devh.boot.grpc.test.util.FutureAssertions.assertFutureThrows; import static org.junit.Assert.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.function.UnaryOperator; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.function.Executable; import io.grpc.Status; import io.grpc.StatusRuntimeException; import io.grpc.internal.testing.StreamRecorder; /** * Assertions related to gRPC client calls. */ public final class GrpcAssertions { /** * Asserts that the first value in the {@link StreamRecorder} equals the expected value. * * @param <T> The type of the observer's content. * @param expected The expected content. * @param responseObserver The observer to check for the expected content. * @param timeout The maximum time to wait for the result. * @param timeoutUnit The time unit of the {@code timeout} argument. */ public static <T> void assertFutureFirstEquals(final T expected, final StreamRecorder<T> responseObserver, final int timeout, final TimeUnit timeoutUnit) { assertFutureFirstEquals(expected, responseObserver, UnaryOperator.identity(), timeout, timeoutUnit); } /** * Asserts that the first value in the {@link StreamRecorder} equals the expected value. * * @param <T> The type of the unwrapped/expected content. * @param <R> The type of the observer's content. * @param expected The expected content. * @param responseObserver The observer to check for the expected content. * @param unwrapper The function used to extract the content. * @param timeout The maximum time to wait for the result. * @param timeoutUnit The time unit of the {@code timeout} argument. */ public static <T, R> void assertFutureFirstEquals(final T expected, final StreamRecorder<R> responseObserver, final Function<R, T> unwrapper, final int timeout, final TimeUnit timeoutUnit) { assertFutureEquals(expected, responseObserver.firstValue(), unwrapper, timeout, timeoutUnit); } /** * Assert that the given {@link Executable} throws a {@link StatusRuntimeException}. * * @param executable The executable to run. * @return The thrown exception. 
* @see Assertions#assertThrows(Class, Executable) */ public static StatusRuntimeException assertThrowsStatus(final Executable executable) { return assertThrows(StatusRuntimeException.class, executable); } /** * Assert that the given {@link Executable} throws a {@link StatusRuntimeException} with the expected status code. * * @param expectedCode The expected status code. * @param executable The executable to run. * @return The status contained in the exception. * @see Assertions#assertThrows(Class, Executable) */ public static Status assertThrowsStatus(final Status.Code expectedCode, final Executable executable) { final StatusRuntimeException exception = assertThrowsStatus(executable); return assertStatus(expectedCode, exception); } /** * Asserts that the given {@link StreamRecorder} throws an {@link ExecutionException} caused by a * {@link StatusRuntimeException} with the expected status code. * * @param expectedCode The expected status code. * @param recorder The recorder expected to throw. * @param timeout The maximum time to wait for the result. * @param timeoutUnit The time unit of the {@code timeout} argument. * @return The status contained in the exception. * @see #assertFutureThrowsStatus(io.grpc.Status.Code, Future, int, TimeUnit) */ public static Status assertFutureThrowsStatus(final Status.Code expectedCode, final StreamRecorder<?> recorder, final int timeout, final TimeUnit timeoutUnit) { return assertFutureThrowsStatus(expectedCode, recorder.firstValue(), timeout, timeoutUnit); } /** * Asserts that the given {@link Future} throws an {@link ExecutionException} caused by a * {@link StatusRuntimeException} with the expected status code. * * @param future The future expected to throw. * @param timeout The maximum time to wait for the result. * @param timeoutUnit The time unit of the {@code timeout} argument. * @return The thrown StatusRuntimeException. */ public static StatusRuntimeException assertFutureThrowsStatus(final Future<?> future, final int timeout, final TimeUnit timeoutUnit) { return assertFutureThrows(StatusRuntimeException.class, future, timeout, timeoutUnit); } /** * Asserts that the given {@link Future} throws an {@link ExecutionException} caused by a * {@link StatusRuntimeException} with the expected status code. * * @param expectedCode The expected status code. * @param future The future expected to throw. * @param timeout The maximum time to wait for the result. * @param timeoutUnit The time unit of the {@code timeout} argument. * @return The status contained in the exception. */ public static Status assertFutureThrowsStatus(final Status.Code expectedCode, final Future<?> future, final int timeout, final TimeUnit timeoutUnit) { final StatusRuntimeException exception = assertFutureThrowsStatus(future, timeout, timeoutUnit); return assertStatus(expectedCode, exception); } /** * Asserts that the given {@link StatusRuntimeException} uses the expected status code. * * @param expectedCode The expected status code. * @param exception The exception to check for the status code. * @return The status contained in the exception. */ public static Status assertStatus(final Status.Code expectedCode, final StatusRuntimeException exception) { final Status status = exception.getStatus(); assertEquals(expectedCode, status.getCode()); return status; } private GrpcAssertions() {} }
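/*
 * Usage sketch: a minimal, hypothetical example of how these helpers might be used in a test.
 * The StreamRecorder is driven by hand via its StreamObserver methods purely for illustration;
 * in a real test it would be passed as the response observer of an asynchronous stub call, and
 * the payload and description strings below are assumptions, not part of this utility class.
 */
import static net.devh.boot.grpc.test.util.GrpcAssertions.assertFutureFirstEquals;
import static net.devh.boot.grpc.test.util.GrpcAssertions.assertFutureThrowsStatus;

import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;

import io.grpc.Status;
import io.grpc.internal.testing.StreamRecorder;

class GrpcAssertionsUsageSketch {

    @Test
    void firstValueIsAssertedWithinTheTimeout() {
        final StreamRecorder<String> recorder = StreamRecorder.create();
        // In a real test an async stub call would feed the recorder instead of these manual calls.
        recorder.onNext("expected payload");
        recorder.onCompleted();
        // Passes if the recorder's first value equals the expected payload within 5 seconds.
        assertFutureFirstEquals("expected payload", recorder, 5, TimeUnit.SECONDS);
    }

    @Test
    void failedCallIsAssertedByStatusCode() {
        final StreamRecorder<String> recorder = StreamRecorder.create();
        // Simulate a server-side failure that would normally arrive through the stub.
        recorder.onError(Status.INVALID_ARGUMENT.withDescription("bad request").asRuntimeException());
        // Unwraps the ExecutionException and checks the gRPC status code; the returned
        // Status can be inspected further (e.g. its description) if needed.
        final Status status = assertFutureThrowsStatus(Status.Code.INVALID_ARGUMENT, recorder, 5, TimeUnit.SECONDS);
    }
}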
import { Injectable } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { environment } from '../../../environments/environment';
import { AddRatingEvent } from '../../catalog/models/add-rating-event';
import { Rating } from '../../catalog/models/rating';

@Injectable({
  providedIn: 'root'
})
export class RatingService {

  constructor(private readonly http: HttpClient) {
  }

  fetchRatingsForCatalogItem(catalogItemId: string) {
    return this.http.get<any>(`${environment.apiUrl}/api/v1/rating/${catalogItemId}`);
  }

  addRatingToCatalogItem({catalogItemId, rating}: AddRatingEvent) {
    return this.http.post<Rating>(`${environment.apiUrl}/api/v1/rating/${catalogItemId}`, {catalogItemId, rating});
  }
}
Development of Biliary and Enteral Stents by the Korean Gastrointestinal Endoscopists Stenting in the gastrointestinal tract is a common procedure used for palliation of obstruction in the enteral and biliary tract. Today, stenting of malignant and benign strictures is performed at almost every major tertiary hospital in Korea. Moreover, Korea has become a major global supplier of cutting edge technology in the field of self-expanding metal stents. However, the history of stenting in Korea is relatively short and was far behind that of other nations such as Japan and Germany. The authors are humbled and gratified to have been able to observe the development and application of these stents in Korea, first hand. In this article, the authors review the overall history of stenting with a specific focus on the development of stenting in Korea. The development of esophageal, gastroduodenal, biliary, and colonic stents in Korea are reviewed in this article from a chronological and historical point of view, and a personal account of some of the significant moments of stent development in Korea are described. INTRODUCTION Seeing a smile on the face of a long suffering patient is often taken for granted, but it is moments like this that refresh a physician on a daily basis, and is the driving force that pushes one constantly forward. In 1983, a junior endoscopist named Chan-Sup Shim, who worked at the Digestive Disease Center in Soonchunhyang University Hospital in Seoul, Korea, had gone to Japan to study under Dr. Masatsugu Nakajima, who is known as the pioneer for initiating the endoscopic biliary drainage technique by inserting a hand-made, large bore (10 Fr), biliary, plastic stent via a large channel duodenoscope (4.2 mm in diameter, Olympus TJF10; Olympus, Tokyo, Japan). After learning the procedure, he returned to Korea with hands on experience, and performed his first solo procedure of biliary stenting in a patient suffering from obstructive jaundice resulting from cholangiocarcinoma, in 1985. It was the first case of large bore biliary stenting that was introduced to the Korean Society of Gastrointestinal Endoscopy. 1 The results were dramatic and he was astonished by how rapidly the patients' symptoms were relieved. Medical technology has advanced greatly over the past 30 years in Korea. In contrast to the standardized stents of today, biliary stents of that time were designed and cut individually for each patient from a long polyurethane roll tube. The plastic roll tubes were placed over a steaming pot of water and both ends of the tube were stretched until gradually tapered, slender ends were formed. The tapered tubes would later be cut to just the right length for each patient and appropriate side holes and side flaps were created with a surgical blade (Fig. 1). Today, stents have undergone dramatic changes and are being routinely used in all fields of gastroenterology, such as esophageal, gastroduodenal, biliary, and colonic stenosis. It is well known that the Korean market is the leading developer of world-class gastrointestinal (GI) stents, notably the self-expandable metal stent (SEMS). The literature supporting the placement of stents in both be-nign and malignant conditions has increased exponentially, as also the variety of newly developed stents. The development of esophageal, gastroduodenal, biliary, and colonic stents in Korea will be reviewed in this article from a chronological and historical point of view. 
ESOPHAGEAL STENTING Today, esophageal stenting is a common practice for palliation of dysphagia caused by malignant disorders and for palliation of tracheoesophageal fistulae caused by either esophageal cancer or cancer of the lung. Esophageal stenting was first reported by Celestin 2 in 1959. The Celestin tube is a rigid latex prosthesis that was used for permanent esophageal intubation in inoperable cancer of the esophagus and cardia. The Celestin tube was originally placed surgically via an anterior gastrostomy, but later on was placed perorally under endoscopic guidance. However, the Celestin tube was fraught with complications and underwent several modifications, such as being made with softer rubber material, a slimmer design, or the addition of a distal flange. Additionally, the diameter of the Celestin tube was fixed. Therefore, the stricture had to be dilated prior to placement in many cases. 3 In 1977, Tytgat and den Hartog Jager 4 described a method using a small caliber endoscope for non-surgical peroral placement of a plastic esophageal prosthesis in patients with obstructing cardio-esophageal malignancy, or with pulmonary-esophageal fistulas. Although the reported success rate for initial deployment of these rigid stents was approximately 95%, the acute complication rates were extremely high. Complications like bleeding, perforation, tracheal compression, stent occlusion, migration, and fever were reported in as many as one-third of the cases. Most cases of fever, possibly occurred due to localized perforations, which were managed conservatively with parenteral antibiotics and limited oral intake. Late complications included bleeding, perforation, migration, tumor overgrowth, and aspiration. There were also several reports of the latex of the Celestin tube disintegrating, which in turn caused complications such as bleeding, perforation, and obstruction. 5 The first esophageal stenting in Korea was performed with a Celestin-Pulsion esophageal prosthesis by Shim et al. 6 in 1986 (Fig. 2). He had just returned from the Amsterdam medical center (AMC) and was able to procure some of these newly developed stents. Due to the high rate of complications, rigid stents were later inevitably replaced by SEMSs. Over the last 15 years, SEMSs have emerged as the treatment of choice for the palliation of malignant dysphagia. Indications for using SEMSs have expanded, while complications have reduced. Stent designs have undergone significant changes. SEMSs were initially introduced for use in the blood vessels and the biliary tree. However, once they were developed for use in the esophagus, they quickly and largely replaced neolumen-creating technologies. One of the first esophageal SEMSs to be developed was the EsophaCoil stent (InStent, Eden Prairie, MN, USA), a coilspring stent having a single-wire coiled configuration with two flanged ends, made from biocompatible nickel-titanium. However, there were several problems with this stent. Given its uncovered spring coil shape, repositioning was virtually impossible, once it was deployed. Tumor ingrowth was also an issue and this stent was soon withdrawn from the market. 7 However, the EsophaCoil stent marked an important change for future stenting procedures. Improved designs soon followed, the most noticeable being the wire mesh stent. SEMSs in general produce a radial force against a narrow lumen after deployment. This radial force is driven by the inherent shape memory properties of the compressed alloy that constitutes the stent itself. 
Today most esophageal SEMSs use nitinol due to its durability, shape memory, and relative conformability compared to other metallic materials such as stainless steel. Currently, numerous commercially available esophageal stents can be partially or completely covered. Covered stents were mainly used to prevent tumor ingrowth or for palliation of tracheoesophageal fistulas. The first report of a covered esophageal SEMS applied in humans was by Professor Ho-Young Song in 1991 8 (Table 1). He used a modified Gianturco Z-stent (Wilson-Cook, Winston-Salem, NC, USA) with a silicone membrane for palliation of malignant esophagogastric strictures. 8 Modern esophageal stents are also diverse in shape and may come with distal or proximal flanges to prevent migration. The Choo stent, another covered esophageal SEMS (M.I. Tech, Seoul, Korea), was developed in 1995. This stent was covered with polyurethane, with 3-mm gaps between each metallic stent body part of stainless steel wire arranged in a cylindrical zigzag fashion of 12 or 15 bends, so as to maximize its longitudinal flexibility. 9 Other features, such as proximal retrieval lassos that enable a deployed stent to be removed and replaced with endoscopic grasping forceps, have been introduced, and for stents placed at the esophagogastric junction, which disrupt the lower esophageal sphincter, anti-reflux stents with a specialized valve have been developed. The Do stent, also a covered esophageal SEMS but the first with an anti-reflux valve (M.I. Tech), made of three polyurethane leaflets attached to the distal part of the stent to prevent reflux, was developed in 1996. 10 A double-layered esophageal stent was also developed, in which the inner SEMS is covered with a polyurethane membrane to prevent tumor ingrowth and the outer uncovered SEMS prevents migration. The anti-migration mechanism of this double-layered esophageal stent is that the wire mesh of the outer uncovered SEMS becomes embedded in the esophageal wall. 11 Professor Shim personally contributed to the development of some new types of covered esophageal SEMS in 2001 (Table 1). The most representative of these were the Shim-Hanaro stent and the Shim-Hanaro anti-reflux stent (M.I. Tech), which have recently been approved for use in the United States (Fig. 3A). 12 A cervical esophageal stent designed with a short proximal flange for use in malignant cervical esophageal stenosis within 2 cm of the upper esophageal sphincter 13 (Fig. 3B), and Shim's technique of fixing a partially covered esophageal stent with a string looped around the external ear to prevent stent migration, were also notable contributions (Fig. 3C). 14 This new design was able to overcome many of the problems. However, normal tissue reaction in response to the stent was still an issue, and this led to the development of a paclitaxel drug-eluting stent, whose response was promising in animal studies. 15 Proximal-releasing stent insertion using a new delivery system (Taewoong Medical), which specializes in releasing the esophageal stent from its proximal end under direct vision using transnasal endoscopy without fluoroscopy, was developed in 2006. The anchoring method, in which the stent is connected with a silk thread that is then looped around the patient's earlobe to prevent migration, is mainly used for the management of patients with postoperative esophageal leakage. 16
Professor Shim recently designed a new fully covered esophageal through-the-scope (TTS) stent, which can pass through the working channel of the endoscope for easy insertion under direct endoscopic guidance. 17 BILIARY STENTING In the early 1980s, Professor Keiichi Kawai (Kyoto Prefectural University of Medicine, Kyoto, Japan), who was Dr. M. Nakajima's mentor, commissioned Olympus to produce a new large-channel duodenoscope that would be capable of passing a large 10 Fr (3.7 mm) plastic stent into the biliary tract. Professor Shim had trained in endoscopy under Dr. Nakajima's supervision in 1982 and was performing clinical trials on patients using plastic polyurethane stents, which Professor Kawai had introduced from Europe. The stent was in its last stages of production. Professor Shim was intrigued by these new and novel ideas and stayed in close contact with Dr. Nakajima long after his training period was over. Professor Shim hoped to bring this new technique to Korea (Table 1, Fig. 1) and eventually asked Dr. Nakajima for a priority supply of the large working channel duodenoscope (TJF; Olympus, working channel 4.2 mm in diameter) once it was released onto the Japanese market. Dr. Nakajima sent Professor Shim the prototype large-channel, side-viewing duodenoscope just before it was released in Japan. In 1979, Soehendra and Reynders-Frederix 18 first introduced endoscopic biliary drainage using a plastic stent. These early stents were hand-made and specifically cut for the individual patient. Today, plastic biliary stents are standardized, and most stents are slightly curved to fit the contours of the common bile duct so as to prevent stent migration. Plastic stents are made of polyethylene, Teflon, or polyurethane. Side holes are present at both ends of many stent models in order to maintain drainage if the tip of the stent becomes impacted in the biliary or intestinal wall. Since it has been suggested that these side holes favor sludge formation, models without side holes but with multiple side flaps intended to prevent stent migration have been developed. These are known as "Tannenbaum" stents, the name being derived from the German word for fir tree. 19 The clogging tendency of plastic stents represents a major disadvantage, causing the patient to experience recurrent jaundice and cholangitis. The risk of occlusion of standard polyethylene stents appears to increase progressively after 3 months, with an estimated median patency of 4 to 5 months for 10 Fr plastic stents. Attempts to prolong stent patency by the addition of an anti-reflux valve or by the use of different coatings on the stent surface are being tested. 20 Compared to plastic biliary stents, biliary SEMSs have the advantage of expanding to a much larger diameter than the working channel of the endoscope used for insertion, thus enabling longer patency. However, SEMSs are more expensive and are generally more difficult to remove after deployment. Biliary SEMSs are available in various lengths ranging from 4 to 10 cm, with a stent diameter of 10 mm. They are provided in a constraining sheath, mounted on a delivery catheter, and the SEMS expands and usually reaches its nominal length and diameter within a few hours or days after removal of the constraining sheath. The diameter of the complete assembly ranges from 5.0 Fr (some Leufen stents) to 10.5 Fr (some Shim-Hanaro stents).
Biliary SEMSs are also available with covering membranes made of various materials such as polyurethane, silicone, and polytetrafluoroethylene. Biliary SEMSs can be fully or partially covered, like esophageal SEMSs. The delivery catheter is slightly larger for covered SEMSs than for uncovered SEMSs. Professor Shim was the first to report a successful endoscopic application of the Gianturco Z-stent in a patient with benign biliary stenosis in 1992 21 and the first to perform an endoscopic application of an uncovered biliary spiral stent (M.I. Tech), which was developed in 1993 (Table 1). Two uncovered biliary SEMSs, along with their specialized delivery systems for endoscopic placement, were developed: the Hanaro biliary stent (M.I. Tech) in 1995 and the Niti-S biliary stent (Taewoong Medical) in 1998. Covered biliary SEMSs are used to prevent stent occlusion by tumor ingrowth and to facilitate stent removal. Professor Shim's first experience with covered biliary SEMSs was in 1997, when they were not commercially available. Professor Shim commissioned the application of a polyurethane membrane to a modified Gianturco biliary stent for palliation in patients with malignant biliary obstruction. Polyurethane was not available in Korea and, following some research, it was obtained from a chemical market in Osaka, Japan (Fig. 4: a hand-made membrane-covered biliary self-expandable metal stent, shown in expansion after uncoiling the string; stent material, stainless steel, 30 Fr; membrane, polyurethane; introducing apparatus, string-pull type). The preliminary results of this study were promising and led to the development of the Shim-Hanaro stent (Fig. 5). 22 Today it is still controversial whether covered biliary SEMSs increase stent patency by preventing tumor ingrowth; although data are still limited, several studies have shown an improvement in stent patency with covered SEMSs, and most manufacturers produce such models today. 23 The uncovered D-type biliary stent (Taewoong Medical) and the covered ComVi biliary stent (Taewoong Medical) were developed in 2000 24 and 2003, 25 respectively. The uncovered D-type biliary stent improved stent flexibility, while the covered ComVi biliary stent not only improved stent flexibility but also reduced migration, owing to its weakened axial force and its triple-layered structure, an e-PTFE (expanded polytetrafluoroethylene) membrane sandwiched between two uncovered nitinol wire layers. Bilateral biliary stenting is technically challenging, and to overcome the difficulties of the TTS technique with conventional metal stents, several kinds of newly designed SEMSs have recently been introduced to facilitate passage of the second metal stent during bilateral stenting. The Y-type biliary Niti-S stent (Taewoong Medical), with a wider-mesh portion in the center and smaller-mesh portions at both ends, was developed in 2004. Joining the Y-stent and a conventional metal stent through the wider-mesh portion of the Y-stent produces a Y-shaped arrangement. The large opening in the wider-mesh wall of the first Y-stent permits easy passage of the delivery catheter for the second stent and its wide expansion. 26 Other stents have also been developed for bilateral stenting (the M-Hilar stent and the K-Hilar stent; Standard Sci Tech, Seoul, Korea).
The M-Hilar stent was designed by Professor Jong Ho Moon with a differently woven structure in the center, which permits easy passage of the delivery catheter of the second stent even though it has no wider-mesh portion. 27 The K-Hilar stent is another special stent, with a large hole in the center. The lumens of both stents remain fully patent, without blockage by the wire mesh, when this particular stent is used as the second stent during bilateral stenting. The large-cell D-type biliary metal stent (Niti-S LCD biliary stent; Taewoong Medical), with a larger cell diameter of 6 mm, lower axial force, and optimal radial force, was developed to facilitate passage of a second SEMS through any portion of its wire mesh during the TTS technique, allowing simple and easy re-intervention in patients with malignant hilar obstruction. 28 Several fully covered SEMSs have been developed for the management of benign biliary and pancreatic stenosis, such as the Bumpy stent (Taewoong Medical), 29 a new anti-migration design with uneven cell sizes in a repetitive formation of one large and two small cells, and a fully covered SEMS with an anchoring flap (M.I. Tech), 30 another anti-migration design with flaps at the distal end to prevent migration. A comparison study between this stent and a conventional stent with flared ends revealed no migration with this stent, compared with a 33% rate of migration with the conventional stent. 31 Fully covered intraductal SEMSs, such as the M-intraductal BonaStent (Standard Sci Tech) 28 and the Kaffes stent (Taewoong Medical), 32 were developed for the purpose of preventing sludge build-up due to reflux of the duodenal contents. It is advised that endoscopic sphincterotomy should not be performed when such a stent is used, since reflux of the duodenal contents into the bile duct would then be highly likely. These intraductal SEMSs can be easily removed by pulling on the lasso attached to their distal ends with grasping forceps. 30 Several fully or partially covered SEMSs were specifically designed for endoscopic ultrasonography (EUS)-guided stenting by Professor Sang Soo Lee, such as the Bona-AL stent (Standard Sci Tech). 33 The Pseudocyst stent (Standard Sci Tech) is an example of a fully covered SEMS specifically designed for EUS-guided pancreatic pseudocyst drainage. 35 Drug-eluting metal stents covered with a paclitaxel-incorporated membrane (Niti-S Mira-cover biliary stent; Taewoong Medical) were developed by Professor Dong Ki Lee and have been used in humans since 2003, 36 proving technically feasible, safe, and effective in patients with malignant biliary obstruction. However, a prospective, comparative pilot study between this stent and control covered metal stents revealed no significant differences in the duration of stent patency or patient survival time. Therefore, further large-scale, prospective, comparative studies were recommended with a new SEMS capable of releasing the chemotherapeutic agent over an extended period of time. 37 STENTING OF THE STOMACH, DUODENUM, AND COLON Gastroduodenal stenting in Korea was first performed in a patient with gastric outlet obstruction due to advanced gastric cancer. However, the original introducing system of the esophageal metal stent used (EsophaCoil) was too short to reach the duodenum.
Therefore, two introducing systems were connected together to extend the total length, and the stent was inserted into the duodenum under fluoroscopy in 1995 (Fig. 6). 38 However, due to the angulation of the duodenum and the stiff nature of the stent, insertion was very difficult and the procedure was also time-consuming. Following renewed efforts, the TTS stent assembly (Niti-S Gastroduodenal stent; Taewoong Medical), using a vascular stent 18 mm in diameter together with a novel home-made delivery system, was finally born, and it was used successfully in patients with gastric outlet obstruction in 1998. At the same time, the first successful TTS stenting procedure in the United States was being reported by Soetikno et al., 39 using an enteral Wallstent deployed directly through the endoscope for palliation of malignant gastric outlet obstruction. During the following 2 years the TTS technique was refined, and many physicians began to use it in obstructive cancers of the stomach and duodenum, and even in strictures of the small bowel. 40 Occasionally, when rectal or colon cancer completely obstructs the GI tract, the patient is unable to defecate or pass gas and becomes extremely bloated, experiencing dyspnea. Stenting had never been an indication for such patients. However, in 1988, Professor Shim decided to attempt colon stenting in patients with totally obstructive, inoperable rectal cancer using a plastic esophageal stent (Fig. 7). Professor Shim was later quoted as saying, "I knew that it was a success as soon as I deployed the stent. The stench and fecal spray confirmed it. The euphoric expression on the patient's face was more than enough to help forget the mess in the operating theater and on myself." The TTS technique later evolved to enable stenting of the proximal colon (Table 1, Fig. 8). Up to the early 2000s, colonic stenting was limited to palliation of the distal colon. More proximal colonic lesions are technically more difficult, as the tortuosity of the colon prevents advancement and positioning of the stent. In 2003, Professor Shim commissioned partially covered and uncovered Niti-S stents specifically developed for use in the colon (Table 1). The stents consisted of wire-braided nitinol with proximal and distal flanged ends, 15 mm in length and 24 mm in diameter, and a central body 18 mm in diameter. The TTS delivery system was 10.5 and 10 Fr for the covered and uncovered models, respectively. Using this new stent, acute proximal colonic obstruction was successfully managed in several patients (Fig. 8) 41 and it became possible to stent obstructive cancers anywhere in the colon. Today, stenting is possible in any part of the GI tract that can be reached by the endoscope. CONCLUSIONS The motivation for developing stents has always been to improve the patient's symptoms and quality of life. When facing a suffering patient, physicians must weigh numerous options and constantly question traditional methods. Modern medicine has advanced not only with the help of breakthrough technology, but also through questioning individuals who, through their numerous failures, have held on to the hope that they could ease the pain of patients and their families. If we had submitted to our environment and had not challenged what was already accepted, the stenting of GI cancer might not be where it is today. Today, Korea has gained worldwide recognition for its cutting-edge medical technology.
We are confident that the future remains bright as long as physicians continue to be curious, and we hope that this process of developing new stents for patients struggling with cancer will continue.
Etiology and Management of Male Iatrogenic Urethral Stricture: Retrospective Analysis of 172 Cases in a Single Medical Center
Purpose: To investigate the etiology and management of male iatrogenic urethral stricture in China. Methods: The data of 172 patients with iatrogenic urethral stricture who underwent treatment at a high-volume referral center in China from January 2008 to February 2014 were analyzed retrospectively. The database was analyzed to understand the impact of different types of iatrogenic injury on stricture location, length, and treatment, as well as on success rates. Results: The most common cause of iatrogenic stricture was urethral instrumentation, in 80 patients (46.51%). Mean stricture length was 3.3 ± 2.54 cm, and the longest strictures were those caused by intravesical instillation. Substitution urethroplasty was the most common intervention, performed in 60.47% (104/172) of patients. The overall success rate was 85.00% (136/160). Univariable analysis revealed that the type of iatrogenic injury was significantly related to restenosis (p = 0.036); strictures caused by intravesical instillation were more prone to postoperative restenosis than those of other etiologies. Conclusion: Our results show that urethral instrumentation is the most common etiology of iatrogenic urethral stricture and that most iatrogenic urethral strictures involve the anterior urethra. The different etiologies are closely associated with stricture location, length, and the overall prognosis of urethral strictures.
/// Read data from the device to fill the provided slice
fn read(&mut self, data: &mut [u8]) -> I2CResult<()> {
    let len = data.len();
    // Copy `len` bytes out of the backing register buffer, starting at the current offset.
    data.clone_from_slice(&self.registers[self.offset..(self.offset + len)]);
    // Note: the log below prints `self.offset - data.len()`, which only reads sensibly if the
    // offset has already been advanced past the bytes just returned; with the offset left
    // untouched (as in this snippet) the subtraction underflows whenever `offset < len`.
    println!("READ | 0x{:X} : {:?}", self.offset - data.len(), data);
    Ok(())
}
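// A minimal, self-contained sketch (an assumption for illustration, not the original crate's
// code) of how a register-backed mock like the one above might behave end to end. The original's
// `I2CResult` type and surrounding struct are not shown, so a plain `Result` and a hypothetical
// `MockDevice` stand in for them here.
struct MockDevice {
    registers: Vec<u8>,
    offset: usize,
}

impl MockDevice {
    /// Same copy logic as the snippet above, but logging the offset the data was read from.
    fn read(&mut self, data: &mut [u8]) -> Result<(), ()> {
        let len = data.len();
        data.clone_from_slice(&self.registers[self.offset..(self.offset + len)]);
        println!("READ | 0x{:X} : {:?}", self.offset, data);
        Ok(())
    }
}

fn main() {
    let mut dev = MockDevice { registers: vec![0xAA, 0xBB, 0xCC, 0xDD], offset: 1 };
    let mut buf = [0u8; 2];
    dev.read(&mut buf).unwrap();
    assert_eq!(buf, [0xBB, 0xCC]); // bytes copied from registers[1..3]
}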
Sportsman's authority in the perspective of man's weakness
Background: The history of the glory and fall of the cyclist Lance Armstrong touches on issues at the borderline of sociology, psychology, pharmacology, and competitive sport. It stirs strong emotions and extreme opinions, with human attitudes focused on phenomena that cannot be interpreted unequivocally. The purpose of this research was to learn opinions on the life path of Lance Armstrong, regarding his struggle both with cancer and with his rivals in the peloton, a path at the same time marked by prohibited doping. Material and methods: The research was carried out using a survey containing one question about the ethics of the former professional road racing cyclist's behaviour. Results: Participants in the research had their own oncological experience and pursued recreational sport as a passion. In both respects, their lives mirror the path of one of the most recognizable sportsmen of the turn of the century. In most cases, they expressed understanding for his behaviour. Conclusions: The main conclusion from the assessment of Armstrong is that, among people with a similar life history, most are able to point to his numerous contributions to humanity, minimizing and sometimes even downplaying the shameful conduct that disgraced professional sport.
// Alliswell - Payload claims assembled before generating the JWT and saved inside the JWT
export interface JwtPayload {
  user_id: string;
  role: string;
}
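// A hedged usage sketch (not part of the original module): issuing and verifying a token whose
// claims follow the JwtPayload shape above, using the `jsonwebtoken` package. The package choice,
// the secret source, and the demo values are assumptions made purely for illustration.
import * as jwt from "jsonwebtoken";

const secret = process.env.JWT_SECRET ?? "dev-only-secret"; // hypothetical secret source

const payload: JwtPayload = { user_id: "42", role: "admin" };
const token = jwt.sign(payload, secret, { expiresIn: "1h" });

// jwt.verify returns the decoded claims when the signature and expiry check out.
const decoded = jwt.verify(token, secret) as unknown as JwtPayload;
console.log(decoded.user_id, decoded.role);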
What’s the easiest, almost-looks-fancy, toothpick-related finger food you can make that doesn’t include pigs and blankets and pigs inside of said blankets? Vegan Cucumber Veggie Rolls. Raw ones. Or with cauliflower rice. Maybe. Damn girl, you look almost fancy. Also, ever since going full-blown vegetarian, I’ve learned to embrace vegetarian sushi. And maybe you’ve noticed but you probably haven’t noticed, avocado and peanut roll is a real thing. A real, real good thing. So clearly that’s going to be happening here. Cauliflower rice makes these kinda extra silly and fun to eat (and not raw) but it’s definitely not a requirement. The only requirement is that you have to prick them with toothpicks like a surgeon. That’s all. Use whatever veggie combinations you’re craving. I found these pickled asparagus spears at the grocery store (I say “grocery store” because I’m pretending I found some neighborhood gem instead of a Whole Foods in Manhattan) and they reminded me of all the pickled and marinated goodies in my favorite veggie sushi rolls.
Raw Vegan Cucumber Veggie Rolls
Ingredients:
one cucumber, thinly sliced with either a mandoline or a vegetable peeler
good starting-point fillers, cut into adorable matchsticks: carrots, cucumber, avocado, pickled asparagus spears or green beans, marinated mushrooms, bell peppers
other things that could be delightful stuffed in there: peanut butter, cream cheese (vegan cream cheese if you want it that way), spicy sriracha veganaise, lettuce
some cauliflower florets if you want some good ol’ fake rice
toothpicks
This could honestly not be any easier: If you want cauliflower rice for some not-raw vegan cucumber veggie rolls, stick your cauliflower florets in a food processor. Pulse until it’s about the consistency of rice. Put in a microwave-safe bowl, cover, and microwave for 5 minutes. Fluff that shit. Lay out a strip of cucumber on your cutting board or whatever. If you want “rice,” pat some down on the cucumber strip, packing it ever so slightly and keeping a fair margin along the edges for spreading. If you want to keep the roll raw and “rice”-less, disregard all of that nonsense you just read. Proceed. On the left end of the strip of cucumber, lay your filling in a little stack or whatnot. Roll from left to right, keeping it as tight as you can. Stab with a toothpick. Put on a platter. Feel fancy. Eat.
<filename>src/edu/kit/iks/cryptographics/caesar/view/demo/partial/Result.java /** * */ package edu.kit.iks.cryptographics.caesar.view.demo.partial; import java.util.AbstractMap.SimpleEntry; import java.util.List; import edu.kit.iks.cryptographicslib.framework.view.partial.AbstractPartialView; /** * @author <NAME> * */ public class Result extends AbstractPartialView { /** * Serial version UID. */ private static final long serialVersionUID = 4139653062499850190L; /** * @param variables */ public Result(List<SimpleEntry<String, String>> variables) { super(variables); } /* (non-Javadoc) * @see edu.kit.iks.cryptographicslib.framework.view.partial.AbstractPartialView#preparePartialView() */ @Override public void preparePartialView() { this.addText(this.getVariableValue("result")); } }
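// A hedged usage sketch (an assumption, not part of the original framework code): constructing
// the partial view above with the single "result" variable it reads via getVariableValue().
// The variable value and the calling code are invented purely for illustration; in the real
// application the surrounding framework presumably supplies both.
class ResultDemo {
    public static void main(String[] args) {
        List<SimpleEntry<String, String>> variables = new java.util.ArrayList<>();
        variables.add(new SimpleEntry<>("result", "KHOOR ZRUOG")); // hypothetical cipher text to display
        Result view = new Result(variables);
        view.preparePartialView(); // adds the "result" text to the partial view
    }
}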
Performance of Rib Plate Hardware in an Elderly Woman Receiving Cardiopulmonary Resuscitation after Surgical Rib Fixation
Rib fractures are a common injury in blunt thoracic trauma and contribute to significant morbidity and mortality. A review of the National Trauma Data Bank identified 64,750 patients with rib fractures, with an overall mortality of 10 per cent; the mortality rate in this population increased with each additional rib fracture.1 Flail chest, defined as fractures in two or more places in each of three or more adjacent ribs, producing a free-floating segment, has been associated with the need for mechanical ventilation in 66 per cent of patients, development of acute respiratory distress syndrome in 27 per cent, and an overall mortality of 33 per cent.2 Surgical rib fixation (SRF) remains controversial. Benefit has been shown in certain patient populations, but the extent to which the practice should be routinely implemented has not been established. In patients with flail chest, studies have shown improved outcomes after SRF. A systematic review and meta-analysis by Leinicke et al.3 showed that patients undergoing operative fixation for flail chest had 4.5 fewer days of mechanical ventilation, 3.5 fewer days in the intensive care unit (ICU), and four fewer days in the hospital than those managed nonoperatively. Prospective studies are now being conducted to assess the benefits of and indications for SRF. Cardiopulmonary resuscitation (CPR) can be a lifesaving intervention with significant morbidity. Numerous studies have analyzed the incidence of rib fractures after CPR, which ranges from 13 to 97 per cent, with select studies characterizing the patterns and locations of fractures along the rib cage after CPR.4 Most fractures occur in an anterior or lateral location along the middle of the thorax, which was also the zone in which our patient sustained her fractures after blunt trauma. Posterior rib fractures are notably rare after CPR, yet this was the predominant location of new fractures in our patient following SRF and CPR. We present the case of a 65-year-old woman who presented to our academic level I trauma center after a motor vehicle crash. She was noted to be significantly altered upon arrival and was intubated for airway protection. She was found to have a traumatic brain injury, a fractured left clavicle, a left pulmonary contusion, a left hemopneumothorax, and multiple fractures of left-sided ribs 2 through 8 with a flail segment of ribs 4 and 5. A left-sided chest tube was placed and she was admitted to the ICU. By hospital day seven, it was felt that the patient could not be weaned from the ventilator secondary to her injuries and advanced age. In light of this, she was taken to the operating room for plating of her fractured left ribs and placement of a tracheostomy tube. The patient had the fracture pattern described in Table 1, which also details the locations of plate placement. MatrixRIB Fixation System plates and screws (DePuy Synthes, West Chester, PA) were used for surgical fixation, with one posterior intramedullary splint placed in rib five; this was chosen to address a free-floating segment located posteriorly. The procedure was without complications and she returned to the ICU postoperatively. On postoperative day (POD) 1, her chest tube was placed to water seal, and on POD 3 her chest tube was removed with no residual pneumothorax.
On POD 4 the patient was noted to be agitated and was suspected to have had an aspiration event. She became bradycardic and arrested with pulseless electrical activity. CPR was initiated, and the patient converted to normal sinus rhythm after one cycle. During the work-up for
Last year, chemists at the University of Manchester, in England, made headlines in the origin-of-life field by developing a recipe for pyrimidine ribonucleotides, which are two of the four fundamental units of RNA, that works under conditions thought to be feasible during Earth’s earliest days (C&EN, Dec. 21, 2009, page 37). Part of that team—John D. Sutherland, now at the University of Cambridge, and Matthew W. Powner, now a postdoctoral fellow with Jack W. Szostak at Harvard University—has found a three-component reaction that makes precursors of the pyrimidines’ purine counterparts (J. Am. Chem. Soc., DOI: 10.1021/ja108197s). The reaction runs in water and combines a variety of aldehydes (blue) with a 5-aminoimidazole (red) and a 2-aminooxazole (green). The imidazole can arise from cyanide tetramers and has been studied as a purine precursor, and the oxazole is a building block the team relied on in the previous study. “What’s key about this work is that it plays off the same chemistry that so robustly takes you to the pyrimidines,” because in the origin-of-life field it’s important to find chemical pathways by which the four ribonucleotides may have come about, explains RNA researcher Gerald F. Joyce of Scripps Research Institute.
Nira: view, review, and present GBytes-sized assets with interactive rendering on any device Nira is an asset review and collaboration platform capable of rendering massive 3D production files in real time for interactive web-based viewing on any device, including lower-powered mobile smartphones and tablets. Nira achieves this by employing a custom server-side asset ingestion pipeline, a custom server-side real time renderer, a collection of intuitive markup and review tools for artists/designers, and existing hardware video encode/decode capabilities of both server-side and client-side devices.
<filename>src/main/java/de/unistuttgart/ims/coref/annotator/document/op/RedoableOperation.java<gh_stars>10-100 package de.unistuttgart.ims.coref.annotator.document.op; public interface RedoableOperation extends Operation { }
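// A hedged sketch (an assumption, not project code) of how a marker interface like this is
// typically consumed: a history keeps only the operations that opt in to redo by implementing
// RedoableOperation. The class below is invented purely for illustration.
class RedoHistorySketch {
    private final java.util.Deque<RedoableOperation> redoable = new java.util.ArrayDeque<>();

    /** Remember an applied operation, but only if it can be redone. */
    void record(Operation op) {
        if (op instanceof RedoableOperation) {
            redoable.push((RedoableOperation) op);
        }
    }

    /** Returns the most recently recorded redoable operation, or null if none. */
    RedoableOperation nextRedo() {
        return redoable.poll();
    }
}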
<filename>substrate/frame/support/procedural/src/construct_runtime/expand/event.rs // This file is part of Substrate. // Copyright (C) 2021 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: Apache-2.0 // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License use crate::construct_runtime::Pallet; use proc_macro2::TokenStream; use quote::quote; use syn::{Generics, Ident}; pub fn expand_outer_event( runtime: &Ident, pallet_decls: &[Pallet], scrate: &TokenStream, ) -> syn::Result<TokenStream> { let mut event_variants = TokenStream::new(); let mut event_conversions = TokenStream::new(); let mut query_event_part_macros = Vec::new(); for pallet_decl in pallet_decls { if let Some(pallet_entry) = pallet_decl.find_part("Event") { let path = &pallet_decl.path; let pallet_name = &pallet_decl.name; let index = pallet_decl.index; let instance = pallet_decl.instance.as_ref(); let generics = &pallet_entry.generics; if instance.is_some() && generics.params.is_empty() { let msg = format!( "Instantiable pallet with no generic `Event` cannot \ be constructed: pallet `{}` must have generic `Event`", pallet_name, ); return Err(syn::Error::new(pallet_name.span(), msg)) } let part_is_generic = !generics.params.is_empty(); let pallet_event = match (instance, part_is_generic) { (Some(inst), true) => quote!(#path::Event::<#runtime, #path::#inst>), (Some(inst), false) => quote!(#path::Event::<#path::#inst>), (None, true) => quote!(#path::Event::<#runtime>), (None, false) => quote!(#path::Event), }; event_variants.extend(expand_event_variant( runtime, pallet_decl, index, instance, generics, )); event_conversions.extend(expand_event_conversion(scrate, pallet_decl, &pallet_event)); query_event_part_macros.push(quote! { #path::__substrate_event_check::is_event_part_defined!(#pallet_name); }); } } Ok(quote! { #( #query_event_part_macros )* #[derive( Clone, PartialEq, Eq, #scrate::codec::Encode, #scrate::codec::Decode, #scrate::RuntimeDebug, )] #[allow(non_camel_case_types)] pub enum Event { #event_variants } #event_conversions }) } fn expand_event_variant( runtime: &Ident, pallet: &Pallet, index: u8, instance: Option<&Ident>, generics: &Generics, ) -> TokenStream { let path = &pallet.path; let variant_name = &pallet.name; let part_is_generic = !generics.params.is_empty(); match instance { Some(inst) if part_is_generic => { quote!(#[codec(index = #index)] #variant_name(#path::Event<#runtime, #path::#inst>),) }, Some(inst) => { quote!(#[codec(index = #index)] #variant_name(#path::Event<#path::#inst>),) }, None if part_is_generic => { quote!(#[codec(index = #index)] #variant_name(#path::Event<#runtime>),) }, None => { quote!(#[codec(index = #index)] #variant_name(#path::Event),) }, } } fn expand_event_conversion( scrate: &TokenStream, pallet: &Pallet, pallet_event: &TokenStream, ) -> TokenStream { let variant_name = &pallet.name; quote! 
{ impl From<#pallet_event> for Event { fn from(x: #pallet_event) -> Self { Event::#variant_name(x) } } impl #scrate::sp_std::convert::TryInto<#pallet_event> for Event { type Error = (); fn try_into(self) -> #scrate::sp_std::result::Result<#pallet_event, Self::Error> { match self { Self::#variant_name(evt) => Ok(evt), _ => Err(()), } } } } }
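// A hedged illustration (an assumption, not output captured from this macro) of the shape of
// code `expand_outer_event` and `expand_event_conversion` produce for one hypothetical pallet
// named `Balances` whose `Event` is generic over the runtime and assigned index 4:
//
//     #[derive(Clone, PartialEq, Eq, codec::Encode, codec::Decode, RuntimeDebug)]
//     #[allow(non_camel_case_types)]
//     pub enum Event {
//         #[codec(index = 4u8)]
//         Balances(pallet_balances::Event<Runtime>),
//         // ...one variant per pallet that declares an `Event` part
//     }
//
//     impl From<pallet_balances::Event<Runtime>> for Event {
//         fn from(x: pallet_balances::Event<Runtime>) -> Self {
//             Event::Balances(x)
//         }
//     }
//
//     impl sp_std::convert::TryInto<pallet_balances::Event<Runtime>> for Event {
//         type Error = ();
//         fn try_into(self) -> sp_std::result::Result<pallet_balances::Event<Runtime>, ()> {
//             match self {
//                 Self::Balances(evt) => Ok(evt),
//                 _ => Err(()),
//             }
//         }
//     }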
import logging import random import uuid from pathlib import Path from typing import Generator import petname import pytest from beaker import exceptions from beaker.client import Beaker from beaker.data_model import * logger = logging.getLogger(__name__) def unique_name() -> str: return petname.generate() + "-" + str(uuid.uuid4())[:8] def beaker_object_fixture(client: Beaker, service: str): name = unique_name() service_client = getattr(client, service) not_found_exception = getattr(exceptions, f"{service.title()}NotFound") yield name try: logger.info("Attempting to remove %s '%s' from Beaker", service, name) service_client.delete(name) logger.info("Successfully deleted %s '%s' from Beaker", service, name) except not_found_exception: logger.info("%s '%s' not found on Beaker", service.title(), name) @pytest.fixture() def workspace_name() -> str: name = "ai2/beaker-py-testing" return name @pytest.fixture() def alternate_workspace_name() -> str: name = "ai2/beaker-py-testing-alternative" return name @pytest.fixture() def client(workspace_name): beaker_client = Beaker.from_env( session=True, default_workspace=workspace_name, default_org="ai2" ) return beaker_client @pytest.fixture() def alternate_workspace(client: Beaker, alternate_workspace_name: str) -> Workspace: return client.workspace.get(alternate_workspace_name) @pytest.fixture def beaker_org_name() -> str: return "ai2" @pytest.fixture() def beaker_org(client: Beaker, beaker_org_name: str) -> Organization: return client.organization.get(beaker_org_name) @pytest.fixture() def docker_image_name(client: Beaker): image = "hello-world" client.docker.images.pull(image) return image @pytest.fixture() def beaker_image_name(client: Beaker) -> Generator[str, None, None]: yield from beaker_object_fixture(client, "image") @pytest.fixture() def beaker_python_image_name() -> str: return "petew/python-3-10-alpine" @pytest.fixture() def alternate_beaker_image_name(client: Beaker) -> Generator[str, None, None]: yield from beaker_object_fixture(client, "image") @pytest.fixture() def beaker_cluster_name(client: Beaker) -> str: choices = [ "ai2/general-cirrascale", "ai2/allennlp-cirrascale", "ai2/aristo-cirrascale", "ai2/mosaic-cirrascale", "ai2/s2-cirrascale", ] random.shuffle(choices) for cluster in choices: utilization = client.cluster.utilization(cluster) if utilization.queued_jobs == 0: logger.info("Found suitable on-prem cluster '%s'", cluster) return cluster return "ai2/tiny-cpu-testing" @pytest.fixture() def beaker_cloud_cluster_name() -> str: return "ai2/tiny-cpu-testing" @pytest.fixture() def beaker_on_prem_cluster_name() -> str: return "ai2/allennlp-cirrascale" @pytest.fixture() def experiment_name(client: Beaker) -> Generator[str, None, None]: yield from beaker_object_fixture(client, "experiment") @pytest.fixture() def alternate_experiment_name(client: Beaker) -> Generator[str, None, None]: yield from beaker_object_fixture(client, "experiment") @pytest.fixture() def dataset_name(client: Beaker) -> Generator[str, None, None]: yield from beaker_object_fixture(client, "dataset") @pytest.fixture() def alternate_dataset_name(client: Beaker) -> Generator[str, None, None]: yield from beaker_object_fixture(client, "dataset") @pytest.fixture() def download_path(dataset_name, tmp_path) -> Path: path = tmp_path / dataset_name return path @pytest.fixture() def hello_world_experiment_name() -> str: return "hello-world" @pytest.fixture() def hello_world_experiment_id() -> str: return "01FPB5WGRTM33P5AE6A28MT8QF" @pytest.fixture() def hello_world_image_name() 
-> str: return "petew/hello-world" @pytest.fixture() def hello_world_job_id() -> str: return "01G0062R1K182CGR5559GHT5ED" @pytest.fixture() def beaker_node_id() -> str: return "01FXTYPFQ1QQ7XV4SH8VTCRZMG" @pytest.fixture() def secret_name(client: Beaker) -> Generator[str, None, None]: yield from beaker_object_fixture(client, "secret") @pytest.fixture() def archived_workspace_name() -> str: return "ai2/beaker-py-testing-archived" @pytest.fixture() def archived_workspace(client: Beaker, archived_workspace_name: str) -> Workspace: workspace = client.workspace.ensure(archived_workspace_name) if not workspace.archived: return client.workspace.archive(archived_workspace_name) else: return workspace @pytest.fixture() def squad_dataset_name() -> str: return "petew/squad-train" @pytest.fixture() def squad_dataset_file_name() -> str: return "squad-train.arrow" @pytest.fixture() def alternate_user(client: Beaker) -> Account: return client.account.get("epwalsh10") @pytest.fixture() def group_name(client: Beaker) -> Generator[str, None, None]: yield from beaker_object_fixture(client, "group") @pytest.fixture() def alternate_group_name(client: Beaker) -> Generator[str, None, None]: yield from beaker_object_fixture(client, "group") @pytest.fixture() def experiment_id_with_metrics() -> str: return "01G371J03VGJGK720TMZWFQNV3" @pytest.fixture() def experiment_id_with_results() -> str: return "01G371J03VGJGK720TMZWFQNV3"
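# A hedged example (not part of the original fixture module, and normally placed in a separate
# test file rather than alongside the fixtures) of how a test might consume the fixtures above.
# It only exercises an API already used in this file (client.workspace.get), so the only
# assumptions are the test name and the assertions themselves.
def test_workspace_lookup(client: Beaker, workspace_name: str):
    workspace = client.workspace.get(workspace_name)
    assert workspace is not None
    assert isinstance(workspace, Workspace)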
package com.tbossi.aenigmata.puzzle.crossword; public interface BorderCell { public enum CellBorders { LEFT,RIGHT,TOP,BOTTOM; } public void addBorders(CellBorders... borders); public void removeBorders(CellBorders... borders); public CellBorders[] getBorders(); public boolean hasBorders(); public boolean hasBorder(CellBorders border); }
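// A hedged sketch (an assumption, not project code) of one straightforward way to back the
// interface above with an EnumSet; the method contracts follow the interface, everything else
// is invented for illustration.
class SimpleBorderCell implements BorderCell {
    private final java.util.EnumSet<CellBorders> borders =
            java.util.EnumSet.noneOf(CellBorders.class);

    @Override
    public void addBorders(CellBorders... toAdd) {
        for (CellBorders border : toAdd) {
            borders.add(border);
        }
    }

    @Override
    public void removeBorders(CellBorders... toRemove) {
        for (CellBorders border : toRemove) {
            borders.remove(border);
        }
    }

    @Override
    public CellBorders[] getBorders() {
        return borders.toArray(new CellBorders[0]);
    }

    @Override
    public boolean hasBorders() {
        return !borders.isEmpty();
    }

    @Override
    public boolean hasBorder(CellBorders border) {
        return borders.contains(border);
    }
}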
/* -*- mode: c++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ /*! Copyright (C) 2006, 2007 StatPro Italia srl This file is part of QuantLib, a free-software/open-source library for financial quantitative analysts and developers - http://quantlib.org/ QuantLib is free software: you can redistribute it and/or modify it under the terms of the QuantLib license. You should have received a copy of the license along with this program; if not, please email <<EMAIL>>. The license is also available online at <http://quantlib.org/license.shtml>. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the license for more details. */ /* This example showcases the CompositeInstrument class. Such class is used to build a static replication of a down-and-out barrier option, as outlined in Section 10.2 of <NAME>'s "The Concepts and Practice of Mathematical Finance" to which we refer the reader. */ #include <ql/qldefines.hpp> #ifdef BOOST_MSVC # include <ql/auto_link.hpp> #endif #include <ql/instruments/compositeinstrument.hpp> #include <ql/instruments/barrieroption.hpp> #include <ql/instruments/europeanoption.hpp> #include <ql/pricingengines/barrier/analyticbarrierengine.hpp> #include <ql/pricingengines/vanilla/analyticeuropeanengine.hpp> #include <ql/exercise.hpp> #include <ql/termstructures/yield/flatforward.hpp> #include <ql/termstructures/volatility/equityfx/blackconstantvol.hpp> #include <ql/quotes/simplequote.hpp> #include <ql/time/calendars/nullcalendar.hpp> #include <iostream> #include <iomanip> using namespace QuantLib; #if defined(QL_ENABLE_SESSIONS) namespace QuantLib { ThreadKey sessionId() { return {}; } } #endif int main(int, char* []) { try { std::cout << std::endl; Date today(29, May, 2006); Settings::instance().evaluationDate() = today; // the option to replicate Barrier::Type barrierType = Barrier::DownOut; Real barrier = 70.0; Real rebate = 0.0; Option::Type type = Option::Put; Real underlyingValue = 100.0; ext::shared_ptr<SimpleQuote> underlying( new SimpleQuote(underlyingValue)); Real strike = 100.0; ext::shared_ptr<SimpleQuote> riskFreeRate(new SimpleQuote(0.04)); ext::shared_ptr<SimpleQuote> volatility(new SimpleQuote(0.20)); Date maturity = today + 1*Years; std::cout << std::endl ; // write column headings Size widths[] = { 45, 15, 15 }; Size totalWidth = widths[0]+widths[1]+widths[2]; std::string rule(totalWidth, '-'), dblrule(totalWidth, '='); std::cout << dblrule << std::endl; std::cout << "Initial market conditions" << std::endl; std::cout << dblrule << std::endl; std::cout << std::setw(widths[0]) << std::left << "Option" << std::setw(widths[1]) << std::left << "NPV" << std::setw(widths[2]) << std::left << "Error" << std::endl; std::cout << rule << std::endl; // bootstrap the yield/vol curves DayCounter dayCounter = Actual365Fixed(); Handle<Quote> h1(riskFreeRate); Handle<Quote> h2(volatility); Handle<YieldTermStructure> flatRate( ext::shared_ptr<YieldTermStructure>( new FlatForward(0, NullCalendar(), h1, dayCounter))); Handle<BlackVolTermStructure> flatVol( ext::shared_ptr<BlackVolTermStructure>( new BlackConstantVol(0, NullCalendar(), h2, dayCounter))); // instantiate the option ext::shared_ptr<Exercise> exercise( new EuropeanExercise(maturity)); ext::shared_ptr<StrikedTypePayoff> payoff( new PlainVanillaPayoff(type, strike)); ext::shared_ptr<BlackScholesProcess> bsProcess( new BlackScholesProcess(Handle<Quote>(underlying), 
flatRate, flatVol)); ext::shared_ptr<PricingEngine> barrierEngine( new AnalyticBarrierEngine(bsProcess)); ext::shared_ptr<PricingEngine> europeanEngine( new AnalyticEuropeanEngine(bsProcess)); BarrierOption referenceOption(barrierType, barrier, rebate, payoff, exercise); referenceOption.setPricingEngine(barrierEngine); Real referenceValue = referenceOption.NPV(); std::cout << std::setw(widths[0]) << std::left << "Original barrier option" << std::fixed << std::setw(widths[1]) << std::left << referenceValue << std::setw(widths[2]) << std::left << "N/A" << std::endl; // Replicating portfolios CompositeInstrument portfolio1, portfolio2, portfolio3; // Final payoff first (the same for all portfolios): // as shown in Joshi, a put struck at K... ext::shared_ptr<Instrument> put1( new EuropeanOption(payoff, exercise)); put1->setPricingEngine(europeanEngine); portfolio1.add(put1); portfolio2.add(put1); portfolio3.add(put1); // ...minus a digital put struck at B of notional K-B... ext::shared_ptr<StrikedTypePayoff> digitalPayoff( new CashOrNothingPayoff(Option::Put, barrier, 1.0)); ext::shared_ptr<Instrument> digitalPut( new EuropeanOption(digitalPayoff, exercise)); digitalPut->setPricingEngine(europeanEngine); portfolio1.subtract(digitalPut, strike-barrier); portfolio2.subtract(digitalPut, strike-barrier); portfolio3.subtract(digitalPut, strike-barrier); // ...minus a put option struck at B. ext::shared_ptr<StrikedTypePayoff> lowerPayoff( new PlainVanillaPayoff(Option::Put, barrier)); ext::shared_ptr<Instrument> put2( new EuropeanOption(lowerPayoff, exercise)); put2->setPricingEngine(europeanEngine); portfolio1.subtract(put2); portfolio2.subtract(put2); portfolio3.subtract(put2); // Now we use puts struck at B to kill the value of the // portfolio on a number of points (B,t). For the first // portfolio, we'll use 12 dates at one-month's distance. Integer i; for (i=12; i>=1; i--) { // First, we instantiate the option... Date innerMaturity = today + i*Months; ext::shared_ptr<Exercise> innerExercise( new EuropeanExercise(innerMaturity)); ext::shared_ptr<StrikedTypePayoff> innerPayoff( new PlainVanillaPayoff(Option::Put, barrier)); ext::shared_ptr<Instrument> putn( new EuropeanOption(innerPayoff, innerExercise)); putn->setPricingEngine(europeanEngine); // ...second, we evaluate the current portfolio and the // latest put at (B,t)... Date killDate = today + (i-1)*Months; Settings::instance().evaluationDate() = killDate; underlying->setValue(barrier); Real portfolioValue = portfolio1.NPV(); Real putValue = putn->NPV(); // ...finally, we estimate the notional that kills the // portfolio value at that point... Real notional = portfolioValue/putValue; // ...and we subtract from the portfolio a put with such // notional. portfolio1.subtract(putn, notional); } // The portfolio being complete, we return to today's market... Settings::instance().evaluationDate() = today; underlying->setValue(underlyingValue); // ...and output the value. Real portfolioValue = portfolio1.NPV(); Real error = portfolioValue - referenceValue; std::cout << std::setw(widths[0]) << std::left << "Replicating portfolio (12 dates)" << std::fixed << std::setw(widths[1]) << std::left << portfolioValue << std::setw(widths[2]) << std::left << error << std::endl; // For the second portfolio, we'll use 26 dates at two-weeks' // distance. for (i=52; i>=2; i-=2) { // Same as above. 
Date innerMaturity = today + i*Weeks; ext::shared_ptr<Exercise> innerExercise( new EuropeanExercise(innerMaturity)); ext::shared_ptr<StrikedTypePayoff> innerPayoff( new PlainVanillaPayoff(Option::Put, barrier)); ext::shared_ptr<Instrument> putn( new EuropeanOption(innerPayoff, innerExercise)); putn->setPricingEngine(europeanEngine); Date killDate = today + (i-2)*Weeks; Settings::instance().evaluationDate() = killDate; underlying->setValue(barrier); Real portfolioValue = portfolio2.NPV(); Real putValue = putn->NPV(); Real notional = portfolioValue/putValue; portfolio2.subtract(putn, notional); } Settings::instance().evaluationDate() = today; underlying->setValue(underlyingValue); portfolioValue = portfolio2.NPV(); error = portfolioValue - referenceValue; std::cout << std::setw(widths[0]) << std::left << "Replicating portfolio (26 dates)" << std::fixed << std::setw(widths[1]) << std::left << portfolioValue << std::setw(widths[2]) << std::left << error << std::endl; // For the third portfolio, we'll use 52 dates at one-week's // distance. for (i=52; i>=1; i--) { // Same as above. Date innerMaturity = today + i*Weeks; ext::shared_ptr<Exercise> innerExercise( new EuropeanExercise(innerMaturity)); ext::shared_ptr<StrikedTypePayoff> innerPayoff( new PlainVanillaPayoff(Option::Put, barrier)); ext::shared_ptr<Instrument> putn( new EuropeanOption(innerPayoff, innerExercise)); putn->setPricingEngine(europeanEngine); Date killDate = today + (i-1)*Weeks; Settings::instance().evaluationDate() = killDate; underlying->setValue(barrier); Real portfolioValue = portfolio3.NPV(); Real putValue = putn->NPV(); Real notional = portfolioValue/putValue; portfolio3.subtract(putn, notional); } Settings::instance().evaluationDate() = today; underlying->setValue(underlyingValue); portfolioValue = portfolio3.NPV(); error = portfolioValue - referenceValue; std::cout << std::setw(widths[0]) << std::left << "Replicating portfolio (52 dates)" << std::fixed << std::setw(widths[1]) << std::left << portfolioValue << std::setw(widths[2]) << std::left << error << std::endl; // Now we modify the market condition to see whether the // replication holds. First, we change the underlying value so // that the option is out of the money. 
std::cout << dblrule << std::endl; std::cout << "Modified market conditions: out of the money" << std::endl; std::cout << dblrule << std::endl; std::cout << std::setw(widths[0]) << std::left << "Option" << std::setw(widths[1]) << std::left << "NPV" << std::setw(widths[2]) << std::left << "Error" << std::endl; std::cout << rule << std::endl; underlying->setValue(110.0); referenceValue = referenceOption.NPV(); std::cout << std::setw(widths[0]) << std::left << "Original barrier option" << std::fixed << std::setw(widths[1]) << std::left << referenceValue << std::setw(widths[2]) << std::left << "N/A" << std::endl; portfolioValue = portfolio1.NPV(); error = portfolioValue - referenceValue; std::cout << std::setw(widths[0]) << std::left << "Replicating portfolio (12 dates)" << std::fixed << std::setw(widths[1]) << std::left << portfolioValue << std::setw(widths[2]) << std::left << error << std::endl; portfolioValue = portfolio2.NPV(); error = portfolioValue - referenceValue; std::cout << std::setw(widths[0]) << std::left << "Replicating portfolio (26 dates)" << std::fixed << std::setw(widths[1]) << std::left << portfolioValue << std::setw(widths[2]) << std::left << error << std::endl; portfolioValue = portfolio3.NPV(); error = portfolioValue - referenceValue; std::cout << std::setw(widths[0]) << std::left << "Replicating portfolio (52 dates)" << std::fixed << std::setw(widths[1]) << std::left << portfolioValue << std::setw(widths[2]) << std::left << error << std::endl; // Next, we change the underlying value so that the option is // in the money. std::cout << dblrule << std::endl; std::cout << "Modified market conditions: in the money" << std::endl; std::cout << dblrule << std::endl; std::cout << std::setw(widths[0]) << std::left << "Option" << std::setw(widths[1]) << std::left << "NPV" << std::setw(widths[2]) << std::left << "Error" << std::endl; std::cout << rule << std::endl; underlying->setValue(90.0); referenceValue = referenceOption.NPV(); std::cout << std::setw(widths[0]) << std::left << "Original barrier option" << std::fixed << std::setw(widths[1]) << std::left << referenceValue << std::setw(widths[2]) << std::left << "N/A" << std::endl; portfolioValue = portfolio1.NPV(); error = portfolioValue - referenceValue; std::cout << std::setw(widths[0]) << std::left << "Replicating portfolio (12 dates)" << std::fixed << std::setw(widths[1]) << std::left << portfolioValue << std::setw(widths[2]) << std::left << error << std::endl; portfolioValue = portfolio2.NPV(); error = portfolioValue - referenceValue; std::cout << std::setw(widths[0]) << std::left << "Replicating portfolio (26 dates)" << std::fixed << std::setw(widths[1]) << std::left << portfolioValue << std::setw(widths[2]) << std::left << error << std::endl; portfolioValue = portfolio3.NPV(); error = portfolioValue - referenceValue; std::cout << std::setw(widths[0]) << std::left << "Replicating portfolio (52 dates)" << std::fixed << std::setw(widths[1]) << std::left << portfolioValue << std::setw(widths[2]) << std::left << error << std::endl; // Finally, a word of warning for those (shame on them) who // run the example but do not read the code. std::cout << dblrule << std::endl; std::cout << std::endl << "The replication seems to be less robust when volatility and \n" << "risk-free rate are changed. Feel free to experiment with \n" << "the example and contribute a patch if you spot any errors." << std::endl; return 0; } catch (std::exception& e) { std::cerr << e.what() << std::endl; return 1; } catch (...) 
{ std::cerr << "unknown error" << std::endl; return 1; } }