code
stringlengths
4
1.01M
language
stringclasses
2 values
/* * Copyright 2014 BrightTag, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.brighttag.agathon.dao; import javax.annotation.Nullable; import com.google.common.collect.ImmutableSet; import com.brighttag.agathon.model.CassandraInstance; /** * DAO for Cassandra Instances. * * @author codyaray * @since 5/12/2012 */ public interface CassandraInstanceDao { /** * Returns the set of Cassandra instances in a ring. * * @param ring name of the Cassandra ring * @return set of Cassandra instances in the ring * @throws BackingStoreException if there was a problem communicating with the backing store. */ ImmutableSet<CassandraInstance> findAll(String ring) throws BackingStoreException; /** * Returns the Cassandra instance with the given {@code id} or {@code null} if not found. * * @param ring name of the Cassandra ring * @param id the Cassandra instance ID * @return the Cassandra instance or {@code null} if not found * @throws BackingStoreException if there was a problem communicating with the backing store. */ @Nullable CassandraInstance findById(String ring, int id) throws BackingStoreException; /** * Saves the Cassandra {@code instance}. * * @param ring name of the Cassandra ring * @param instance the Cassandra instance */ void save(String ring, CassandraInstance instance); /** * Deletes the Cassandra {@code instance}. * * @param ring name of the Cassandra ring * @param instance the Cassandra instance */ void delete(String ring, CassandraInstance instance); }
Java
/* * MainActivity.java * * Copyright (C) 2013 6 Wunderkinder GmbH. * * @author Jose L Ugia - @Jl_Ugia * @author Antonio Consuegra - @aconsuegra * @author Cesar Valiente - @CesarValiente * @author Benedikt Lehnert - @blehnert * @author Timothy Achumba - @iam_timm * @version 1.0 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.wunderlist.slidinglayersample; import android.annotation.SuppressLint; import android.app.Activity; import android.content.SharedPreferences; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Bundle; import android.preference.PreferenceManager; import android.view.KeyEvent; import android.view.MenuItem; import android.view.View; import android.widget.RelativeLayout.LayoutParams; import android.widget.TextView; import com.wunderlist.slidinglayer.LayerTransformer; import com.wunderlist.slidinglayer.SlidingLayer; import com.wunderlist.slidinglayer.transformer.AlphaTransformer; import com.wunderlist.slidinglayer.transformer.RotationTransformer; import com.wunderlist.slidinglayer.transformer.SlideJoyTransformer; public class MainActivity extends Activity { private SlidingLayer mSlidingLayer; private TextView swipeText; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); bindViews(); initState(); } @SuppressLint("NewApi") @Override protected void onResume() { super.onResume(); if (Build.VERSION.SDK_INT >= 
Build.VERSION_CODES.HONEYCOMB) { getActionBar().setDisplayHomeAsUpEnabled(true); } } /** * View binding */ private void bindViews() { mSlidingLayer = (SlidingLayer) findViewById(R.id.slidingLayer1); swipeText = (TextView) findViewById(R.id.swipeText); } /** * Initializes the origin state of the layer */ private void initState() { SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this); setupSlidingLayerPosition(prefs.getString("layer_location", "right")); setupSlidingLayerTransform(prefs.getString("layer_transform", "none")); setupShadow(prefs.getBoolean("layer_has_shadow", false)); setupLayerOffset(prefs.getBoolean("layer_has_offset", false)); setupPreviewMode(prefs.getBoolean("preview_mode_enabled", false)); } private void setupSlidingLayerPosition(String layerPosition) { LayoutParams rlp = (LayoutParams) mSlidingLayer.getLayoutParams(); int textResource; Drawable d; switch (layerPosition) { case "right": textResource = R.string.swipe_right_label; d = getResources().getDrawable(R.drawable.container_rocket_right); mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_RIGHT); break; case "left": textResource = R.string.swipe_left_label; d = getResources().getDrawable(R.drawable.container_rocket_left); mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_LEFT); break; case "top": textResource = R.string.swipe_up_label; d = getResources().getDrawable(R.drawable.container_rocket); mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_TOP); rlp.width = LayoutParams.MATCH_PARENT; rlp.height = getResources().getDimensionPixelSize(R.dimen.layer_size); break; default: textResource = R.string.swipe_down_label; d = getResources().getDrawable(R.drawable.container_rocket); mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_BOTTOM); rlp.width = LayoutParams.MATCH_PARENT; rlp.height = getResources().getDimensionPixelSize(R.dimen.layer_size); } d.setBounds(0, 0, d.getIntrinsicWidth(), d.getIntrinsicHeight()); swipeText.setCompoundDrawables(null, d, null, null); 
swipeText.setText(getResources().getString(textResource)); mSlidingLayer.setLayoutParams(rlp); } private void setupSlidingLayerTransform(String layerTransform) { LayerTransformer transformer; switch (layerTransform) { case "alpha": transformer = new AlphaTransformer(); break; case "rotation": transformer = new RotationTransformer(); break; case "slide": transformer = new SlideJoyTransformer(); break; default: return; } mSlidingLayer.setLayerTransformer(transformer); } private void setupShadow(boolean enabled) { if (enabled) { mSlidingLayer.setShadowSizeRes(R.dimen.shadow_size); mSlidingLayer.setShadowDrawable(R.drawable.sidebar_shadow); } else { mSlidingLayer.setShadowSize(0); mSlidingLayer.setShadowDrawable(null); } } private void setupLayerOffset(boolean enabled) { int offsetDistance = enabled ? getResources().getDimensionPixelOffset(R.dimen.offset_distance) : 0; mSlidingLayer.setOffsetDistance(offsetDistance); } private void setupPreviewMode(boolean enabled) { int previewOffset = enabled ? getResources().getDimensionPixelOffset(R.dimen.preview_offset_distance) : -1; mSlidingLayer.setPreviewOffsetDistance(previewOffset); } public void buttonClicked(View v) { switch (v.getId()) { case R.id.buttonOpen: mSlidingLayer.openLayer(true); break; case R.id.buttonClose: mSlidingLayer.closeLayer(true); break; } } @Override public boolean onKeyDown(int keyCode, KeyEvent event) { switch (keyCode) { case KeyEvent.KEYCODE_BACK: if (mSlidingLayer.isOpened()) { mSlidingLayer.closeLayer(true); return true; } default: return super.onKeyDown(keyCode, event); } } @Override public boolean onOptionsItemSelected(MenuItem item) { finish(); return true; } }
Java
\begin{ManPage}{\label{man-condor-cold-start}\Condor{cold\_start}}{1} {install and start Condor on this machine} \Synopsis \SynProg{\Condor{cold\_start}} \Opt{-help} \SynProg{\Condor{cold\_start}} \oOptArg{-basedir}{directory} \oOpt{-force} %\oOpt{-dyn} \oOpt{\Opt{-setuponly} \Bar \Opt{-runonly}} \oOptArg{-arch}{architecture} \oOptArg{-site}{repository} \oOptArg{-localdir}{directory} \oOptArg{-runlocalconfig}{file} \oOptArg{-logarchive}{archive} \oOptArg{-spoolarchive}{archive} \oOptArg{-execarchive}{archive} \oOpt{-filelock} \oOpt{-pid} \oOptArg{-artifact}{filename} \oOpt{-wget} \oOptArg{-globuslocation}{directory} \OptArg{-configfile}{file} \index{Condor commands!condor\_cold\_start} \index{Deployment commands!condor\_cold\_start} \index{condor\_cold\_start} \Description \Condor{cold\_start} installs and starts Condor on this machine, setting up or using a predefined configuration. In addition, it has the functionality to determine the local architecture if one is not specified. Additionally, this program can install pre-made \File{log}, \File{execute}, and/or \File{spool} directories by specifying the archived versions. \begin{Options} \OptItem{\OptArg{-arch}{architecturestr}}{ Use the given \Arg{architecturestr} to fetch the installation package. The string is in the format: \Sinful{condor\_version}-\Sinful{machine\_arch}-\Sinful{os\_name}-\Sinful{os\_version} (for example 6.6.7-i686-Linux-2.4). The portion of this string \Sinful{condor\_version} may be replaced with the string "latest" (for example, latest-i686-Linux-2.4) to substitute the most recent version of Condor. } \OptItem{\OptArg{-artifact}{filename}}{ Use \Arg{filename} for name of the artifact file used to determine whether the \Condor{master} daemon is still alive. } \OptItem{\OptArg{-basedir}{directory}}{ The directory to install or find the Condor executables and libraries. When not specified, the current working directory is assumed. 
} % \OptItem{\Opt{-dyn}}{ % Use dynamic names for the log, spool, and execute directories, as % well as the binding configuration file. This option can be used % to run multiple instances of condor in the same local directory. % This option cannot be used with \Opt{-*archive} options. The % dynamic names are created by appending the IP address and process % id of the master to the file names. % } \OptItem{\OptArg{-execarchive}{archive}}{ Create the Condor \File{execute} directory from the given \Arg{archive} file. } \OptItem{\Opt{-filelock}}{ Specifies that this program should use a POSIX file lock midwife program to create an artifact of the birth of a \Condor{master} daemon. A file lock undertaker can later be used to determine whether the \Condor{master} daemon has exited. This is the preferred option when the user wants to check the status of the \Condor{master} daemon from another machine that shares a distributed file system that supports POSIX file locking, for example, AFS. } \OptItem{\Opt{-force}}{ Overwrite previously installed files, if necessary. } \OptItem{\OptArg{-globuslocation}{directory}}{ The location of the globus installation on this machine. When not specified \File{/opt/globus} is the directory used. This option is only necessary when other options of the form \Opt{-*archive} are specified. } \OptItem{\Opt{-help}}{ Display brief usage information and exit. } \OptItem{\OptArg{-localdir}{directory}}{ The directory where the Condor \File{log}, \File{spool}, and \File{execute} directories will be installed. Each running instance of Condor must have its own local directory. % or the dynamic naming option must be enabled. } \OptItem{\OptArg{-logarchive}{archive}}{ Create the Condor log directory from the given \Arg{archive} file. } \OptItem{\Opt{-pid}}{ This program is to use a unique process id midwife program to create an artifact of the birth of a \Condor{master} daemon. 
A unique pid undertaker can later be used to determine whether the \Condor{master} daemon has exited. This is the default option and the preferred method to check the status of the \Condor{master} daemon from the same machine it was started on. } \OptItem{\OptArg{-runlocalconfig}{file}}{ A special local configuration file bound into the Condor configuration at runtime. This file only affects the instance of Condor started by this command. No other Condor instance sharing the same global configuration file will be affected. } \OptItem{\Opt{-runonly}}{ Run Condor from the specified installation directory without installing it. It is possible to run several instantiations of Condor from a single installation. } \OptItem{\Opt{-setuponly}}{ Install Condor without running it. } \OptItem{\OptArg{-site}{repository}}{ The ftp, http, gsiftp, or mounted file system directory where the installation packages can be found (for example, \File{www.cs.example.edu/packages/coldstart}). } \OptItem{\OptArg{-spoolarchive}{archive}}{ Create the Condor spool directory from the given \Arg{archive} file. } \OptItem{\Opt{-wget}}{ Use \Prog{wget} to fetch the \File{log}, \File{spool}, and \File{execute} directories, if other options of the form \Opt{-*archive} are specified. \Prog{wget} must be installed on the machine and in the user's path. } \OptItem{\OptArg{-configfile}{file}}{ A required option to specify the Condor configuration file to use for this installation. This file can be located on an http, ftp, or gsiftp site, or alternatively on a mounted file system. } \end{Options} \ExitStatus \Condor{cold\_start} will exit with a status value of 0 (zero) upon success, and non-zero otherwise. 
\Examples To start a Condor installation on the current machine, using \texttt{http://www.example.com/Condor/deployment} as the installation site: \footnotesize \begin{verbatim} % condor_cold_start \ -configfile http://www.example.com/Condor/deployment/condor_config.mobile \ -site http://www.example.com/Condor/deployment \end{verbatim} \normalsize Optionally if this instance of Condor requires a local configuration file \File{condor\_config.local}: \footnotesize \begin{verbatim} % condor_cold_start \ -configfile http://www.example.com/Condor/deployment/condor_config.mobile \ -site http://www.example.com/Condor/deployment \ -runlocalconfig condor_config.local \end{verbatim} \normalsize \SeeAlso \Condor{cold\_stop} (on page~\pageref{man-condor-cold-stop}), \Prog{filelock\_midwife} (on page~\pageref{man-filelock-midwife}), \Prog{uniq\_pid\_midwife} (on page~\pageref{man-uniq-pid-midwife}). \end{ManPage}
Java
--- title: 'Webapp idea: #nowplaying radio' category: Projects tags: - twitter - programming - Ruby - Ruby on Rails --- An opportunity to learn Ruby on Rails. Use a Twitter library to fetch tweets with the hashtag #nowplaying. Present the user with an interface with exactly one button: Play Parse Twitter's results and fetch songs from youtube (embedded player, ajax/iframe) User clicks play, the #nowplaying radio begins. Ideas for later: add features such as result narrowing, sharing etc. # Update: I actually coded this during the day and got some insight into Rails. The coding part was actually quite easy, taking into account that I&#039;d never met Ruby before, but the real obstacle was deployment. Heroku is awesome, really, but the beginner - I - failed miserably with dependencies (twitter and youtube gems). Long story short, 2 hours of messing with git, Gemfile and bundle and I gave up. The app works, but only on localhost. Maybe I&#039;ll get help later. ![Screenshot of the now playing app]({{ site.url }}/content/2011/02/nowplaying.png)
Java
/*
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 * Changes may cause incorrect behavior and will be lost if the code is
 * regenerated.
 */

'use strict';

/**
 * @class
 * Initializes a new instance of the USqlExternalDataSourceList class.
 * @constructor
 * A Data Lake Analytics catalog U-SQL external datasource item list.
 *
 */
class USqlExternalDataSourceList extends Array {
  constructor() {
    super();
  }

  /**
   * Defines the metadata of USqlExternalDataSourceList
   *
   * @returns {object} metadata of USqlExternalDataSourceList
   *
   */
  mapper() {
    // Metadata describing a single element of the `value` sequence.
    const elementMetadata = {
      required: false,
      serializedName: 'USqlExternalDataSourceElementType',
      type: {
        name: 'Composite',
        className: 'USqlExternalDataSource'
      }
    };

    return {
      required: false,
      serializedName: 'USqlExternalDataSourceList',
      type: {
        name: 'Composite',
        className: 'USqlExternalDataSourceList',
        modelProperties: {
          nextLink: {
            required: false,
            serializedName: 'nextLink',
            type: {
              name: 'String'
            }
          },
          value: {
            required: false,
            readOnly: true,
            serializedName: '',
            type: {
              name: 'Sequence',
              element: elementMetadata
            }
          }
        }
      }
    };
  }
}

module.exports = USqlExternalDataSourceList;
Java
-- Fragment of a templated query: {base} is a CTE list substituted in upstream,
-- and Source_to_Standard is expected to be defined by that preamble.
{base},
-- Distinct standard-concept mappings for source vocabulary 'jnj_tru_p_spclty'.
Standard as
(
  SELECT distinct SOURCE_CODE, TARGET_CONCEPT_ID, TARGET_DOMAIN_ID,
    SOURCE_VALID_START_DATE, SOURCE_VALID_END_DATE
  FROM Source_to_Standard
  WHERE lower(SOURCE_VOCABULARY_ID) IN ('jnj_tru_p_spclty')
    -- NOTE(review): with OR, a row whose TARGET_STANDARD_CONCEPT = '' still passes
    -- (the IS NOT NULL test is true for ''); confirm whether AND was intended here.
    AND (TARGET_STANDARD_CONCEPT IS NOT NULL or TARGET_STANDARD_CONCEPT != '')
    -- Keep only mappings that have not been invalidated.
    AND (TARGET_INVALID_REASON IS NULL or TARGET_INVALID_REASON = '')
)
select distinct Standard.* from Standard
Java
/**
 * @license
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 */

// FOAM view that renders one log entry as a custom <log-entry> DOM element:
// the entry id, then the entry contents wrapped in a tag named after its mode.
// NOTE(review): the /* ... */ bodies inside `templates` below are FOAM template
// source consumed at runtime, NOT comments — they must not be edited as docs.
CLASS({
  name: 'LogEntryView',
  package: 'foam.flow',
  extends: 'foam.flow.Element',

  constants: {
    // Tag name used for this view's root DOM element.
    ELEMENT_NAME: 'log-entry'
  },

  properties: [
    {
      // The log entry backing this view; the templates read its
      // id, mode, and contents properties.
      name: 'data',
      // type: 'foam.flow.LogEntry'
    }
  ],

  templates: [
    function toInnerHTML() {/* <num>{{this.data.id}}</num><{{{this.data.mode}}}>{{this.data.contents}}</{{{this.data.mode}}}> */},
    function CSS() {/* log-entry { display: flex; } log-entry > num { min-width: 35px; max-width: 35px; display: inline-block; text-align: right; padding-right: 13px; font-weight: bold; -webkit-touch-callout: none; -webkit-user-select: none; -khtml-user-select: none; -moz-user-select: none; -ms-user-select: none; user-select: none; background: #E0E0E0; } log-entry > log, log-entry > warn, log-entry > error { padding-left: 4px; white-space: pre-wrap; } log-entry > log { color: #333; } log-entry > warn { color: #CC9900; } log-entry > error { color: #C00; } */}
  ]
});
Java
(function() {

  /**
   * Controller backing the landing page. Exposes the hero headline that
   * the template binds to (e.g. via "controller as" syntax).
   */
  function LandingCtrl() {
    this.heroTitle = "Turn the Music Up!";
  }

  // Register the controller on the already-defined blocJams module.
  angular.module('blocJams').controller('LandingCtrl', LandingCtrl);
})();
Java
<include file="Index/header" /> <style type="text/css"> </style> <!-- 右边内容 START --> <div class="col-xs-12 col-sm-12 col-md-12 col-lg-12 " style="padding:20px"> <if condition="$userinfo['wechat_card_num'] - $thisUser['wechat_card_num'] gt 0 " > <a class="btn btn-primary" onclick="location.href='{gr-:U('Index/add')}';">添加微信公众号</a> <span class="text-info" >您还可以创建{gr-$userinfo['wechat_card_num'] - $thisUser['wechat_card_num']}个微信公众号</span> <else /> </if> <!-- 公众号列表 START--> <div class=" " > <table class="table table-condensed table-bordered table-striped" border="0" cellSpacing="0" cellPadding="0" width="100%"> <thead> <tr> <th>公众号名称</th> <th style="text-align:center">用户组</th> <th>到期时间</th> <!-- <th>已定义/上限</th> --> <!-- <th>请求数</th> --> <th>操作</th> </tr> </thead> <tbody> <tr></tr> <volist name="info" id="vo"> <tr> <td><p><a href="{gr-:U('Function/index',array('id'=>$vo['id'],'token'=>$vo['token']))}" title="点击进入功能管理"><img src="{gr-$vo.headerpic}" width="40" height="40"></a></p><p>{gr-$vo.wxname}</p></td> <td align="center">{gr-$thisGroup.name}</td> <td>{gr-$viptime|date="Y-m-d",###} <!-- <a href="###" onclick="alert('请联系我们,电话0575-89974522')" id="smemberss" class="btn btn-flat btn-link btn-sm text-info"><em>如何续费</em></a> --></td> <td class="norightborder"> <a class="btn btn-lg btn-primary" href="{gr-:U('Function/index',array('id'=>$vo['id'],'token'=>$vo['token']))}" class="btn btn-primary" >进入管理</a> <a target="_blank" href="{gr-:U('Home/Index/bind',array('token'=>$vo['token'],'encodingaeskey'=>$vo['encodingaeskey']))}" class="btn btn-primary btn-sm" >绑定公众号</a> <a class="btn btn-warning btn-sm" href="{gr-:U('Index/edit',array('id'=>$vo['id']))}"><i class="mdi-editor-mode-edit"></i>编辑</a> <a href="javascript:drop_confirm('您确定要删除吗?', '{gr-:U('Index/del',array('id'=>$vo['id']))}');" class="btn btn-danger btn-sm"><i class="mdi-action-delete"></i>删除</a> </td> </tr> </volist> </tbody> </table> </div> <!-- 公众号列表 END--> <!-- 分页 START --> <div class="pageNavigator right"> <div 
class="pages"></div> </div> <!-- 分页 END--> </div> <include file="Public/footer"/>
Java
#!/bin/bash
# This script runs in a loop (configurable with LOOP), checks for updates to the
# Hugo docs theme or to the docs on certain branches and rebuilds the public
# folder for them. It has be made more generalized, so that we don't have to
# hardcode versions.

# Warning - Changes should not be made on the server on which this script is running
# becauses this script does git checkout and merge.

set -e

GREEN='\033[32;1m'
RESET='\033[0m'
HOST="${HOST:-https://dgraph.io/docs/badger}"
# Name of output public directory
PUBLIC="${PUBLIC:-public}"
# LOOP true makes this script run in a loop to check for updates
LOOP="${LOOP:-true}"
# Binary of hugo command to run.
HUGO="${HUGO:-hugo}"

# TODO - Maybe get list of released versions from Github API and filter
# those which have docs.

# Place the latest version at the beginning so that version selector can
# append '(latest)' to the version string, followed by the master version,
# and then the older versions in descending order, such that the
# build script can place the artifact in an appropriate location.
VERSIONS_ARRAY=(
    'master'
)

# Echoes VERSIONS_ARRAY joined into one comma-separated string.
joinVersions() {
    versions=$(printf ",%s" "${VERSIONS_ARRAY[@]}")
    echo "${versions:1}"
}

# Zero-pads each component of a dotted version so versions compare lexically.
function version { echo "$@" | gawk -F. '{ printf("%03d%03d%03d\n", $1,$2,$3); }'; }

# rebuild <branch> <version> — runs Hugo to regenerate the docs for a version.
rebuild() {
    echo -e "$(date) $GREEN Updating docs for branch: $1.$RESET"

    # The latest documentation is generated in the root of /public dir.
    # Older documentations are generated in their respective `/public/vx.x.x` dirs.
    dir=''
    if [[ $2 != "${VERSIONS_ARRAY[0]}" ]]; then
        dir=$2
    fi

    VERSION_STRING=$(joinVersions)

    # In Unix environments, env variables should also be exported to be seen by Hugo.
    export CURRENT_BRANCH=${1}
    export CURRENT_VERSION=${2}
    export VERSIONS=${VERSION_STRING}

    HUGO_TITLE="Badger Doc ${2}"\
        VERSIONS=${VERSION_STRING}\
        CURRENT_BRANCH=${1}\
        CURRENT_VERSION=${2} ${HUGO} \
        --destination="${PUBLIC}"/"$dir"\
        --baseURL="$HOST"/"$dir" 1> /dev/null
}

# branchUpdated <branch> — checks out <branch>; when its upstream has new
# commits, merges them in and returns 0, otherwise returns 1.
branchUpdated() {
    local branch="$1"
    git checkout -q "$1"
    UPSTREAM=$(git rev-parse "@{u}")
    LOCAL=$(git rev-parse "@")
    if [ "$LOCAL" != "$UPSTREAM" ] ; then
        git merge -q origin/"$branch"
        return 0
    else
        return 1
    fi
}

# publicFolder <version> — echoes the output folder for that version.
publicFolder() {
    dir=''
    if [[ $1 == "${VERSIONS_ARRAY[0]}" ]]; then
        echo "${PUBLIC}"
    else
        echo "${PUBLIC}/$1"
    fi
}

# checkAndUpdate <version> — rebuilds a version when its branch, the theme,
# or a missing output folder requires it.
checkAndUpdate() {
    local version="$1"
    local branch=""
    if [[ $version == "master" ]]; then
        branch="master"
    else
        branch="release/$version"
    fi

    if branchUpdated "$branch" ; then
        # FIX: branchUpdated already merged origin/$branch before returning 0;
        # the original ran `git merge -q origin/"$branch"` a second time here,
        # which was redundant.
        rebuild "$branch" "$version"
    fi

    folder=$(publicFolder "$version")
    if [ "$firstRun" = 1 ] || [ "$themeUpdated" = 0 ] || [ ! -d "$folder" ] ; then
        rebuild "$branch" "$version"
    fi
}

firstRun=1
while true; do
    # Lets move to the docs directory.
    pushd "$(dirname "$0")/.." > /dev/null

    currentBranch=$(git rev-parse --abbrev-ref HEAD)

    # Lets check if the theme was updated.
    pushd themes/hugo-docs > /dev/null
    git remote update > /dev/null
    # NOTE: 0 means "updated" here (shell-style success), 1 means "unchanged".
    themeUpdated=1
    if branchUpdated "master" ; then
        echo -e "$(date) $GREEN Theme has been updated. Now will update the docs.$RESET"
        themeUpdated=0
    fi
    popd > /dev/null

    # Now lets check the docs branches.
    echo -e "$(date) Starting to check branches."
    git remote update > /dev/null

    for version in "${VERSIONS_ARRAY[@]}"
    do
        checkAndUpdate "$version"
    done

    echo -e "$(date) Done checking branches.\n"

    git checkout -q "$currentBranch"
    popd > /dev/null

    firstRun=0

    # LOOP holds the literal command `true` or `false`; executing it drives the loop.
    if ! $LOOP; then
        exit
    fi

    sleep 60
done
Java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/**
 * ClientInfo.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: #axisVersion# #today#
 */
package org.apache.axis2.databinding;

import org.apache.axiom.om.OMFactory;
import org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter;

import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;

/**
 * ADB bean for the {@code ClientInfo} schema type
 * (namespace URI http://www.wso2.com/types, prefix ns1),
 * carrying a client's name and SSN.
 */
public class ClientInfo implements org.apache.axis2.databinding.ADBBean {

    /** field for Name */
    protected java.lang.String localName;

    /** field for Ssn */
    protected java.lang.String localSsn;

    /** Convenience constructor populating both fields. */
    public ClientInfo(String localName, String localSsn) {
        this.localName = localName;
        this.localSsn = localSsn;
    }

    /** No-arg constructor used by {@link Factory#parse}. */
    public ClientInfo() {
    }

    /**
     * Auto generated getter method
     *
     * @return java.lang.String
     */
    public java.lang.String getName() {
        return localName;
    }

    /**
     * Auto generated setter method
     *
     * @param param Name
     */
    public void setName(java.lang.String param) {
        this.localName = param;
    }

    /**
     * Auto generated getter method
     *
     * @return java.lang.String
     */
    public java.lang.String getSsn() {
        return localSsn;
    }

    /**
     * Auto generated setter method
     *
     * @param param Ssn
     */
    public void setSsn(java.lang.String param) {
        this.localSsn = param;
    }

    /** databinding method to get an XML representation of this object */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName) {

        // Alternating QName/value pairs describing the bean's elements.
        java.util.ArrayList elems = new java.util.ArrayList();
        java.util.ArrayList attrs = new java.util.ArrayList();

        elems.add(new javax.xml.namespace.QName("http://www.wso2.com/types", "name"));
        elems.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localName));
        elems.add(new javax.xml.namespace.QName("http://www.wso2.com/types", "ssn"));
        elems.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localSsn));

        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(
                qName, elems.toArray(), attrs.toArray());
    }

    public void serialize(final QName parentQName, final OMFactory factory,
                          MTOMAwareXMLStreamWriter xmlWriter)
            throws XMLStreamException, ADBException {
        // Delegate to the full overload with type serialization disabled.
        serialize(parentQName, factory, xmlWriter, false);
    }

    public void serialize(final QName parentQName, final OMFactory factory,
                          MTOMAwareXMLStreamWriter xmlWriter, boolean serializeType)
            throws XMLStreamException, ADBException {
        throw new UnsupportedOperationException("Un implemented method");
    }

    /** Factory class that keeps the parse method */
    public static class Factory {

        /** static method to create the object */
        public static ClientInfo parse(javax.xml.stream.XMLStreamReader reader)
                throws java.lang.Exception {
            ClientInfo info = new ClientInfo();
            try {
                int eventType = reader.getEventType();
                int matched = 0;
                int expectedElements = 2;
                boolean finished = false;

                // Event better be a START_ELEMENT; if not, advance until it is.
                while (!reader.isStartElement()) {
                    eventType = reader.next();
                }

                while (!finished) {
                    if (javax.xml.stream.XMLStreamConstants.START_ELEMENT == eventType) {
                        if ("name".equals(reader.getLocalName())) {
                            String text = reader.getElementText();
                            info.setName(
                                    org.apache.axis2.databinding.utils.ConverterUtil
                                            .convertToString(text));
                            matched++;
                        }
                        if ("ssn".equals(reader.getLocalName())) {
                            String text = reader.getElementText();
                            info.setSsn(
                                    org.apache.axis2.databinding.utils.ConverterUtil
                                            .convertToString(text));
                            matched++;
                        }
                    }
                    if (expectedElements == matched) {
                        finished = true;
                    }
                    if (!finished) {
                        eventType = reader.next();
                    }
                }
            } catch (javax.xml.stream.XMLStreamException ex) {
                throw new java.lang.Exception(ex);
            }
            return info;
        }
    } //end of factory class

}
Java
# Install the server-side (repository root) dependencies.
npm install
# Install the client dependencies; the subshell keeps the caller's
# working directory unchanged.
(cd ./client/ && npm install)
Java
<?php
// Qiniu SDK example: generate and print an upload token for a bucket.
require_once '../autoload.php';

use Qiniu\Auth;

// Account credentials (placeholders — replace with real keys from the console).
$accessKey = 'Access_Key';
$secretKey = 'Secret_Key';

// Auth object that signs requests with the key pair.
$auth = new Auth($accessKey, $secretKey);

// Target bucket (placeholder name).
$bucket = 'Bucket_Name';

// Build the upload token with the SDK's default policy and print it.
$upToken = $auth->uploadToken($bucket);

echo $upToken;
Java
/*******************************************************************************
 * Copyright (c) 2015-2018 Skymind, Inc.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/

package org.datavec.api.transform.transform.time;

import lombok.Data;
import lombok.EqualsAndHashCode;
import org.datavec.api.transform.ColumnType;
import org.datavec.api.transform.Transform;
import org.datavec.api.transform.metadata.ColumnMetaData;
import org.datavec.api.transform.metadata.IntegerMetaData;
import org.datavec.api.transform.metadata.StringMetaData;
import org.datavec.api.transform.metadata.TimeMetaData;
import org.datavec.api.transform.schema.Schema;
import org.datavec.api.util.jackson.DateTimeFieldTypeDeserializer;
import org.datavec.api.util.jackson.DateTimeFieldTypeSerializer;
import org.datavec.api.writable.IntWritable;
import org.datavec.api.writable.Text;
import org.datavec.api.writable.Writable;
import org.joda.time.DateTime;
import org.joda.time.DateTimeFieldType;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.nd4j.shade.jackson.annotation.JsonIgnore;
import org.nd4j.shade.jackson.annotation.JsonIgnoreProperties;
import org.nd4j.shade.jackson.annotation.JsonInclude;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import org.nd4j.shade.jackson.databind.annotation.JsonDeserialize;
import org.nd4j.shade.jackson.databind.annotation.JsonSerialize;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

/**
 * Create a number of new columns by deriving their values from a Time column.
 * Can be used for example to create new columns with the year, month, day, hour, minute, second etc.
 *
 * @author Alex Black
 */
//inputSchema and the two cached column indices are derived state: excluded from
//JSON serialization and from equality so that two configurations compare equal
//regardless of whether setInputSchema(...) has been called yet.
@JsonIgnoreProperties({"inputSchema", "insertAfterIdx", "deriveFromIdx"})
@EqualsAndHashCode(exclude = {"inputSchema", "insertAfterIdx", "deriveFromIdx"})
@Data
public class DeriveColumnsFromTimeTransform implements Transform {

    //Name of the source time column the new values are derived from
    private final String columnName;
    //Name of the column after which the derived columns are inserted
    private final String insertAfter;
    //Time zone of the input column; taken from the column's TimeMetaData in setInputSchema(...)
    private DateTimeZone inputTimeZone;
    //Specifications of the columns to derive
    private final List<DerivedColumn> derivedColumns;

    //Derived state, populated by setInputSchema(...)
    private Schema inputSchema;
    private int insertAfterIdx = -1;
    private int deriveFromIdx = -1;

    /**
     * Constructor used by {@link Builder#build()}.
     *
     * @param builder builder carrying the configuration
     */
    private DeriveColumnsFromTimeTransform(Builder builder) {
        this.derivedColumns = builder.derivedColumns;
        this.columnName = builder.columnName;
        this.insertAfter = builder.insertAfter;
    }

    /**
     * JSON creator constructor.
     *
     * @param columnName     name of the source time column
     * @param insertAfter    name of the column to insert the derived columns after
     * @param inputTimeZone  time zone of the source column (may be null; re-read from schema metadata)
     * @param derivedColumns derived column specifications
     */
    public DeriveColumnsFromTimeTransform(@JsonProperty("columnName") String columnName,
                    @JsonProperty("insertAfter") String insertAfter,
                    @JsonProperty("inputTimeZone") DateTimeZone inputTimeZone,
                    @JsonProperty("derivedColumns") List<DerivedColumn> derivedColumns) {
        this.columnName = columnName;
        this.insertAfter = insertAfter;
        this.inputTimeZone = inputTimeZone;
        this.derivedColumns = derivedColumns;
    }

    /**
     * Build the output schema: a copy of the input schema with the derived columns'
     * metadata inserted immediately after the {@code insertAfter} column.
     *
     * @param inputSchema schema to transform
     * @return new schema including the derived column metadata
     */
    @Override
    public Schema transform(Schema inputSchema) {
        List<ColumnMetaData> oldMeta = inputSchema.getColumnMetaData();
        List<ColumnMetaData> newMeta = new ArrayList<>(oldMeta.size() + derivedColumns.size());

        List<String> oldNames = inputSchema.getColumnNames();

        for (int i = 0; i < oldMeta.size(); i++) {
            String current = oldNames.get(i);
            newMeta.add(oldMeta.get(i));

            if (insertAfter.equals(current)) {
                //Insert the derived columns here
                for (DerivedColumn d : derivedColumns) {
                    switch (d.columnType) {
                        case String:
                            newMeta.add(new StringMetaData(d.columnName));
                            break;
                        case Integer:
                            newMeta.add(new IntegerMetaData(d.columnName)); //TODO: ranges... if it's a day, we know it must be 1 to 31, etc...
                            break;
                        default:
                            throw new IllegalStateException("Unexpected column type: " + d.columnType);
                    }
                }
            }
        }

        return inputSchema.newSchema(newMeta);
    }

    /**
     * Cache the indices of the insert-after and source columns, validate that the
     * source column is a time column, and pick up its time zone from the metadata.
     *
     * @param inputSchema schema of the input data
     * @throws IllegalStateException if either column is missing or the source column is not a time column
     */
    @Override
    public void setInputSchema(Schema inputSchema) {
        insertAfterIdx = inputSchema.getColumnNames().indexOf(insertAfter);
        if (insertAfterIdx == -1) {
            throw new IllegalStateException(
                            "Invalid schema/insert after column: input schema does not contain column \"" + insertAfter
                                            + "\"");
        }

        deriveFromIdx = inputSchema.getColumnNames().indexOf(columnName);
        if (deriveFromIdx == -1) {
            throw new IllegalStateException(
                            "Invalid source column: input schema does not contain column \"" + columnName + "\"");
        }

        this.inputSchema = inputSchema;

        if (!(inputSchema.getMetaData(columnName) instanceof TimeMetaData))
            throw new IllegalStateException("Invalid state: input column \"" + columnName
                            + "\" is not a time column. Is: " + inputSchema.getMetaData(columnName));
        TimeMetaData meta = (TimeMetaData) inputSchema.getMetaData(columnName);
        //The time zone used when deriving integer fields comes from the column metadata
        inputTimeZone = meta.getTimeZone();
    }

    @Override
    public Schema getInputSchema() {
        return inputSchema;
    }

    /**
     * Map a single example: copy all input writables, inserting the derived values
     * (formatted string or integer field of the source timestamp) after {@code insertAfter}.
     *
     * @param writables one example, matching the input schema
     * @return the example with the derived values inserted
     * @throws IllegalStateException if the example length does not match the schema
     */
    @Override
    public List<Writable> map(List<Writable> writables) {
        if (writables.size() != inputSchema.numColumns()) {
            throw new IllegalStateException("Cannot execute transform: input writables list length ("
                            + writables.size() + ") does not " + "match expected number of elements (schema: "
                            + inputSchema.numColumns() + "). Transform = " + toString());
        }

        int i = 0;
        Writable source = writables.get(deriveFromIdx);
        List<Writable> list = new ArrayList<>(writables.size() + derivedColumns.size());
        for (Writable w : writables) {
            list.add(w);
            //Insert the derived values right after the insert-after column
            if (i++ == insertAfterIdx) {
                for (DerivedColumn d : derivedColumns) {
                    switch (d.columnType) {
                        case String:
                            list.add(new Text(d.dateTimeFormatter.print(source.toLong())));
                            break;
                        case Integer:
                            DateTime dt = new DateTime(source.toLong(), inputTimeZone);
                            list.add(new IntWritable(dt.get(d.fieldType)));
                            break;
                        default:
                            throw new IllegalStateException("Unexpected column type: " + d.columnType);
                    }
                }
            }
        }
        return list;
    }

    /**
     * Map a sequence by applying {@link #map(List)} to every step.
     *
     * @param sequence sequence of examples
     * @return the transformed sequence
     */
    @Override
    public List<List<Writable>> mapSequence(List<List<Writable>> sequence) {
        List<List<Writable>> out = new ArrayList<>(sequence.size());
        for (List<Writable> step : sequence) {
            out.add(map(step));
        }
        return out;
    }

    /**
     * Transform an object
     * in to another object
     *
     * @param input the record to transform; expected to be a {@code Long} epoch-millisecond timestamp
     * @return list of derived values (String or Integer), one per derived column
     */
    @Override
    public Object map(Object input) {
        List<Object> ret = new ArrayList<>();
        Long l = (Long) input;
        for (DerivedColumn d : derivedColumns) {
            switch (d.columnType) {
                case String:
                    ret.add(d.dateTimeFormatter.print(l));
                    break;
                case Integer:
                    DateTime dt = new DateTime(l, inputTimeZone);
                    ret.add(dt.get(d.fieldType));
                    break;
                default:
                    throw new IllegalStateException("Unexpected column type: " + d.columnType);
            }
        }
        return ret;
    }

    /**
     * Transform a sequence
     *
     * @param sequence expected to be a {@code List<Long>} of epoch-millisecond timestamps
     */
    @Override
    public Object mapSequence(Object sequence) {
        List<Long> longs = (List<Long>) sequence;
        List<List<Object>> ret = new ArrayList<>();
        for (Long l : longs)
            ret.add((List<Object>) map(l));
        return ret;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("DeriveColumnsFromTimeTransform(timeColumn=\"").append(columnName).append("\",insertAfter=\"")
                        .append(insertAfter).append("\",derivedColumns=(");

        boolean first = true;
        for (DerivedColumn d : derivedColumns) {
            if (!first)
                sb.append(",");
            sb.append(d);
            first = false;
        }

        sb.append("))");

        return sb.toString();
    }

    /**
     * The output column name
     * after the operation has been applied
     *
     * @return the output column name
     */
    @Override
    public String outputColumnName() {
        return outputColumnNames()[0];
    }

    /**
     * The output column names
     * This will often be the same as the input
     *
     * @return the output column names (the derived column names, in order)
     */
    @Override
    public String[] outputColumnNames() {
        String[] ret = new String[derivedColumns.size()];
        for (int i = 0; i < ret.length; i++)
            ret[i] = derivedColumns.get(i).columnName;
        return ret;
    }

    /**
     * Returns column names
     * this op is meant to run on
     *
     * @return
     */
    @Override
    public String[] columnNames() {
        return new String[] {columnName()};
    }

    /**
     * Returns a singular column name
     * this op is meant to run on
     *
     * @return
     */
    @Override
    public String columnName() {
        return columnName;
    }

    /**
     * Builder for {@link DeriveColumnsFromTimeTransform}.
     */
    public static class Builder {

        private final String columnName;
        private String insertAfter;
        private final List<DerivedColumn> derivedColumns = new ArrayList<>();


        /**
         * @param timeColumnName The name of the time column from which to derive the new values
         */
        public Builder(String timeColumnName) {
            this.columnName = timeColumnName;
            //By default new columns are inserted directly after the source column
            this.insertAfter = timeColumnName;
        }


        /**
         * Where should the new columns be inserted?
         * By default, they will be inserted after the source column
         *
         * @param columnName Name of the column to insert the derived columns after
         */
        public Builder insertAfter(String columnName) {
            this.insertAfter = columnName;
            return this;
        }

        /**
         * Add a String column (for example, human readable format), derived from the time
         *
         * @param columnName Name of the new/derived column
         * @param format     Joda time format, as per <a href="http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html">http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html</a>
         * @param timeZone   Timezone to use for formatting
         */
        public Builder addStringDerivedColumn(String columnName, String format, DateTimeZone timeZone) {
            derivedColumns.add(new DerivedColumn(columnName, ColumnType.String, format, timeZone, null));
            return this;
        }

        /**
         * Add an integer derived column - for example, the hour of day, etc. Uses timezone from the time column metadata
         *
         * @param columnName Name of the column
         * @param type       Type of field (for example, DateTimeFieldType.hourOfDay() etc)
         */
        public Builder addIntegerDerivedColumn(String columnName, DateTimeFieldType type) {
            derivedColumns.add(new DerivedColumn(columnName, ColumnType.Integer, null, null, type));
            return this;
        }

        /**
         * Create the transform instance
         */
        public DeriveColumnsFromTimeTransform build() {
            return new DeriveColumnsFromTimeTransform(this);
        }
    }

    /**
     * Specification of one derived column: either a formatted String column
     * (format + time zone) or an Integer column (a Joda {@link DateTimeFieldType}).
     */
    @JsonInclude(JsonInclude.Include.NON_NULL)
    @EqualsAndHashCode(exclude = "dateTimeFormatter")
    @Data
    @JsonIgnoreProperties({"dateTimeFormatter"})
    public static class DerivedColumn implements Serializable {

        private final String columnName;
        private final ColumnType columnType;
        //Joda-Time pattern; non-null only for String columns
        private final String format;
        //Time zone used for String formatting; non-null only for String columns
        private final DateTimeZone dateTimeZone;
        //Field extracted for Integer columns; needs custom (de)serializers for JSON
        @JsonSerialize(using = DateTimeFieldTypeSerializer.class)
        @JsonDeserialize(using = DateTimeFieldTypeDeserializer.class)
        private final DateTimeFieldType fieldType;
        //Rebuilt from format/dateTimeZone on deserialization; DateTimeFormatter itself is not serializable
        private transient DateTimeFormatter dateTimeFormatter;

        /**
         * @param columnName   name of the derived column
         * @param columnType   String or Integer
         * @param format       Joda time format (String columns only, may be null)
         * @param dateTimeZone time zone for formatting (String columns only, may be null)
         * @param fieldType    field to extract (Integer columns only, may be null)
         */
        public DerivedColumn(@JsonProperty("columnName") String columnName,
                        @JsonProperty("columnType") ColumnType columnType, @JsonProperty("format") String format,
                        @JsonProperty("dateTimeZone") DateTimeZone dateTimeZone,
                        @JsonProperty("fieldType") DateTimeFieldType fieldType) {
            this.columnName = columnName;
            this.columnType = columnType;
            this.format = format;
            this.dateTimeZone = dateTimeZone;
            this.fieldType = fieldType;
            if (format != null)
                dateTimeFormatter = DateTimeFormat.forPattern(this.format).withZone(dateTimeZone);
        }

        @Override
        public String toString() {
            return "(name=" + columnName + ",type=" + columnType + ",derived=" + (format != null ? format : fieldType)
                            + ")";
        }

        //Custom serialization methods, because Joda Time doesn't allow DateTimeFormatter objects to be serialized :(
        private void writeObject(ObjectOutputStream out) throws IOException {
            out.defaultWriteObject();
        }

        private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
            in.defaultReadObject();
            //Recreate the transient formatter that default deserialization cannot restore
            if (format != null)
                dateTimeFormatter = DateTimeFormat.forPattern(format).withZone(dateTimeZone);
        }
    }
}
Java
package com.humbinal.ssm.test; public class User { private long user_Id; private String user_name; private int user_age; public User() { } public long getUser_Id() { return user_Id; } public void setUser_Id(long user_Id) { this.user_Id = user_Id; } public String getUser_name() { return user_name; } public void setUser_name(String user_name) { this.user_name = user_name; } public int getUser_age() { return user_age; } public void setUser_age(int user_age) { this.user_age = user_age; } }
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.5.0_10) on Mon Feb 26 03:04:16 CET 2007 --> <TITLE> S-Index (REPSI Tool) </TITLE> <LINK REL ="stylesheet" TYPE="text/css" HREF="../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { parent.document.title="S-Index (REPSI Tool)"; } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Package</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../overview-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Index</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" 
CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="index-11.html"><B>PREV LETTER</B></A>&nbsp; &nbsp;<A HREF="index-13.html"><B>NEXT LETTER</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../index.html?index-filesindex-12.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="index-12.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <A HREF="index-1.html">C</A> <A HREF="index-2.html">D</A> <A HREF="index-3.html">E</A> <A HREF="index-4.html">F</A> <A HREF="index-5.html">G</A> <A HREF="index-6.html">I</A> <A HREF="index-7.html">M</A> <A HREF="index-8.html">N</A> <A HREF="index-9.html">O</A> <A HREF="index-10.html">P</A> <A HREF="index-11.html">R</A> <A HREF="index-12.html">S</A> <A HREF="index-13.html">T</A> <HR> <A NAME="_S_"><!-- --></A><H2> <B>S</B></H2> <DL> <DT><A HREF="../edu/ou/weinmann/repsi/model/util/Global.html#SEPARATOR_COMMA_SPACE_SINGLE_QUOTE"><B>SEPARATOR_COMMA_SPACE_SINGLE_QUOTE</B></A> - Static variable in interface edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Global.html" title="interface in edu.ou.weinmann.repsi.model.util">Global</A> <DD>Separator - comma, space & single quote. <DT><A HREF="../edu/ou/weinmann/repsi/model/util/Global.html#SEPARATOR_SINGLE_QUOTE_COMMA_SPACE_SINGLE_QUOTE"><B>SEPARATOR_SINGLE_QUOTE_COMMA_SPACE_SINGLE_QUOTE</B></A> - Static variable in interface edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Global.html" title="interface in edu.ou.weinmann.repsi.model.util">Global</A> <DD>Separator - single quote, comma, space & single quote. 
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setAppliedEndAction(java.util.Date, java.util.Date, long)"><B>setAppliedEndAction(Date, Date, long)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A> <DD>Updates in the database the columns <code>APPLIED_DURATION_MICRO_SECOND</code>, <code>APPLIED_END_TIME</code>, and <code>APPLIED_START_TIME</code>. <DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setAppliedEndActionError(java.lang.String)"><B>setAppliedEndActionError(String)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A> <DD>Updates in the database the columns <code>APPLIED_ERROR_MESSAGE</code> and <code>APPLIED_STATUS</code>. <DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setAppliedStartAction(java.lang.String, java.lang.String)"><B>setAppliedStartAction(String, String)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A> <DD>Updates in the database the columns <code>APPLIED_PATTERN_SELECT_STMNT</code> and <code>APPLIED_STATUS</code>. <DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html#setComparison(java.lang.String, java.lang.String)"><B>setComparison(String, String)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">CalibrationMapper</A> <DD>Updates in the database the columns <code>COMPARISON_EQUALS</code> and <code>COMPARISON_MESSAGE</code>. 
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setComparison(java.lang.String, java.lang.String)"><B>setComparison(String, String)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A> <DD>Updates in the database the columns <code>COMPARISON_EQUALS</code> and <code>COMPARISON_MESSAGE</code>. <DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html#setDescription(java.lang.String)"><B>setDescription(String)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">CalibrationMapper</A> <DD>Updates in the database the columns <code>DESCRIPTION</code>. <DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunMapper.html#setDescription(java.lang.String)"><B>setDescription(String)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunMapper</A> <DD>Updates in the database the columns <code>DESCRIPTION</code>. <DT><A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html#setDescription(java.lang.String, int)"><B>setDescription(String, int)</B></A> - Method in class edu.ou.weinmann.repsi.model.trial.util.<A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html" title="class in edu.ou.weinmann.repsi.model.trial.util">ResultSetComparator</A> <DD>Sets the description at the required position. 
<DT><A HREF="../edu/ou/weinmann/repsi/model/util/DatabaseAccessor.html#setFetchSize(int)"><B>setFetchSize(int)</B></A> - Method in class edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/DatabaseAccessor.html" title="class in edu.ou.weinmann.repsi.model.util">DatabaseAccessor</A> <DD>Creates a <code>Statement</code> object associated with this <code>Connection</code> object. <DT><A HREF="../edu/ou/weinmann/repsi/model/calibration/Calibration.html#setObject(java.lang.String)"><B>setObject(String)</B></A> - Method in class edu.ou.weinmann.repsi.model.calibration.<A HREF="../edu/ou/weinmann/repsi/model/calibration/Calibration.html" title="class in edu.ou.weinmann.repsi.model.calibration">Calibration</A> <DD>Sets the type of the <code>Calibration</code> object. <DT><A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html#setOrderBy(java.lang.String, int)"><B>setOrderBy(String, int)</B></A> - Method in class edu.ou.weinmann.repsi.model.trial.util.<A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html" title="class in edu.ou.weinmann.repsi.model.trial.util">ResultSetComparator</A> <DD>Sets the order by clause at the required position. <DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html#setPatternSqlIdiomName(java.lang.String)"><B>setPatternSqlIdiomName(String)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">CalibrationMapper</A> <DD>Updates in the database the columns <code>PATTERN_SQL_IDIOM_NAME</code>. 
<DT><A HREF="../edu/ou/weinmann/repsi/model/util/Configurator.html#setProperty(java.lang.String, java.lang.String)"><B>setProperty(String, String)</B></A> - Method in class edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Configurator.html" title="class in edu.ou.weinmann.repsi.model.util">Configurator</A> <DD>sets the value of a given property key. <DT><A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html#setSelectStmnt(java.lang.String, int)"><B>setSelectStmnt(String, int)</B></A> - Method in class edu.ou.weinmann.repsi.model.trial.util.<A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html" title="class in edu.ou.weinmann.repsi.model.trial.util">ResultSetComparator</A> <DD>Sets the <code>SELECT</code> statement at the required position. <DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setSequenceNumberAction(long)"><B>setSequenceNumberAction(long)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A> <DD>Sets the current action sequence number. <DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunProtocolMapper.html#setSequenceNumberAction(long)"><B>setSequenceNumberAction(long)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunProtocolMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunProtocolMapper</A> <DD>Sets the current action sequence number. 
<DT><A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html#setSqlSyntaxCode(java.lang.String)"><B>setSqlSyntaxCode(String)</B></A> - Method in class edu.ou.weinmann.repsi.model.trial.util.<A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html" title="class in edu.ou.weinmann.repsi.model.trial.util">ResultSetComparator</A> <DD>Sets the <code>SQL</code> syntax code. <DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html#setSqlSyntaxCodeTqp(java.lang.String)"><B>setSqlSyntaxCodeTqp(String)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">CalibrationMapper</A> <DD>Updates in the database the columns <code>SQL_SYNTAX_CODE_TTQP</code>. <DT><A HREF="../edu/ou/weinmann/repsi/model/database/Database.html#setSqlSyntaxSource(java.lang.String)"><B>setSqlSyntaxSource(String)</B></A> - Method in class edu.ou.weinmann.repsi.model.database.<A HREF="../edu/ou/weinmann/repsi/model/database/Database.html" title="class in edu.ou.weinmann.repsi.model.database">Database</A> <DD>Sets the type of the SQL syntax version. <DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html#setStatus()"><B>setStatus()</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">CalibrationMapper</A> <DD>Updates in the database the columns <code>END_TIME</code> and <code>STATUS_CODE</code>. 
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunMapper.html#setStatus(java.lang.String)"><B>setStatus(String)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunMapper</A> <DD>Updates in the database the columns <code>END_TIME</code> and <code>STATUS_CODE</code>. <DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setTableName(java.lang.String)"><B>setTableName(String)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A> <DD>Updates in the database the column <code>TABLE_NAME</code>. <DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setUnappliedEndAction(java.util.Date, java.util.Date, long)"><B>setUnappliedEndAction(Date, Date, long)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A> <DD>Updates in the database the columns <code>UNAPPLIED_DURATION_MICRO_SECOND</code>, <code>UNAPPLIED_END_TIME</code>, and <code>UNAPPLIED_START_TIME</code>. <DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setUnappliedEndActionError(java.lang.String)"><B>setUnappliedEndActionError(String)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A> <DD>Updates in the database the columns <code>UNAPPLIED_ERROR_MESSAGE</code> and <code>UNAPPLIED_STATUS</code>. 
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setUnappliedStartAction(java.lang.String, java.lang.String)"><B>setUnappliedStartAction(String, String)</B></A> - Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A> <DD>Updates in the database the columns <code>UNAPPLIED_PATTERN_SELECT_STMNT</code> and <code>UNAPPLIED_STATUS</code>. <DT><A HREF="../edu/ou/weinmann/repsi/model/trial/metadata/Columns.html#sizeColumns()"><B>sizeColumns()</B></A> - Method in class edu.ou.weinmann.repsi.model.trial.metadata.<A HREF="../edu/ou/weinmann/repsi/model/trial/metadata/Columns.html" title="class in edu.ou.weinmann.repsi.model.trial.metadata">Columns</A> <DD>Returns the number of columns in this database table. <DT><A HREF="../edu/ou/weinmann/repsi/model/util/Global.html#SQL_COLUMN_TYPE_CHAR"><B>SQL_COLUMN_TYPE_CHAR</B></A> - Static variable in interface edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Global.html" title="interface in edu.ou.weinmann.repsi.model.util">Global</A> <DD>SQL column type - CHAR. <DT><A HREF="../edu/ou/weinmann/repsi/model/util/Global.html#SQL_COLUMN_TYPE_VARCHAR2"><B>SQL_COLUMN_TYPE_VARCHAR2</B></A> - Static variable in interface edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Global.html" title="interface in edu.ou.weinmann.repsi.model.util">Global</A> <DD>SQL column type - VARCHAR2. <DT><A HREF="../edu/ou/weinmann/repsi/model/util/Global.html#SQL_SYNTAX_CODE_ORACLE_10G"><B>SQL_SYNTAX_CODE_ORACLE_10G</B></A> - Static variable in interface edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Global.html" title="interface in edu.ou.weinmann.repsi.model.util">Global</A> <DD>SQL syntax code - Oracle 10g Release 2. 
<DT><A HREF="../edu/ou/weinmann/repsi/model/util/Global.html#SQL_SYNTAX_CODE_SQL_99"><B>SQL_SYNTAX_CODE_SQL_99</B></A> - Static variable in interface edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Global.html" title="interface in edu.ou.weinmann.repsi.model.util">Global</A> <DD>SQL syntax code - standard SQL:1999. <DT><A HREF="../edu/ou/weinmann/repsi/model/util/SQLRewriter.html" title="class in edu.ou.weinmann.repsi.model.util"><B>SQLRewriter</B></A> - Class in <A HREF="../edu/ou/weinmann/repsi/model/util/package-summary.html">edu.ou.weinmann.repsi.model.util</A><DD>Adapts the syntactical variations of different SQL versions by rewriting the SQL statements.<DT><A HREF="../edu/ou/weinmann/repsi/model/util/SQLRewriter.html#SQLRewriter()"><B>SQLRewriter()</B></A> - Constructor for class edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/SQLRewriter.html" title="class in edu.ou.weinmann.repsi.model.util">SQLRewriter</A> <DD>Constructs a <code>SQLRewriter</code> object. <DT><A HREF="../edu/ou/weinmann/repsi/model/database/Database.html#startElement(java.lang.String, java.lang.String, java.lang.String, org.xml.sax.Attributes)"><B>startElement(String, String, String, Attributes)</B></A> - Method in class edu.ou.weinmann.repsi.model.database.<A HREF="../edu/ou/weinmann/repsi/model/database/Database.html" title="class in edu.ou.weinmann.repsi.model.database">Database</A> <DD>Receive notification of the start of an element. 
</DL> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Package</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../overview-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Index</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="index-11.html"><B>PREV LETTER</B></A>&nbsp; &nbsp;<A HREF="index-13.html"><B>NEXT LETTER</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../index.html?index-filesindex-12.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="index-12.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../allclasses-noframe.html"><B>All 
Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <A HREF="index-1.html">C</A> <A HREF="index-2.html">D</A> <A HREF="index-3.html">E</A> <A HREF="index-4.html">F</A> <A HREF="index-5.html">G</A> <A HREF="index-6.html">I</A> <A HREF="index-7.html">M</A> <A HREF="index-8.html">N</A> <A HREF="index-9.html">O</A> <A HREF="index-10.html">P</A> <A HREF="index-11.html">R</A> <A HREF="index-12.html">S</A> <A HREF="index-13.html">T</A> <HR> </BODY> </HTML>
Java
/**
Copyright (c) 2013 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
**/

require("../base/extension_registry.js");
require("./event.js");
require("./object_snapshot.js");
require("../base/range.js");
require("../base/sorted_array_utils.js");

'use strict';

/**
 * @fileoverview Provides the ObjectSnapshot and ObjectHistory classes.
 */
global.tr.exportTo('tr.model', function() {
  var ObjectSnapshot = tr.model.ObjectSnapshot;

  /**
   * An object with a specific id, whose state has been snapshotted several
   * times.
   *
   * @constructor
   */
  function ObjectInstance(
      parent, id, category, name, creationTs, opt_baseTypeName) {
    tr.model.Event.call(this);
    this.parent = parent;
    this.id = id;
    this.category = category;
    // baseTypeName defaults to the instance name when no explicit base type is given.
    this.baseTypeName = opt_baseTypeName ? opt_baseTypeName : name;
    this.name = name;
    this.creationTs = creationTs;
    this.creationTsWasExplicit = false;
    // Until an explicit deletion event is seen, the instance is considered alive.
    this.deletionTs = Number.MAX_VALUE;
    this.deletionTsWasExplicit = false;
    this.colorId = 0;
    this.bounds = new tr.b.Range();
    // Snapshots are kept in strictly increasing timestamp order (enforced in addSnapshot).
    this.snapshots = [];
    this.hasImplicitSnapshots = false;
  }

  ObjectInstance.prototype = {
    __proto__: tr.model.Event.prototype,

    get typeName() {
      return this.name;
    },

    // Expands the given range to include this instance's bounds.
    addBoundsToRange: function(range) {
      range.addRange(this.bounds);
    },

    /**
     * Appends a snapshot at timestamp ts. Snapshots must arrive in increasing
     * timestamp order, within [creationTs, deletionTs). opt_name may rename the
     * instance type, but only if the base type name matches.
     */
    addSnapshot: function(ts, args, opt_name, opt_baseTypeName) {
      if (ts < this.creationTs)
        throw new Error('Snapshots must be >= instance.creationTs');
      if (ts >= this.deletionTs)
        throw new Error('Snapshots cannot be added after ' +
                        'an objects deletion timestamp.');

      var lastSnapshot;
      if (this.snapshots.length > 0) {
        lastSnapshot = this.snapshots[this.snapshots.length - 1];
        if (lastSnapshot.ts == ts)
          throw new Error('Snapshots already exists at this time!');
        if (ts < lastSnapshot.ts) {
          throw new Error(
              'Snapshots must be added in increasing timestamp order');
        }
      }

      // Update baseTypeName if needed.
      if (opt_name &&
          (this.name != opt_name)) {
        if (!opt_baseTypeName)
          throw new Error('Must provide base type name for name update');
        if (this.baseTypeName != opt_baseTypeName)
          throw new Error('Cannot update type name: base types dont match');
        this.name = opt_name;
      }

      // The concrete snapshot class is looked up by (category, name) in the registry.
      var snapshotConstructor =
          tr.model.ObjectSnapshot.getConstructor(this.category, this.name);
      var snapshot = new snapshotConstructor(this, ts, args);
      this.snapshots.push(snapshot);
      return snapshot;
    },

    // Marks the instance as explicitly deleted at ts; rejects ts older than
    // the most recent snapshot.
    wasDeleted: function(ts) {
      var lastSnapshot;
      if (this.snapshots.length > 0) {
        lastSnapshot = this.snapshots[this.snapshots.length - 1];
        if (lastSnapshot.ts > ts)
          throw new Error(
              'Instance cannot be deleted at ts=' +
              ts + '. A snapshot exists that is older.');
      }
      this.deletionTs = ts;
      this.deletionTsWasExplicit = true;
    },

    /**
     * See ObjectSnapshot constructor notes on object initialization.
     */
    preInitialize: function() {
      for (var i = 0; i < this.snapshots.length; i++)
        this.snapshots[i].preInitialize();
    },

    /**
     * See ObjectSnapshot constructor notes on object initialization.
     */
    initialize: function() {
      for (var i = 0; i < this.snapshots.length; i++)
        this.snapshots[i].initialize();
    },

    /**
     * Returns the snapshot whose validity interval contains ts. Each snapshot
     * is treated as valid from its own ts until the next snapshot's ts (the
     * last one until deletionTs). Timestamps outside the instance lifetime
     * throw, except before an implicit creationTs (see comment below).
     */
    getSnapshotAt: function(ts) {
      if (ts < this.creationTs) {
        if (this.creationTsWasExplicit)
          throw new Error('ts must be within lifetime of this instance');
        return this.snapshots[0];
      }
      if (ts > this.deletionTs)
        throw new Error('ts must be within lifetime of this instance');

      var snapshots = this.snapshots;
      var i = tr.b.findIndexInSortedIntervals(
          snapshots,
          function(snapshot) { return snapshot.ts; },
          function(snapshot, i) {
            if (i == snapshots.length - 1)
              return snapshots[i].objectInstance.deletionTs;
            return snapshots[i + 1].ts - snapshots[i].ts;
          },
          ts);
      if (i < 0) {
        // Note, this is a little bit sketchy: this lets early ts point at the
        // first snapshot, even before it is taken. We do this because raster
        // tasks usually post before their tile snapshots are dumped. This may
        // be a good line of code to re-visit if we start seeing strange and
        // confusing object references showing up in the traces.
        return this.snapshots[0];
      }
      if (i >= this.snapshots.length)
        return this.snapshots[this.snapshots.length - 1];
      return this.snapshots[i];
    },

    // Recomputes bounds from creationTs to deletionTs (or the last snapshot
    // if the instance was never explicitly deleted).
    updateBounds: function() {
      this.bounds.reset();
      this.bounds.addValue(this.creationTs);
      if (this.deletionTs != Number.MAX_VALUE)
        this.bounds.addValue(this.deletionTs);
      else if (this.snapshots.length > 0)
        this.bounds.addValue(this.snapshots[this.snapshots.length - 1].ts);
    },

    // Shifts all timestamps (creation, deletion, snapshots) by amount.
    shiftTimestampsForward: function(amount) {
      this.creationTs += amount;
      if (this.deletionTs != Number.MAX_VALUE)
        this.deletionTs += amount;
      this.snapshots.forEach(function(snapshot) {
        snapshot.ts += amount;
      });
    },

    get userFriendlyName() {
      return this.typeName + ' object ' + this.id;
    }
  };

  tr.model.EventRegistry.register(
      ObjectInstance,
      {
        name: 'objectInstance',
        pluralName: 'objectInstances',
        singleViewElementName: 'tr-ui-a-single-object-instance-sub-view',
        multiViewElementName: 'tr-ui-a-multi-object-sub-view'
      });

  // ObjectInstance is itself an extension point: subclasses can register
  // per-type instance classes, with ObjectInstance as the default.
  var options = new tr.b.ExtensionRegistryOptions(
      tr.b.TYPE_BASED_REGISTRY_MODE);
  options.mandatoryBaseClass = ObjectInstance;
  options.defaultConstructor = ObjectInstance;
  tr.b.decorateExtensionRegistry(ObjectInstance, options);

  return {
    ObjectInstance: ObjectInstance
  };
});
Java
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package graphx

import (
	"encoding/json"
	"fmt"

	"github.com/apache/beam/sdks/go/pkg/beam/core/graph/coder"
	"github.com/apache/beam/sdks/go/pkg/beam/core/runtime/graphx/v1"
	"github.com/apache/beam/sdks/go/pkg/beam/core/typex"
	"github.com/apache/beam/sdks/go/pkg/beam/core/util/protox"
	pb "github.com/apache/beam/sdks/go/pkg/beam/model/pipeline_v1"
	"github.com/golang/protobuf/proto"
)

const (
	// Model constants: standard coder URNs defined by the Beam portability
	// framework, understood by all runners.
	urnBytesCoder         = "beam:coder:bytes:v1"
	urnVarIntCoder        = "beam:coder:varint:v1"
	urnLengthPrefixCoder  = "beam:coder:length_prefix:v1"
	urnKVCoder            = "beam:coder:kv:v1"
	urnIterableCoder      = "beam:coder:iterable:v1"
	urnWindowedValueCoder = "beam:coder:windowed_value:v1"

	urnGlobalWindow   = "beam:coder:global_window:v1"
	urnIntervalWindow = "beam:coder:interval_window:v1"

	// SDK constants: URNs private to the Go SDK, used for coders that have no
	// standard model representation.
	urnCustomCoder = "beam:go:coder:custom:v1"
	urnCoGBKList   = "beam:go:coder:cogbklist:v1" // CoGBK representation. Not a coder.
)

// MarshalCoders marshals a list of coders into model coders. It returns the
// ids of the given coders (in order) along with the full map of model coders
// produced, which includes any component coders they reference.
func MarshalCoders(coders []*coder.Coder) ([]string, map[string]*pb.Coder) {
	b := NewCoderMarshaller()
	ids := b.AddMulti(coders)
	return ids, b.Build()
}

// UnmarshalCoders unmarshals coders.
func UnmarshalCoders(ids []string, m map[string]*pb.Coder) ([]*coder.Coder, error) {
	b := NewCoderUnmarshaller(m)

	var coders []*coder.Coder
	for _, id := range ids {
		c, err := b.Coder(id)
		if err != nil {
			return nil, fmt.Errorf("failed to unmarshal coder %v: %v", id, err)
		}
		coders = append(coders, c)
	}
	return coders, nil
}

// CoderUnmarshaller is an incremental unmarshaller of model coders. Identical
// coders are shared.
type CoderUnmarshaller struct {
	// models holds the raw model coders, keyed by id, as provided at construction.
	models map[string]*pb.Coder

	// coders and windowCoders memoize unmarshalled results by id, so that a
	// coder referenced multiple times yields the same *coder.Coder instance.
	coders       map[string]*coder.Coder
	windowCoders map[string]*coder.WindowCoder
}

// NewCoderUnmarshaller returns a new CoderUnmarshaller over the given map of
// model coders.
func NewCoderUnmarshaller(m map[string]*pb.Coder) *CoderUnmarshaller {
	return &CoderUnmarshaller{
		models:       m,
		coders:       make(map[string]*coder.Coder),
		windowCoders: make(map[string]*coder.WindowCoder),
	}
}

// Coders unmarshals the coders with the given ids, preserving order.
func (b *CoderUnmarshaller) Coders(ids []string) ([]*coder.Coder, error) {
	coders := make([]*coder.Coder, len(ids))
	for i, id := range ids {
		c, err := b.Coder(id)
		if err != nil {
			return nil, err
		}
		coders[i] = c
	}
	return coders, nil
}

// Coder unmarshals a coder with the given id. Results are cached, so repeated
// lookups of the same id return the shared instance.
func (b *CoderUnmarshaller) Coder(id string) (*coder.Coder, error) {
	if c, exists := b.coders[id]; exists {
		return c, nil
	}
	c, ok := b.models[id]
	if !ok {
		return nil, fmt.Errorf("coder with id %v not found", id)
	}

	ret, err := b.makeCoder(c)
	if err != nil {
		return nil, fmt.Errorf("failed to unmarshal coder %v: %v", id, err)
	}
	b.coders[id] = ret
	return ret, nil
}

// WindowCoder unmarshals a window coder with the given id.
func (b *CoderUnmarshaller) WindowCoder(id string) (*coder.WindowCoder, error) {
	if w, exists := b.windowCoders[id]; exists {
		return w, nil
	}
	c, err := b.peek(id)
	if err != nil {
		return nil, err
	}

	w := urnToWindowCoder(c.GetSpec().GetSpec().GetUrn())
	b.windowCoders[id] = w
	return w, nil
}

// urnToWindowCoder maps a model window coder urn to its SDK equivalent.
// Panics on any urn other than the global or interval window coders.
func urnToWindowCoder(urn string) *coder.WindowCoder {
	switch urn {
	case urnGlobalWindow:
		return coder.NewGlobalWindow()
	case urnIntervalWindow:
		return coder.NewIntervalWindow()
	default:
		panic(fmt.Sprintf("Unexpected window coder: %v", urn))
	}
}

// makeCoder translates a single model coder into its SDK form, recursively
// unmarshalling any component coders. It handles both standard model urns and
// the Go SDK's custom/CoGBK encodings.
func (b *CoderUnmarshaller) makeCoder(c *pb.Coder) (*coder.Coder, error) {
	urn := c.GetSpec().GetSpec().GetUrn()
	components := c.GetComponentCoderIds()

	switch urn {
	case urnBytesCoder:
		return coder.NewBytes(), nil

	case urnVarIntCoder:
		return coder.NewVarInt(), nil

	case urnKVCoder:
		if len(components) != 2 {
			return nil, fmt.Errorf("bad pair: %v", c)
		}

		key, err := b.Coder(components[0])
		if err != nil {
			return nil, err
		}

		id := components[1]
		kind := coder.KV
		root := typex.KVType

		// Peek (rather than unmarshal) the value coder: if it is an iterable,
		// this KV is actually the encoding of a CoGBK result.
		elm, err := b.peek(id)
		if err != nil {
			return nil, err
		}

		isGBK := elm.GetSpec().GetSpec().GetUrn() == urnIterableCoder
		if isGBK {
			id = elm.GetComponentCoderIds()[0]
			kind = coder.CoGBK
			root = typex.CoGBKType

			// TODO(BEAM-490): If CoGBK with > 1 input, handle as special GBK. We expect
			// it to be encoded as CoGBK<K,LP<CoGBKList<V,W,..>>>. Remove this handling once
			// CoGBK has a first-class representation.

			if ids, ok := b.isCoGBKList(id); ok {
				// CoGBK<K,V,W,..>

				values, err := b.Coders(ids)
				if err != nil {
					return nil, err
				}

				t := typex.New(root, append([]typex.FullType{key.T}, coder.Types(values)...)...)
				return &coder.Coder{Kind: kind, T: t, Components: append([]*coder.Coder{key}, values...)}, nil
			}
		}

		value, err := b.Coder(id)
		if err != nil {
			return nil, err
		}

		t := typex.New(root, key.T, value.T)
		return &coder.Coder{Kind: kind, T: t, Components: []*coder.Coder{key, value}}, nil

	case urnLengthPrefixCoder:
		if len(components) != 1 {
			return nil, fmt.Errorf("bad length prefix: %v", c)
		}

		elm, err := b.peek(components[0])
		if err != nil {
			return nil, err
		}
		// Only custom coders are expected under a length prefix here.
		if elm.GetSpec().GetSpec().GetUrn() != urnCustomCoder {
			// TODO(herohde) 11/17/2017: revisit this restriction
			return nil, fmt.Errorf("expected length prefix of custom coder only: %v", elm)
		}

		var ref v1.CustomCoder
		if err := protox.DecodeBase64(string(elm.GetSpec().GetSpec().GetPayload()), &ref); err != nil {
			return nil, err
		}
		custom, err := decodeCustomCoder(&ref)
		if err != nil {
			return nil, err
		}
		t := typex.New(custom.Type)
		return &coder.Coder{Kind: coder.Custom, T: t, Custom: custom}, nil

	case urnWindowedValueCoder:
		if len(components) != 2 {
			return nil, fmt.Errorf("bad windowed value: %v", c)
		}
		elm, err := b.Coder(components[0])
		if err != nil {
			return nil, err
		}
		w, err := b.WindowCoder(components[1])
		if err != nil {
			return nil, err
		}
		t := typex.New(typex.WindowedValueType, elm.T)
		return &coder.Coder{Kind: coder.WindowedValue, T: t, Components: []*coder.Coder{elm}, Window: w}, nil

	case streamType:
		// A raw stream coder is only valid as the value side of a pair.
		return nil, fmt.Errorf("stream must be pair value: %v", c)

	case "":
		// TODO(herohde) 11/27/2017: we still see CoderRefs from Dataflow. Handle that
		// case here, for now, so that the harness can use this logic.

		payload := c.GetSpec().GetSpec().GetPayload()

		var ref CoderRef
		if err := json.Unmarshal(payload, &ref); err != nil {
			return nil, fmt.Errorf("failed to decode urn-less coder payload \"%v\": %v", string(payload), err)
		}
		c, err := DecodeCoderRef(&ref)
		if err != nil {
			return nil, fmt.Errorf("failed to translate coder \"%v\": %v", string(payload), err)
		}
		return c, nil

	default:
		return nil, fmt.Errorf("custom coders must be length prefixed: %v", c)
	}
}

// peek returns the raw model coder for the given id without unmarshalling it.
func (b *CoderUnmarshaller) peek(id string) (*pb.Coder, error) {
	c, ok := b.models[id]
	if !ok {
		return nil, fmt.Errorf("coder with id %v not found", id)
	}
	return c, nil
}

// isCoGBKList reports whether the coder with the given id is the SDK's
// LP<CoGBKList<...>> encoding, returning the list's component coder ids if so.
func (b *CoderUnmarshaller) isCoGBKList(id string) ([]string, bool) {
	elm, err := b.peek(id)
	if err != nil {
		return nil, false
	}
	if elm.GetSpec().GetSpec().GetUrn() != urnLengthPrefixCoder {
		return nil, false
	}
	elm2, err := b.peek(elm.GetComponentCoderIds()[0])
	if err != nil {
		return nil, false
	}
	if elm2.GetSpec().GetSpec().GetUrn() != urnCoGBKList {
		return nil, false
	}
	return elm2.GetComponentCoderIds(), true
}

// CoderMarshaller incrementally builds a compact model representation of a set
// of coders. Identical coders are shared.
type CoderMarshaller struct {
	coders   map[string]*pb.Coder
	coder2id map[string]string // index of serialized coders to id to deduplicate
}

// NewCoderMarshaller returns a new CoderMarshaller.
func NewCoderMarshaller() *CoderMarshaller {
	return &CoderMarshaller{
		coders:   make(map[string]*pb.Coder),
		coder2id: make(map[string]string),
	}
}

// Add adds the given coder to the set and returns its id. Idempotent.
func (b *CoderMarshaller) Add(c *coder.Coder) string {
	switch c.Kind {
	case coder.Custom:
		// Custom coders are encoded as LP<custom>, with the serialized coder
		// carried base64-encoded in the function spec payload.
		ref, err := encodeCustomCoder(c.Custom)
		if err != nil {
			panic(fmt.Sprintf("failed to encode custom coder: %v", err))
		}
		data, err := protox.EncodeBase64(ref)
		if err != nil {
			panic(fmt.Sprintf("failed to marshal custom coder: %v", err))
		}
		inner := b.internCoder(&pb.Coder{
			Spec: &pb.SdkFunctionSpec{
				Spec: &pb.FunctionSpec{
					Urn:     urnCustomCoder,
					Payload: []byte(data),
				},
				// TODO(BEAM-3204): coders should not have environments.
			},
		})
		return b.internBuiltInCoder(urnLengthPrefixCoder, inner)

	case coder.KV:
		comp := b.AddMulti(c.Components)
		return b.internBuiltInCoder(urnKVCoder, comp...)

	case coder.CoGBK:
		// CoGBK<K,V,..> is encoded as KV<K, Iterable<V>> — or, with multiple
		// value coders, KV<K, Iterable<LP<CoGBKList<V,W,..>>>>.
		comp := b.AddMulti(c.Components)

		value := comp[1]
		if len(comp) > 2 {
			// TODO(BEAM-490): don't inject union coder for CoGBK.

			union := b.internBuiltInCoder(urnCoGBKList, comp[1:]...)
			value = b.internBuiltInCoder(urnLengthPrefixCoder, union)
		}

		stream := b.internBuiltInCoder(urnIterableCoder, value)
		return b.internBuiltInCoder(urnKVCoder, comp[0], stream)

	case coder.WindowedValue:
		comp := b.AddMulti(c.Components)
		comp = append(comp, b.AddWindowCoder(c.Window))
		return b.internBuiltInCoder(urnWindowedValueCoder, comp...)

	case coder.Bytes:
		// TODO(herohde) 6/27/2017: add length-prefix and not assume nested by context?
		return b.internBuiltInCoder(urnBytesCoder)

	case coder.VarInt:
		return b.internBuiltInCoder(urnVarIntCoder)

	default:
		panic(fmt.Sprintf("Unexpected coder kind: %v", c.Kind))
	}
}

// AddMulti adds the given coders to the set and returns their ids. Idempotent.
func (b *CoderMarshaller) AddMulti(list []*coder.Coder) []string {
	var ids []string
	for _, c := range list {
		ids = append(ids, b.Add(c))
	}
	return ids
}

// AddWindowCoder adds a window coder.
func (b *CoderMarshaller) AddWindowCoder(w *coder.WindowCoder) string { switch w.Kind { case coder.GlobalWindow: return b.internBuiltInCoder(urnGlobalWindow) case coder.IntervalWindow: return b.internBuiltInCoder(urnIntervalWindow) default: panic(fmt.Sprintf("Unexpected window kind: %v", w.Kind)) } } // Build returns the set of model coders. Note that the map may be larger // than the number of coders added, because component coders are included. func (b *CoderMarshaller) Build() map[string]*pb.Coder { return b.coders } func (b *CoderMarshaller) internBuiltInCoder(urn string, components ...string) string { return b.internCoder(&pb.Coder{ Spec: &pb.SdkFunctionSpec{ Spec: &pb.FunctionSpec{ Urn: urn, }, }, ComponentCoderIds: components, }) } func (b *CoderMarshaller) internCoder(coder *pb.Coder) string { key := proto.MarshalTextString(coder) if id, exists := b.coder2id[key]; exists { return id } id := fmt.Sprintf("c%v", len(b.coder2id)) b.coder2id[key] = id b.coders[id] = coder return id }
Java
// Copyright 2004 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.apache.tapestry.vlib.ejb.impl;

import java.rmi.RemoteException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.ejb.CreateException;
import javax.ejb.FinderException;
import javax.ejb.RemoveException;
import javax.ejb.SessionBean;
import javax.ejb.SessionContext;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.rmi.PortableRemoteObject;
import javax.sql.DataSource;

import org.apache.tapestry.Tapestry;
import org.apache.tapestry.contrib.ejb.XCreateException;
import org.apache.tapestry.contrib.ejb.XEJBException;
import org.apache.tapestry.contrib.ejb.XRemoveException;
import org.apache.tapestry.contrib.jdbc.IStatement;
import org.apache.tapestry.contrib.jdbc.StatementAssembly;
import org.apache.tapestry.vlib.ejb.Book;
import org.apache.tapestry.vlib.ejb.BorrowException;
import org.apache.tapestry.vlib.ejb.IBook;
import org.apache.tapestry.vlib.ejb.IBookHome;
import org.apache.tapestry.vlib.ejb.IPerson;
import org.apache.tapestry.vlib.ejb.IPersonHome;
import org.apache.tapestry.vlib.ejb.IPublisher;
import org.apache.tapestry.vlib.ejb.IPublisherHome;
import org.apache.tapestry.vlib.ejb.LoginException;
import org.apache.tapestry.vlib.ejb.Person;
import org.apache.tapestry.vlib.ejb.Publisher;
import org.apache.tapestry.vlib.ejb.RegistrationException;
import org.apache.tapestry.vlib.ejb.SortColumn;
import org.apache.tapestry.vlib.ejb.SortOrdering;

/**
 * Implementation of the {@link org.apache.tapestry.vlib.ejb.IOperations}
 * stateless session bean.
 *
 * <p>Implements a number of stateless operations for the front end, mixing
 * entity-bean access (for updates) with direct JDBC queries (for reads).
 *
 * @version $Id$
 * @author Howard Lewis Ship
 *
 **/

public class OperationsBean implements SessionBean
{
    // Session context supplied by the EJB container; retained but not
    // otherwise used by this bean.
    private SessionContext _context;

    // JNDI environment naming context (java:comp/env); transient because
    // stateless session beans may be serialized between invocations.
    private transient Context _environment;

    // Lazily-resolved home interfaces; see the getXXXHome() accessors below.
    private transient IBookHome _bookHome;
    private transient IPersonHome _personHome;
    private transient IPublisherHome _publisherHome;

    /**
     * Data source, retrieved from the ENC property
     * "jdbc/dataSource".
     *
     **/

    private transient DataSource _dataSource;

    /**
     * Sets up the bean. Locates the {@link DataSource} for the bean
     * as <code>jdbc/dataSource</code> within the ENC; this data source is
     * later used by {@link #getConnection()}.
     *
     **/

    public void ejbCreate()
    {
        Context initial;

        try
        {
            initial = new InitialContext();
            _environment = (Context) initial.lookup("java:comp/env");
        }
        catch (NamingException e)
        {
            throw new XEJBException("Could not lookup environment.", e);
        }

        try
        {
            _dataSource = (DataSource) _environment.lookup("jdbc/dataSource");
        }
        catch (NamingException e)
        {
            e.printStackTrace();

            throw new XEJBException("Could not lookup data source.", e);
        }
    }

    public void ejbRemove()
    {
    }

    /**
     * Does nothing, not invoked in stateless session beans.
     **/

    public void ejbPassivate()
    {
    }

    public void setSessionContext(SessionContext value)
    {
        _context = value;
    }

    /**
     * Does nothing, not invoked in stateless session beans.
     *
     **/

    public void ejbActivate()
    {
    }

    /**
     * Finds the book and borrower (by their primary keys) and updates the book.
     *
     * <p>The {@link Book} value object is returned.
     *
     **/

    public Book borrowBook(Integer bookId, Integer borrowerId)
        throws FinderException, RemoteException, BorrowException
    {
        IBookHome bookHome = getBookHome();
        IPersonHome personHome = getPersonHome();

        IBook book = bookHome.findByPrimaryKey(bookId);

        if (!book.getLendable())
            throw new BorrowException("Book may not be borrowed.");

        // Verify that the borrower exists.

        personHome.findByPrimaryKey(borrowerId);

        // TBD: Check that borrower has authenticated

        // findByPrimaryKey() throws an exception if the EJB doesn't exist,
        // so we're safe.

        personHome.findByPrimaryKey(book.getOwnerId());

        // Here's the real work; just setting the holder of the book
        // to be the borrower.

        book.setHolderId(borrowerId);

        return getBook(bookId);
    }

    /**
     * Adds a new book, verifying that the publisher and holder actually exist.
     * Stamps the book with the current time as its "dateAdded" attribute and
     * returns the new book's primary key.
     *
     **/

    public Integer addBook(Map attributes) throws CreateException, RemoteException
    {
        IBookHome home = getBookHome();

        attributes.put("dateAdded", new Timestamp(System.currentTimeMillis()));

        IBook book = home.create(attributes);

        return (Integer) book.getPrimaryKey();
    }

    /**
     * Adds a book, which will be owned and held by the specified owner.
     *
     * <p>The publisherName may either be the name of a known publisher, or
     * a new name. A new {@link IPublisher} will be created as necessary.
     *
     * <p>Returns the primary key of the newly created book.
     *
     **/

    public Integer addBook(Map attributes, String publisherName)
        throws CreateException, RemoteException
    {
        IPublisher publisher = null;
        IPublisherHome publisherHome = getPublisherHome();

        // Find or create the publisher.

        try
        {
            publisher = publisherHome.findByName(publisherName);
        }
        catch (FinderException e)
        {
            // Ignore, means that no publisher with the given name already exists.
        }

        if (publisher == null)
            publisher = publisherHome.create(publisherName);

        attributes.put("publisherId", publisher.getPrimaryKey());

        return addBook(attributes);
    }

    /**
     * Updates a book's attributes in place.
     *
     * @param bookId The primary key of the book to update.
     * @param attributes the entity attributes to change
     *
     **/

    public void updateBook(Integer bookId, Map attributes)
        throws FinderException, RemoteException
    {
        IBookHome bookHome = getBookHome();

        IBook book = bookHome.findByPrimaryKey(bookId);

        book.updateEntityAttributes(attributes);
    }

    /**
     * Updates a book, adding a new Publisher at the same time.
     *
     *
     * @param bookId The primary key of the book to update.
     * @param attributes attributes to change
     * @param publisherName The name of the new publisher.
     * @throws FinderException if the book, holder or publisher can not be located.
     * @throws CreateException if the {@link IPublisher} can not be created.
     **/

    public void updateBook(Integer bookId, Map attributes, String publisherName)
        throws CreateException, FinderException, RemoteException
    {
        IPublisher publisher = null;

        IPublisherHome publisherHome = getPublisherHome();

        try
        {
            publisher = publisherHome.findByName(publisherName);
        }
        catch (FinderException e)
        {
            // Ignore, means we need to create the Publisher
        }

        if (publisher == null)
            publisher = publisherHome.create(publisherName);

        // Don't duplicate all that other code!

        attributes.put("publisherId", publisher.getPrimaryKey());

        updateBook(bookId, attributes);
    }

    /**
     * Updates the attributes of a {@link IPerson} identified by primary key.
     *
     **/

    public void updatePerson(Integer personId, Map attributes)
        throws FinderException, RemoteException
    {
        IPersonHome home = getPersonHome();

        IPerson person = home.findByPrimaryKey(personId);

        person.updateEntityAttributes(attributes);
    }

    /**
     * Fetches all publishers via direct JDBC (bypassing the entity beans),
     * sorted by name, and converts each row into a {@link Publisher}
     * value object.
     *
     **/

    public Publisher[] getPublishers()
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        List list = new ArrayList();

        try
        {
            connection = getConnection();

            StatementAssembly assembly = new StatementAssembly();
            assembly.newLine("SELECT PUBLISHER_ID, NAME");
            assembly.newLine("FROM PUBLISHER");
            assembly.newLine("ORDER BY NAME");

            statement = assembly.createStatement(connection);

            set = statement.executeQuery();

            while (set.next())
            {
                Integer primaryKey = (Integer) set.getObject(1);
                String name = set.getString(2);

                list.add(new Publisher(primaryKey, name));
            }
        }
        catch (SQLException ex)
        {
            ex.printStackTrace();

            throw new XEJBException("Could not fetch all Publishers.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }

        // Convert from List to Publisher[]

        return (Publisher[]) list.toArray(new Publisher[list.size()]);
    }

    /**
     * Fetches all {@link IPerson} beans in the database and converts them
     * to {@link Person} objects.
     *
     * Returns the {@link Person}s sorted by last name, then first.
     **/

    public Person[] getPersons()
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        List list = new ArrayList();

        try
        {
            connection = getConnection();

            StatementAssembly assembly = buildBasePersonQuery();
            assembly.newLine("ORDER BY LAST_NAME, FIRST_NAME");

            statement = assembly.createStatement(connection);

            set = statement.executeQuery();

            // Scratch array reused for each row; convertRowToPerson fills it in.
            Object[] columns = new Object[Person.N_COLUMNS];

            while (set.next())
            {
                list.add(convertRowToPerson(set, columns));
            }
        }
        catch (SQLException ex)
        {
            throw new XEJBException("Could not fetch all Persons.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }

        return (Person[]) list.toArray(new Person[list.size()]);
    }

    /**
     * Gets the {@link Person} for primary key.
     *
     * @throws FinderException if the Person does not exist.
     **/

    public Person getPerson(Integer personId) throws FinderException
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        Person result = null;

        try
        {
            connection = getConnection();

            StatementAssembly assembly = buildBasePersonQuery();
            assembly.newLine("WHERE ");
            assembly.add("PERSON_ID = ");
            assembly.addParameter(personId);
            assembly.newLine("ORDER BY LAST_NAME, FIRST_NAME");

            statement = assembly.createStatement(connection);

            set = statement.executeQuery();

            if (!set.next())
                throw new FinderException("Person #" + personId + " does not exist.");

            Object[] columns = new Object[Person.N_COLUMNS];
            result = convertRowToPerson(set, columns);
        }
        catch (SQLException ex)
        {
            throw new XEJBException("Unable to perform database query.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }

        return result;
    }

    /**
     * Attempts to log a user in by e-mail address and password. On success,
     * updates the person's last-access timestamp and returns the
     * {@link Person}.
     *
     * NOTE(review): passwords are compared in plaintext here — presumably
     * acceptable for this demo application; confirm before reuse.
     *
     * @throws LoginException if the e-mail is unknown, the password does not
     * match, or the account is locked out.
     **/

    public Person login(String email, String password) throws RemoteException, LoginException
    {
        IPersonHome home = getPersonHome();
        IPerson person = null;
        Person result = null;

        try
        {
            person = home.findByEmail(email);
        }
        catch (FinderException ex)
        {
            throw new LoginException("Unknown e-mail address.", false);
        }

        if (!person.getPassword().equals(password))
            throw new LoginException("Invalid password.", true);

        try
        {
            result = getPerson((Integer) person.getPrimaryKey());
        }
        catch (FinderException ex)
        {
            throw new LoginException("Could not read person.", false);
        }

        if (result.isLockedOut())
            throw new LoginException("You have been locked out of the Virtual Library.", false);

        // Set the last access time for any subsequent login.

        person.setLastAccess(new Timestamp(System.currentTimeMillis()));

        return result;
    }

    /**
     * Returns the entity attributes of the {@link IPerson} with the given
     * primary key, as a {@link Map}.
     *
     **/

    public Map getPersonAttributes(Integer personId) throws FinderException, RemoteException
    {
        IPersonHome home = getPersonHome();

        IPerson person = home.findByPrimaryKey(personId);

        return person.getEntityAttributes();
    }

    /**
     * Retrieves a single {@link Book} by its primary key.
     *
     * @throws FinderException if the Book does not exist.
     *
     **/

    public Book getBook(Integer bookId) throws FinderException
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        Book result = null;

        try
        {
            connection = getConnection();

            StatementAssembly assembly = buildBaseBookQuery();
            assembly.addSep(" AND ");
            assembly.add("book.BOOK_ID = ");
            assembly.addParameter(bookId);

            statement = assembly.createStatement(connection);

            set = statement.executeQuery();

            if (!set.next())
                throw new FinderException("Book " + bookId + " does not exist.");

            Object[] columns = new Object[Book.N_COLUMNS];
            result = convertRowToBook(set, columns);
        }
        catch (SQLException ex)
        {
            throw new XEJBException("Unable to perform database query.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }

        return result;
    }

    /**
     * Returns the entity attributes of the {@link IBook} with the given
     * primary key, as a {@link Map}.
     *
     **/

    public Map getBookAttributes(Integer bookId) throws FinderException, RemoteException
    {
        IBookHome home = getBookHome();

        IBook book = home.findByPrimaryKey(bookId);

        return book.getEntityAttributes();
    }

    /**
     * Attempts to register a new user, first checking that the
     * e-mail and names are unique. Returns the newly created {@link Person}.
     *
     **/

    public Person registerNewUser(String firstName, String lastName, String email, String password)
        throws RegistrationException, CreateException, RemoteException
    {
        IPersonHome home;

        if (password == null || password.trim().length() == 0)
            throw new RegistrationException("Must specify a password.");

        validateUniquePerson(firstName, lastName, email);

        home = getPersonHome();

        Map attributes = new HashMap();

        attributes.put("lastName", lastName.trim());
        attributes.put("firstName", firstName.trim());
        attributes.put("email", email.trim());
        attributes.put("password", password.trim());
        attributes.put("lastAccess", new Timestamp(System.currentTimeMillis()));

        IPerson person = home.create(attributes);

        Integer personId = (Integer) person.getPrimaryKey();

        try
        {
            return getPerson(personId);
        }
        catch (FinderException ex)
        {
            throw new XCreateException("Unable to find newly created Person.", ex);
        }
    }

    /**
     * Deletes the book with the given primary key, returning its last state
     * as a {@link Book} value object.
     *
     **/

    public Book deleteBook(Integer bookId) throws RemoveException, RemoteException
    {
        IBookHome home = getBookHome();
        Book result = null;

        try
        {
            result = getBook(bookId);
        }
        catch (FinderException ex)
        {
            throw new XRemoveException(ex);
        }

        home.remove(bookId);

        return result;
    }

    /**
     * Transfers a number of books to a new owner.
     *
     **/

    public void transferBooks(Integer newOwnerId, Integer[] bookIds)
        throws FinderException, RemoteException
    {
        if (bookIds == null)
            throw new RemoteException("Must supply non-null list of books to transfer.");

        if (newOwnerId == null)
            throw new RemoteException("Must provide an owner for the books.");

        // Verify that the new owner exists.

        IPersonHome personHome = getPersonHome();
        personHome.findByPrimaryKey(newOwnerId);

        // Direct SQL would be more efficient, but this'll probably do.

        IBookHome home = getBookHome();

        for (int i = 0; i < bookIds.length; i++)
        {
            IBook book = home.findByPrimaryKey(bookIds[i]);

            book.setOwnerId(newOwnerId);
        }
    }

    /**
     * Applies renames to and deletions of publishers, as directed by an
     * administrative screen. Either array may be null.
     *
     **/

    public void updatePublishers(Publisher[] updated, Integer[] deleted)
        throws FinderException, RemoveException, RemoteException
    {
        IPublisherHome home = getPublisherHome();

        if (updated != null)
        {
            for (int i = 0; i < updated.length; i++)
            {
                IPublisher publisher = home.findByPrimaryKey(updated[i].getId());

                publisher.setName(updated[i].getName());
            }
        }

        if (deleted != null)
        {
            for (int i = 0; i < deleted.length; i++)
            {
                home.remove(deleted[i]);
            }
        }
    }

    /**
     * Bulk administrative update of persons: applies admin/locked-out flags,
     * resets passwords, and deletes users. Books belonging to deleted users
     * are returned to their owners and then reassigned to the given admin
     * before the user records are removed.
     *
     **/

    public void updatePersons(
        Person[] updated,
        Integer[] resetPassword,
        String newPassword,
        Integer[] deleted,
        Integer adminId)
        throws FinderException, RemoveException, RemoteException
    {
        IPersonHome home = getPersonHome();

        int count = Tapestry.size(updated);

        for (int i = 0; i < count; i++)
        {
            Person u = updated[i];

            IPerson person = home.findByPrimaryKey(u.getId());

            person.setAdmin(u.isAdmin());
            person.setLockedOut(u.isLockedOut());
        }

        count = Tapestry.size(resetPassword);

        for (int i = 0; i < count; i++)
        {
            IPerson person = home.findByPrimaryKey(resetPassword[i]);

            person.setPassword(newPassword);
        }

        count = Tapestry.size(deleted);

        if (count > 0)
        {
            returnBooksFromDeletedPersons(deleted);

            moveBooksFromDeletedPersons(deleted, adminId);
        }

        for (int i = 0; i < count; i++)
            home.remove(deleted[i]);
    }

    /**
     * Invoked to update all books owned by people about to be deleted, to
     * reassign the books holder back to the owner.
     *
     **/

    private void returnBooksFromDeletedPersons(Integer deletedPersonIds[]) throws RemoveException
    {
        StatementAssembly assembly = new StatementAssembly();

        assembly.add("UPDATE BOOK");
        assembly.newLine("SET HOLDER_ID = OWNER_ID");
        assembly.newLine("WHERE HOLDER_ID IN (");
        assembly.addParameterList(deletedPersonIds, ", ");
        assembly.add(")");

        executeUpdate(assembly);
    }

    /**
     * Invoked to execute a bulk update that moves books to the new admin.
     *
     **/

    private void moveBooksFromDeletedPersons(Integer deletedPersonIds[], Integer adminId)
        throws RemoveException
    {
        StatementAssembly assembly = new StatementAssembly();

        assembly.add("UPDATE BOOK");
        assembly.newLine("SET OWNER_ID = ");
        assembly.addParameter(adminId);
        assembly.newLine("WHERE OWNER_ID IN (");
        assembly.addParameterList(deletedPersonIds, ", ");
        assembly.add(")");

        executeUpdate(assembly);
    }

    /**
     * Executes the given assembled update statement, wrapping any
     * {@link SQLException} as an {@link XRemoveException}. The statement and
     * connection are nulled after an explicit close so that the finally
     * block's {@link #close(Connection, IStatement, ResultSet)} does not
     * close them a second time.
     *
     **/

    private void executeUpdate(StatementAssembly assembly) throws XRemoveException
    {
        Connection connection = null;
        IStatement statement = null;

        try
        {
            connection = getConnection();

            statement = assembly.createStatement(connection);

            statement.executeUpdate();

            statement.close();
            statement = null;

            connection.close();
            connection = null;
        }
        catch (SQLException ex)
        {
            throw new XRemoveException(
                "Unable to execute " + assembly + ": " + ex.getMessage(),
                ex);
        }
        finally
        {
            close(connection, statement, null);
        }
    }

    /**
     * Translates the next row from the result set into a {@link Book}.
     *
     * <p>This works with queries generated by {@link #buildBaseBookQuery()}.
     *
     **/

    protected Book convertRowToBook(ResultSet set, Object[] columns) throws SQLException
    {
        int column = 1;

        // Column order here must match BOOK_SELECT_COLUMNS exactly.
        columns[Book.ID_COLUMN] = set.getObject(column++);
        columns[Book.TITLE_COLUMN] = set.getString(column++);
        columns[Book.DESCRIPTION_COLUMN] = set.getString(column++);
        columns[Book.ISBN_COLUMN] = set.getString(column++);
        columns[Book.OWNER_ID_COLUMN] = set.getObject(column++);
        columns[Book.OWNER_NAME_COLUMN] =
            buildName(set.getString(column++), set.getString(column++));
        columns[Book.HOLDER_ID_COLUMN] = set.getObject(column++);
        columns[Book.HOLDER_NAME_COLUMN] =
            buildName(set.getString(column++), set.getString(column++));
        columns[Book.PUBLISHER_ID_COLUMN] = set.getObject(column++);
        columns[Book.PUBLISHER_NAME_COLUMN] = set.getString(column++);
        columns[Book.AUTHOR_COLUMN] = set.getString(column++);
        columns[Book.HIDDEN_COLUMN] = getBoolean(set, column++);
        columns[Book.LENDABLE_COLUMN] = getBoolean(set, column++);
        columns[Book.DATE_ADDED_COLUMN] = set.getTimestamp(column++);

        return new Book(columns);
    }

    /**
     * Joins a first and last name with a space; if the first name is null,
     * returns just the last name.
     *
     **/

    private String buildName(String firstName, String lastName)
    {
        if (firstName == null)
            return lastName;

        return firstName + " " + lastName;
    }

    /**
     * All queries must use this exact set of select columns, so that
     * {@link #convertRowToBook(ResultSet, Object[])} can build
     * the correct {@link Book} from each row.
     *
     **/

    private static final String[] BOOK_SELECT_COLUMNS =
        {
            "book.BOOK_ID",
            "book.TITLE",
            "book.DESCRIPTION",
            "book.ISBN",
            "owner.PERSON_ID",
            "owner.FIRST_NAME",
            "owner.LAST_NAME",
            "holder.PERSON_ID",
            "holder.FIRST_NAME",
            "holder.LAST_NAME",
            "publisher.PUBLISHER_ID",
            "publisher.NAME",
            "book.AUTHOR",
            "book.HIDDEN",
            "book.LENDABLE",
            "book.DATE_ADDED" };

    // Table aliases used by the base book query; joined below.
    private static final String[] BOOK_ALIAS_COLUMNS =
        { "BOOK book", "PERSON owner", "PERSON holder", "PUBLISHER publisher" };

    // Join conditions tying books to their owner, holder and publisher.
    private static final String[] BOOK_JOINS =
        {
            "book.OWNER_ID = owner.PERSON_ID",
            "book.HOLDER_ID = holder.PERSON_ID",
            "book.PUBLISHER_ID = publisher.PUBLISHER_ID" };

    // ORDER BY fragments keyed by SortColumn, for ascending and descending
    // sorts; see addSortOrdering().
    private static final Map BOOK_SORT_ASCENDING = new HashMap();
    private static final Map BOOK_SORT_DESCENDING = new HashMap();

    static {
        BOOK_SORT_ASCENDING.put(SortColumn.TITLE, "book.TITLE");
        BOOK_SORT_ASCENDING.put(SortColumn.HOLDER, "holder.LAST_NAME, holder.FIRST_NAME");
        BOOK_SORT_ASCENDING.put(SortColumn.OWNER, "owner.FIRST_NAME, owner.LAST_NAME");
        BOOK_SORT_ASCENDING.put(SortColumn.PUBLISHER, "publisher.NAME");
        BOOK_SORT_ASCENDING.put(SortColumn.AUTHOR, "book.AUTHOR");

        BOOK_SORT_DESCENDING.put(SortColumn.TITLE, "book.TITLE DESC");
        BOOK_SORT_DESCENDING.put(
            SortColumn.HOLDER,
            "holder.LAST_NAME DESC, holder.FIRST_NAME DESC");
        BOOK_SORT_DESCENDING.put(SortColumn.OWNER, "owner.FIRST_NAME DESC, owner.LAST_NAME DESC");
        BOOK_SORT_DESCENDING.put(SortColumn.PUBLISHER, "publisher.NAME DESC");
        BOOK_SORT_DESCENDING.put(SortColumn.AUTHOR, "book.AUTHOR DESC");
    }

    /**
     * Builds the shared SELECT/FROM/WHERE skeleton for all book queries;
     * callers append additional conditions and ordering.
     *
     **/

    protected StatementAssembly buildBaseBookQuery()
    {
        StatementAssembly result = new StatementAssembly();

        result.newLine("SELECT ");
        result.addList(BOOK_SELECT_COLUMNS, ", ");
        result.newLine("FROM ");
        result.addList(BOOK_ALIAS_COLUMNS, ", ");
        result.newLine("WHERE ");
        result.addList(BOOK_JOINS, " AND ");

        return result;
    }

    /**
     * Adds a sort ordering clause to the statement. If ordering is null,
     * orders by book title.
     *
     * @param assembly to update
     * @param ordering defines the column to sort on, and the order (ascending or descending)
     * @since 3.0
     *
     *
     **/

    protected void addSortOrdering(StatementAssembly assembly, SortOrdering ordering)
    {
        if (ordering == null)
        {
            assembly.newLine("ORDER BY book.TITLE");
            return;
        }

        // Look up the ORDER BY fragment for the requested column/direction.
        Map sorts = ordering.isDescending() ? BOOK_SORT_DESCENDING : BOOK_SORT_ASCENDING;

        String term = (String) sorts.get(ordering.getColumn());

        assembly.newLine("ORDER BY ");
        assembly.add(term);
    }

    /**
     * Appends a case-insensitive LIKE condition on the given column for the
     * given search value; does nothing if the value is null or blank.
     *
     **/

    protected void addSubstringSearch(StatementAssembly assembly, String column, String value)
    {
        if (value == null)
            return;

        String trimmed = value.trim();
        if (trimmed.length() == 0)
            return;

        // Here's the McKoi dependency: LOWER() is a database-specific
        // SQL function.

        assembly.addSep(" AND LOWER(");
        assembly.add(column);
        assembly.add(") LIKE");
        assembly.addParameter("%" + trimmed.toLowerCase() + "%");
    }

    /**
     * Closes the resultSet (if not null), then the statement (if not null),
     * then the Connection (if not null). Exceptions are written to System.out.
* **/ protected void close(Connection connection, IStatement statement, ResultSet resultSet) { if (resultSet != null) { try { resultSet.close(); } catch (SQLException ex) { System.out.println("Exception closing result set."); ex.printStackTrace(); } } if (statement != null) { try { statement.close(); } catch (SQLException ex) { System.out.println("Exception closing statement."); ex.printStackTrace(); } } if (connection != null) { try { connection.close(); } catch (SQLException ex) { System.out.println("Exception closing connection."); ex.printStackTrace(); } } } private IPersonHome getPersonHome() { if (_personHome == null) { try { Object raw = _environment.lookup("ejb/Person"); _personHome = (IPersonHome) PortableRemoteObject.narrow(raw, IPersonHome.class); } catch (NamingException ex) { throw new XEJBException("Could not lookup Person home interface.", ex); } } return _personHome; } private IPublisherHome getPublisherHome() { if (_publisherHome == null) { try { Object raw = _environment.lookup("ejb/Publisher"); _publisherHome = (IPublisherHome) PortableRemoteObject.narrow(raw, IPublisherHome.class); } catch (NamingException e) { throw new XEJBException("Could not lookup Publisher home interface.", e); } } return _publisherHome; } private IBookHome getBookHome() { if (_bookHome == null) { try { Object raw = _environment.lookup("ejb/Book"); _bookHome = (IBookHome) PortableRemoteObject.narrow(raw, IBookHome.class); } catch (NamingException e) { throw new XEJBException("Could not lookup Book home interface.", e); } } return _bookHome; } /** * Gets a new connection from the data source. 
* **/ protected Connection getConnection() { try { return _dataSource.getConnection(); } catch (SQLException e) { throw new XEJBException("Unable to get database connection from pool.", e); } } protected StatementAssembly buildBasePersonQuery() { StatementAssembly result; result = new StatementAssembly(); result.newLine("SELECT PERSON_ID, FIRST_NAME, LAST_NAME, EMAIL, "); result.newLine(" LOCKED_OUT, ADMIN, LAST_ACCESS"); result.newLine("FROM PERSON"); return result; } /** * Translates the next row from the result set into a {@link Person}. * * <p>This works with queries generated by {@link #buildBasePersonQuery()}. * **/ protected Person convertRowToPerson(ResultSet set, Object[] columns) throws SQLException { int column = 1; columns[Person.ID_COLUMN] = set.getObject(column++); columns[Person.FIRST_NAME_COLUMN] = set.getString(column++); columns[Person.LAST_NAME_COLUMN] = set.getString(column++); columns[Person.EMAIL_COLUMN] = set.getString(column++); columns[Person.LOCKED_OUT_COLUMN] = getBoolean(set, column++); columns[Person.ADMIN_COLUMN] = getBoolean(set, column++); columns[Person.LAST_ACCESS_COLUMN] = set.getTimestamp(column++); return new Person(columns); } private Boolean getBoolean(ResultSet set, int index) throws SQLException { return set.getBoolean(index) ? 
Boolean.TRUE : Boolean.FALSE; } private void validateUniquePerson(String firstName, String lastName, String email) throws RegistrationException { Connection connection = null; IStatement statement = null; ResultSet set = null; String trimmedEmail = email.trim().toLowerCase(); String trimmedLastName = lastName.trim().toLowerCase(); String trimmedFirstName = firstName.trim().toLowerCase(); try { connection = getConnection(); StatementAssembly assembly = new StatementAssembly(); assembly.newLine("SELECT PERSON_ID"); assembly.newLine("FROM PERSON"); assembly.newLine("WHERE "); assembly.add("LOWER(EMAIL) = "); assembly.addParameter(trimmedEmail); statement = assembly.createStatement(connection); set = statement.executeQuery(); if (set.next()) throw new RegistrationException("Email address is already in use by another user."); close(null, statement, set); assembly = new StatementAssembly(); assembly.newLine("SELECT PERSON_ID"); assembly.newLine("FROM PERSON"); assembly.newLine("WHERE "); assembly.add("LOWER(FIRST_NAME) = "); assembly.addParameter(trimmedFirstName); assembly.addSep(" AND "); assembly.add("LOWER(LAST_NAME) = "); assembly.addParameter(trimmedLastName); statement = assembly.createStatement(connection); set = statement.executeQuery(); if (set.next()) throw new RegistrationException("Name provided is already in use by another user."); } catch (SQLException e) { throw new RegistrationException("Could not access database: " + e.getMessage(), e); } finally { close(connection, statement, set); } } public Book returnBook(Integer bookId) throws RemoteException, FinderException { IBookHome bookHome = getBookHome(); IBook book = bookHome.findByPrimaryKey(bookId); Integer ownerPK = book.getOwnerId(); book.setHolderId(ownerPK); return getBook(bookId); } }
Java
/*
 * Copyright © 2014 - 2018 Leipzig University (Database Research Group)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Contains implementations of graph pattern matching on a single input graph.
 */
package org.gradoop.flink.model.impl.operators.matching.transactional.function;
Java
package org.sakaiproject.scorm.ui.player.behaviors; import org.adl.api.ecmascript.SCORM13APIInterface; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.scorm.model.api.ScoBean; import org.sakaiproject.scorm.model.api.SessionBean; import org.sakaiproject.scorm.navigation.INavigable; import org.sakaiproject.scorm.navigation.INavigationEvent; import org.sakaiproject.scorm.service.api.ScormApplicationService; import org.sakaiproject.scorm.service.api.ScormSequencingService; public abstract class SCORM13API implements SCORM13APIInterface { private static Log log = LogFactory.getLog(SCORM13API.class); // String value of FALSE for JavaScript returns. protected static final String STRING_FALSE = "false"; // String value of TRUE for JavaScript returns. protected static final String STRING_TRUE = "true"; public abstract SessionBean getSessionBean(); public abstract ScormApplicationService getApplicationService(); public abstract ScormSequencingService getSequencingService(); public abstract ScoBean getScoBean(); public abstract INavigable getAgent(); public abstract Object getTarget(); // Implementation of SCORM13APIInterface public String Commit(String parameter) { // TODO: Disable UI controls -- or throttle them on server -- don't mess with js // Assume failure String result = STRING_FALSE; if (null == getSessionBean()) { log.error("Null run state!"); } if (getApplicationService().commit(parameter, getSessionBean(), getScoBean())) result = STRING_TRUE; // TODO: Enable UI controls return result; } public String GetDiagnostic(String errorCode) { return getApplicationService().getDiagnostic(errorCode, getSessionBean()); } public String GetErrorString(String errorCode) { return getApplicationService().getErrorString(errorCode, getSessionBean()); } public String GetLastError() { return getApplicationService().getLastError(getSessionBean()); } public String GetValue(String parameter) { return 
getApplicationService().getValue(parameter, getSessionBean(), getScoBean()); } public String Initialize(String parameter) { // Assume failure String result = STRING_FALSE; if (getApplicationService().initialize(parameter, getSessionBean(), getScoBean())) result = STRING_TRUE; return result; } public String SetValue(String dataModelElement, String value) { // Assume failure String result = STRING_FALSE; if (getApplicationService().setValue(dataModelElement, value, getSessionBean(), getScoBean())) { result = STRING_TRUE; } return result; } public String Terminate(String parameter) { // Assume failure String result = STRING_FALSE; if (null == getSessionBean()) { log.error("Null run state!"); return result; } INavigationEvent navigationEvent = getApplicationService().newNavigationEvent(); boolean isSuccessful = getApplicationService().terminate(parameter, navigationEvent, getSessionBean(), getScoBean()); if (isSuccessful) { result = STRING_TRUE; if (navigationEvent.isChoiceEvent()) { getSequencingService().navigate(navigationEvent.getChoiceEvent(), getSessionBean(), getAgent(), getTarget()); } else { getSequencingService().navigate(navigationEvent.getEvent(), getSessionBean(), getAgent(), getTarget()); } } return result; } }
Java
/* * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #pragma once #include <aws/alexaforbusiness/AlexaForBusiness_EXPORTS.h> #include <aws/core/utils/memory/stl/AWSString.h> #include <utility> namespace Aws { template<typename RESULT_TYPE> class AmazonWebServiceResult; namespace Utils { namespace Json { class JsonValue; } // namespace Json } // namespace Utils namespace AlexaForBusiness { namespace Model { class AWS_ALEXAFORBUSINESS_API CreateAddressBookResult { public: CreateAddressBookResult(); CreateAddressBookResult(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result); CreateAddressBookResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result); /** * <p>The ARN of the newly created address book.</p> */ inline const Aws::String& GetAddressBookArn() const{ return m_addressBookArn; } /** * <p>The ARN of the newly created address book.</p> */ inline void SetAddressBookArn(const Aws::String& value) { m_addressBookArn = value; } /** * <p>The ARN of the newly created address book.</p> */ inline void SetAddressBookArn(Aws::String&& value) { m_addressBookArn = std::move(value); } /** * <p>The ARN of the newly created address book.</p> */ inline void SetAddressBookArn(const char* value) { m_addressBookArn.assign(value); } /** * <p>The ARN of the newly created address book.</p> */ inline CreateAddressBookResult& WithAddressBookArn(const Aws::String& value) { SetAddressBookArn(value); return *this;} /** * <p>The ARN 
of the newly created address book.</p> */ inline CreateAddressBookResult& WithAddressBookArn(Aws::String&& value) { SetAddressBookArn(std::move(value)); return *this;} /** * <p>The ARN of the newly created address book.</p> */ inline CreateAddressBookResult& WithAddressBookArn(const char* value) { SetAddressBookArn(value); return *this;} private: Aws::String m_addressBookArn; }; } // namespace Model } // namespace AlexaForBusiness } // namespace Aws
Java
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.util; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import junit.framework.TestCase; /** * @version */ public class CaseInsensitiveMapTest extends TestCase { public void testLookupCaseAgnostic() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); map.put("foo", "cheese"); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals("cheese", map.get("FOO")); } public void testLookupCaseAgnosticAddHeader() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); map.put("foo", "cheese"); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals("cheese", map.get("FOO")); assertNull(map.get("unknown")); map.put("bar", 
"beer"); assertEquals("beer", map.get("bar")); assertEquals("beer", map.get("Bar")); assertEquals("beer", map.get("BAR")); assertNull(map.get("unknown")); } public void testLookupCaseAgnosticAddHeader2() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); map.put("foo", "cheese"); assertEquals("cheese", map.get("FOO")); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertNull(map.get("unknown")); map.put("bar", "beer"); assertEquals("beer", map.get("BAR")); assertEquals("beer", map.get("bar")); assertEquals("beer", map.get("Bar")); assertNull(map.get("unknown")); } public void testLookupCaseAgnosticAddHeaderRemoveHeader() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); map.put("foo", "cheese"); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals("cheese", map.get("FOO")); assertNull(map.get("unknown")); map.put("bar", "beer"); assertEquals("beer", map.get("bar")); assertEquals("beer", map.get("Bar")); assertEquals("beer", map.get("BAR")); assertNull(map.get("unknown")); map.remove("bar"); assertNull(map.get("bar")); assertNull(map.get("unknown")); } public void testSetWithDifferentCase() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); map.put("foo", "cheese"); map.put("Foo", "bar"); assertEquals("bar", map.get("FOO")); assertEquals("bar", map.get("foo")); assertEquals("bar", map.get("Foo")); } public void testRemoveWithDifferentCase() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); map.put("foo", "cheese"); map.put("Foo", "bar"); assertEquals("bar", map.get("FOO")); assertEquals("bar", map.get("foo")); assertEquals("bar", map.get("Foo")); map.remove("FOO"); assertEquals(null, map.get("foo")); assertEquals(null, map.get("Foo")); assertEquals(null, map.get("FOO")); assertTrue(map.isEmpty()); } public void testPutAll() { Map<String, Object> map = new 
CaseInsensitiveMap(); assertNull(map.get("foo")); Map<String, Object> other = new CaseInsensitiveMap(); other.put("Foo", "cheese"); other.put("bar", 123); map.putAll(other); assertEquals("cheese", map.get("FOO")); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals(123, map.get("BAR")); assertEquals(123, map.get("bar")); assertEquals(123, map.get("BaR")); // key case should be preserved Map<String, Object> keys = new HashMap<String, Object>(); keys.putAll(map); assertEquals("cheese", keys.get("Foo")); assertNull(keys.get("foo")); assertNull(keys.get("FOO")); assertEquals(123, keys.get("bar")); assertNull(keys.get("Bar")); assertNull(keys.get("BAR")); } public void testPutAllOther() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); Map<String, Object> other = new HashMap<String, Object>(); other.put("Foo", "cheese"); other.put("bar", 123); map.putAll(other); assertEquals("cheese", map.get("FOO")); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals(123, map.get("BAR")); assertEquals(123, map.get("bar")); assertEquals(123, map.get("BaR")); } public void testPutAllEmpty() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("foo", "cheese"); Map<String, Object> other = new HashMap<String, Object>(); map.putAll(other); assertEquals("cheese", map.get("FOO")); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals(1, map.size()); } public void testConstructFromOther() { Map<String, Object> other = new HashMap<String, Object>(); other.put("Foo", "cheese"); other.put("bar", 123); Map<String, Object> map = new CaseInsensitiveMap(other); assertEquals("cheese", map.get("FOO")); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals(123, map.get("BAR")); assertEquals(123, map.get("bar")); assertEquals(123, map.get("BaR")); } public void testKeySet() { Map<String, Object> map = new 
CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", 123); map.put("baZ", "beer"); Set keys = map.keySet(); // we should be able to lookup no matter what case assertTrue(keys.contains("Foo")); assertTrue(keys.contains("foo")); assertTrue(keys.contains("FOO")); assertTrue(keys.contains("BAR")); assertTrue(keys.contains("bar")); assertTrue(keys.contains("Bar")); assertTrue(keys.contains("baZ")); assertTrue(keys.contains("baz")); assertTrue(keys.contains("Baz")); assertTrue(keys.contains("BAZ")); } public void testRetainKeysCopyToAnotherMap() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", 123); map.put("baZ", "beer"); Map<String, Object> other = new HashMap<String, Object>(map); // we should retain the cases of the original keys // when its copied to another map assertTrue(other.containsKey("Foo")); assertFalse(other.containsKey("foo")); assertFalse(other.containsKey("FOO")); assertTrue(other.containsKey("BAR")); assertFalse(other.containsKey("bar")); assertFalse(other.containsKey("Bar")); assertTrue(other.containsKey("baZ")); assertFalse(other.containsKey("baz")); assertFalse(other.containsKey("Baz")); assertFalse(other.containsKey("BAZ")); } public void testValues() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", "123"); map.put("baZ", "Beer"); Iterator it = map.values().iterator(); // should be String values assertEquals("String", it.next().getClass().getSimpleName()); assertEquals("String", it.next().getClass().getSimpleName()); assertEquals("String", it.next().getClass().getSimpleName()); Collection values = map.values(); assertEquals(3, values.size()); assertTrue(values.contains("cheese")); assertTrue(values.contains("123")); assertTrue(values.contains("Beer")); } public void testRomeks() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("foo", "cheese"); assertEquals(1, map.size()); assertEquals("cheese", map.get("fOo")); assertEquals(true, 
map.containsKey("foo")); assertEquals(true, map.containsKey("FOO")); assertEquals(true, map.keySet().contains("FOO")); map.put("FOO", "cake"); assertEquals(1, map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("FOO")); assertEquals("cake", map.get("fOo")); } public void testRomeksUsingRegularHashMap() { Map<String, Object> map = new HashMap<String, Object>(); map.put("foo", "cheese"); assertEquals(1, map.size()); assertEquals(null, map.get("fOo")); assertEquals(true, map.containsKey("foo")); assertEquals(false, map.containsKey("FOO")); assertEquals(false, map.keySet().contains("FOO")); map.put("FOO", "cake"); assertEquals(2, map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("FOO")); assertEquals(null, map.get("fOo")); assertEquals("cheese", map.get("foo")); assertEquals("cake", map.get("FOO")); } public void testRomeksTransferredToHashMapAfterwards() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("FOO", "cake"); assertEquals(1, map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("FOO")); Map<String, Object> other = new HashMap<String, Object>(map); assertEquals(false, other.containsKey("foo")); assertEquals(true, other.containsKey("FOO")); assertEquals(1, other.size()); } public void testSerialization() throws Exception { CaseInsensitiveMap testMap = new CaseInsensitiveMap(); testMap.put("key", "value"); // force entry set to be created which could cause the map to be non serializable testMap.entrySet(); ByteArrayOutputStream bStream = new ByteArrayOutputStream(); ObjectOutputStream objStream = new ObjectOutputStream(bStream); objStream.writeObject(testMap); ObjectInputStream inStream = new ObjectInputStream(new ByteArrayInputStream(bStream.toByteArray())); CaseInsensitiveMap testMapCopy = (CaseInsensitiveMap) inStream.readObject(); assertTrue(testMapCopy.containsKey("key")); } public void 
testCopyToAnotherMapPreserveKeyCaseEntrySet() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", "cake"); assertEquals(2, map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("bar")); Map<String, Object> other = new HashMap<String, Object>(); for (Map.Entry<String, Object> entry : map.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); other.put(key, value); } assertEquals(false, other.containsKey("foo")); assertEquals(true, other.containsKey("Foo")); assertEquals(false, other.containsKey("bar")); assertEquals(true, other.containsKey("BAR")); assertEquals(2, other.size()); } public void testCopyToAnotherMapPreserveKeyCasePutAll() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", "cake"); assertEquals(2, map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("bar")); Map<String, Object> other = new HashMap<String, Object>(); other.putAll(map); assertEquals(false, other.containsKey("foo")); assertEquals(true, other.containsKey("Foo")); assertEquals(false, other.containsKey("bar")); assertEquals(true, other.containsKey("BAR")); assertEquals(2, other.size()); } public void testCopyToAnotherMapPreserveKeyCaseCtr() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", "cake"); assertEquals(2, map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("bar")); Map<String, Object> other = new HashMap<String, Object>(map); assertEquals(false, other.containsKey("foo")); assertEquals(true, other.containsKey("Foo")); assertEquals(false, other.containsKey("bar")); assertEquals(true, other.containsKey("BAR")); assertEquals(2, other.size()); } public void testCopyToAnotherMapPreserveKeyKeySet() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", "cake"); assertEquals(2, 
map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("bar")); Map<String, Object> other = new HashMap<String, Object>(); // this is wrong!!! you should use entrySet for (String key : map.keySet()) { Object value = map.get(key); other.put(key, value); } // now the keys will be in lower case assertEquals(true, other.containsKey("foo")); assertEquals(false, other.containsKey("Foo")); assertEquals(true, other.containsKey("bar")); assertEquals(false, other.containsKey("BAR")); assertEquals(2, other.size()); } public void testConcurrent() throws Exception { ExecutorService service = Executors.newFixedThreadPool(5); final CountDownLatch latch = new CountDownLatch(1000); final Map<String, Object> map = new CaseInsensitiveMap(); // do some stuff concurrently for (int i = 0; i < 1000; i++) { final int count = i; service.submit(new Runnable() { public void run() { Map<String, Object> foo = new CaseInsensitiveMap(); foo.put("counter" + count, count); foo.put("foo", 123); foo.put("bar", 456); foo.put("cake", "cheese"); // copy foo to map as map is a shared resource map.putAll(foo); latch.countDown(); } }); } latch.await(10, TimeUnit.SECONDS); assertEquals(1003, map.size()); assertEquals(true, map.containsKey("counter0")); assertEquals(true, map.containsKey("counter500")); assertEquals(true, map.containsKey("counter999")); assertEquals(123, map.get("FOO")); assertEquals(456, map.get("Bar")); assertEquals("cheese", map.get("cAKe")); service.shutdownNow(); } public void testCopyMapWithCamelHeadersTest() throws Exception { Map<String, Object> map = new CaseInsensitiveMap(); map.put("CamelA", "A"); map.put("CamelB", "B"); map.put("CamelC", "C"); // retain maps so we can profile that the map doesn't duplicate // camel keys as they are intern List<Map> maps = new ArrayList<Map>(); for (int i = 0; i < 10000; i++) { Map<String, Object> copy = new CaseInsensitiveMap(map); assertEquals(3, copy.size()); assertEquals("A", copy.get("CamelA")); 
assertEquals("B", copy.get("CamelB")); assertEquals("C", copy.get("CamelC")); maps.add(copy); } assertEquals(10000, maps.size()); assertEquals(3, map.size()); assertEquals("A", map.get("CamelA")); assertEquals("B", map.get("CamelB")); assertEquals("C", map.get("CamelC")); // use a memory profiler to see memory allocation // often you may want to give it time to run so you // have chance to capture memory snapshot in profiler // Thread.sleep(9999999); } }
Java
/*
 * Copyright DbMaintain.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.dbmaintain.script.parser.impl;

import org.dbmaintain.script.parser.ScriptParser;
import org.dbmaintain.script.parser.parsingstate.ParsingState;
import org.dbmaintain.util.DbMaintainException;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.Properties;

/**
 * A class for parsing statements out of sql scripts.
 * <p/>
 * All statements should be separated with a semicolon (;). The last statement will be
 * added even if it does not end with a semicolon. The semicolons will not be included in the returned statements.
 * <p/>
 * This parser also takes quoted literals, double quoted text and in-line (--comment) and block (/ * comment * /)
 * into account when parsing the statements.
 *
 * @author Tim Ducheyne
 * @author Filip Neven
 * @author Stefan Bangels
 */
public class DefaultScriptParser implements ScriptParser {

    /**
     * The reader for the script content stream
     */
    protected Reader scriptReader;

    /**
     * Whether backslash escaping is enabled
     */
    protected boolean backSlashEscapingEnabled;

    /**
     * Parameters that must be replaced in the script. Null if there are no such parameters
     */
    protected Properties scriptParameters;

    /**
     * The starting state
     */
    protected ParsingState initialParsingState;

    /**
     * True if the script has ended
     */
    protected boolean endOfScriptReached = false;

    /**
     * The current parsed character
     */
    protected Character currentChar, nextChar;


    /**
     * Constructor for DefaultScriptParser.
     *
     * @param scriptReader             the reader that will provide the script content, not null
     * @param initialParsingState      the inial state when starting to parse a script, not null
     * @param backSlashEscapingEnabled true if backslash escaping is enabled
     * @param scriptParameters         parameters that must be replaced in the script. null if there are no such parameters.
     */
    public DefaultScriptParser(Reader scriptReader, ParsingState initialParsingState, boolean backSlashEscapingEnabled, Properties scriptParameters) {
        this.backSlashEscapingEnabled = backSlashEscapingEnabled;
        this.initialParsingState = initialParsingState;
        this.scriptParameters = scriptParameters;
        // Wrap in a BufferedReader for efficient single-character reads.
        // (Previously the field was first assigned the raw reader and then
        // immediately overwritten — a dead store, now removed.)
        this.scriptReader = new BufferedReader(scriptReader);
    }


    /**
     * Parses the next statement out of the given script stream.
     *
     * @return the statements, null if no more statements
     */
    public String getNextStatement() {
        try {
            return getNextStatementImpl();
        } catch (IOException e) {
            throw new DbMaintainException("Unable to parse next statement from script.", e);
        }
    }


    /**
     * Actual implementation of getNextStatement.
     *
     * @return the statements, null if no more statements
     * @throws IOException if a problem occurs reading the script from the file system
     */
    protected String getNextStatementImpl() throws IOException {
        StatementBuilder statementBuilder = createStatementBuilder();
        // Make sure that we read currentChar when we start reading a new script. If not null, currentChar was already
        // set to the first character of the next statement when we read the previous statement.
        if (currentChar == null) {
            currentChar = readNextCharacter();
        }
        while (!endOfScriptReached) {
            if (currentChar == null) {
                endOfScriptReached = true;
            }
            nextChar = readNextCharacter();
            statementBuilder.addCharacter(currentChar, nextChar);
            currentChar = nextChar;
            if (statementBuilder.isComplete()) {
                if (statementBuilder.hasExecutableContent()) {
                    return statementBuilder.buildStatement();
                }
                // Complete but empty (e.g. only comments/whitespace): start over.
                statementBuilder = createStatementBuilder();
            }
        }
        if (!statementBuilder.isComplete() && statementBuilder.hasExecutableContent()) {
            throw new DbMaintainException("Last statement in script was not ended correctly.");
        }
        return null;
    }

    /**
     * Reads one character from the script stream.
     *
     * @return the next character, null when the end of the stream is reached
     */
    protected Character readNextCharacter() throws IOException {
        int charAsInt = scriptReader.read();
        return charAsInt == -1 ? null : (char) charAsInt;
    }


    /**
     * Factory method for the statement builder.
     *
     * @return The statement builder, not null
     */
    protected StatementBuilder createStatementBuilder() {
        return new StatementBuilder(initialParsingState, scriptParameters);
    }

}
Java
import logging
import re
import socket

from mopidy.config import validators
from mopidy.internal import log, path


def decode(value):
    """Convert escaped forms (e.g. the two characters ``\\n``) in a raw
    config string back to the real characters they represent.

    Bytes input is decoded as UTF-8 with surrogate escapes first.
    """
    if isinstance(value, bytes):
        value = value.decode(errors="surrogateescape")

    # str.encode("unicode-escape") yields the escaped spelling of each
    # character (e.g. "\n" -> "\\n"); replace that spelling with the char.
    for char in ("\\", "\n", "\t"):
        value = value.replace(
            char.encode(encoding="unicode-escape").decode(), char
        )

    return value


def encode(value):
    """Inverse of :func:`decode`: replace real backslash/newline/tab
    characters with their escaped spellings so the value can be stored in
    a config file.
    """
    if isinstance(value, bytes):
        value = value.decode(errors="surrogateescape")

    for char in ("\\", "\n", "\t"):
        value = value.replace(
            char, char.encode(encoding="unicode-escape").decode()
        )

    return value


class DeprecatedValue:
    """Sentinel returned by :class:`Deprecated` for ignored config values."""

    pass


class ConfigValue:
    """Represents a config key's value and how to handle it.

    Normally you will only be interacting with sub-classes for config values
    that encode either deserialization behavior and/or validation.

    Each config value should be used for the following actions:

    1. Deserializing from a raw string and validating, raising ValueError on
       failure.
    2. Serializing a value back to a string that can be stored in a config.
    3. Formatting a value to a printable form (useful for masking secrets).

    :class:`None` values should not be deserialized, serialized or formatted,
    the code interacting with the config should simply skip None config
    values.
    """

    def deserialize(self, value):
        """Cast raw string to appropriate type."""
        return decode(value)

    def serialize(self, value, display=False):
        """Convert value back to string for saving."""
        if value is None:
            return ""
        return str(value)


class Deprecated(ConfigValue):
    """Deprecated value.

    Used for ignoring old config values that are no longer in use, but should
    not cause the config parser to crash.
    """

    def deserialize(self, value):
        return DeprecatedValue()

    def serialize(self, value, display=False):
        return DeprecatedValue()


class String(ConfigValue):
    """String value.

    Is decoded as utf-8 and \\n \\t escapes should work and be preserved.
    """

    def __init__(self, optional=False, choices=None):
        self._required = not optional
        self._choices = choices

    def deserialize(self, value):
        value = decode(value).strip()
        validators.validate_required(value, self._required)
        if not value:
            return None
        validators.validate_choice(value, self._choices)
        return value

    def serialize(self, value, display=False):
        if value is None:
            return ""
        return encode(value)


class Secret(String):
    """Secret string value.

    Is decoded as utf-8 and \\n \\t escapes should work and be preserved.

    Should be used for passwords, auth tokens etc. Will mask value when being
    displayed.
    """

    def __init__(self, optional=False, choices=None):
        self._required = not optional
        self._choices = None  # Choices doesn't make sense for secrets

    def serialize(self, value, display=False):
        # Mask the value when formatting for display only.
        if value is not None and display:
            return "********"
        return super().serialize(value, display)


class Integer(ConfigValue):
    """Integer value, optionally restricted to a range and/or choices."""

    def __init__(
        self, minimum=None, maximum=None, choices=None, optional=False
    ):
        self._required = not optional
        self._minimum = minimum
        self._maximum = maximum
        self._choices = choices

    def deserialize(self, value):
        value = decode(value)
        validators.validate_required(value, self._required)
        if not value:
            return None
        # Raises ValueError on non-numeric input before range checks run.
        value = int(value)
        validators.validate_choice(value, self._choices)
        validators.validate_minimum(value, self._minimum)
        validators.validate_maximum(value, self._maximum)
        return value


class Boolean(ConfigValue):
    """Boolean value.

    Accepts ``1``, ``yes``, ``true``, and ``on`` with any casing as
    :class:`True`.

    Accepts ``0``, ``no``, ``false``, and ``off`` with any casing as
    :class:`False`.
    """

    true_values = ("1", "yes", "true", "on")
    false_values = ("0", "no", "false", "off")

    def __init__(self, optional=False):
        self._required = not optional

    def deserialize(self, value):
        value = decode(value)
        validators.validate_required(value, self._required)
        if not value:
            return None
        if value.lower() in self.true_values:
            return True
        elif value.lower() in self.false_values:
            return False
        raise ValueError(f"invalid value for boolean: {value!r}")

    def serialize(self, value, display=False):
        # Note: `value is True` is an identity check, but `value in
        # (False, None)` is an equality check, so 0 serializes to "false"
        # while 1 raises. Only real booleans (and None) are fully supported.
        if value is True:
            return "true"
        elif value in (False, None):
            return "false"
        else:
            raise ValueError(f"{value!r} is not a boolean")


class List(ConfigValue):
    """List value.

    Supports elements split by commas or newlines. Newlines take precedence
    and empty list items will be filtered out.
    """

    def __init__(self, optional=False):
        self._required = not optional

    def deserialize(self, value):
        value = decode(value)
        # If any newline is present, split only on newlines so values may
        # themselves contain commas.
        if "\n" in value:
            values = re.split(r"\s*\n\s*", value)
        else:
            values = re.split(r"\s*,\s*", value)
        values = tuple(v.strip() for v in values if v.strip())
        validators.validate_required(values, self._required)
        return tuple(values)

    def serialize(self, value, display=False):
        if not value:
            return ""
        return "\n " + "\n ".join(encode(v) for v in value if v)


class LogColor(ConfigValue):
    """Log color value, validated against ``mopidy.internal.log.COLORS``.

    Stored and returned lowercased.
    """

    def deserialize(self, value):
        value = decode(value)
        validators.validate_choice(value.lower(), log.COLORS)
        return value.lower()

    def serialize(self, value, display=False):
        # Unknown colors serialize to the empty string instead of raising.
        if value.lower() in log.COLORS:
            return encode(value.lower())
        return ""


class LogLevel(ConfigValue):
    """Log level value.

    Expects one of ``critical``, ``error``, ``warning``, ``info``, ``debug``,
    ``trace``, or ``all``, with any casing.
    """

    levels = {
        "critical": logging.CRITICAL,
        "error": logging.ERROR,
        "warning": logging.WARNING,
        "info": logging.INFO,
        "debug": logging.DEBUG,
        "trace": log.TRACE_LOG_LEVEL,
        "all": logging.NOTSET,
    }

    def deserialize(self, value):
        value = decode(value)
        validators.validate_choice(value.lower(), self.levels.keys())
        return self.levels.get(value.lower())

    def serialize(self, value, display=False):
        # Reverse-map the numeric level back to its name; unknown levels
        # serialize to the empty string.
        lookup = {v: k for k, v in self.levels.items()}
        if value in lookup:
            return encode(lookup[value])
        return ""


class Hostname(ConfigValue):
    """Network hostname value."""

    def __init__(self, optional=False):
        self._required = not optional

    # NOTE(review): the extra `display` parameter is unused in this body and
    # is not part of the deserialize() signature on the other types — confirm
    # whether any caller actually passes it.
    def deserialize(self, value, display=False):
        value = decode(value).strip()
        validators.validate_required(value, self._required)
        if not value:
            return None

        # Presumably detects "unix:..." socket addresses and validates them
        # as file system paths — verify against mopidy.internal.path.
        # (Path is defined later in this module; that is fine at call time.)
        socket_path = path.get_unix_socket_path(value)
        if socket_path is not None:
            path_str = Path(not self._required).deserialize(socket_path)
            return f"unix:{path_str}"

        try:
            socket.getaddrinfo(value, None)
        except OSError:
            raise ValueError("must be a resolveable hostname or valid IP")

        return value


class Port(Integer):
    """Network port value.

    Expects integer in the range 0-65535, zero tells the kernel to simply
    allocate a port for us.
    """

    def __init__(self, choices=None, optional=False):
        super().__init__(
            minimum=0, maximum=2 ** 16 - 1, choices=choices, optional=optional
        )


class _ExpandedPath(str):
    """A str subclass holding the expanded path, remembering the original
    (unexpanded) spelling in :attr:`original`."""

    def __new__(cls, original, expanded):
        return super().__new__(cls, expanded)

    def __init__(self, original, expanded):
        self.original = original


class Path(ConfigValue):
    """File system path.

    The following expansions of the path will be done:

    - ``~`` to the current user's home directory
    - ``$XDG_CACHE_DIR`` according to the XDG spec
    - ``$XDG_CONFIG_DIR`` according to the XDG spec
    - ``$XDG_DATA_DIR`` according to the XDG spec
    - ``$XDG_MUSIC_DIR`` according to the XDG spec
    """

    def __init__(self, optional=False):
        self._required = not optional

    def deserialize(self, value):
        value = decode(value).strip()
        expanded = path.expand_path(value)
        validators.validate_required(value, self._required)
        validators.validate_required(expanded, self._required)
        if not value or expanded is None:
            return None
        # Keep both spellings: str(value) is the expanded path, .original
        # is what the user wrote.
        return _ExpandedPath(value, expanded)

    def serialize(self, value, display=False):
        if isinstance(value, _ExpandedPath):
            # Write back the original, unexpanded form.
            value = value.original
        if isinstance(value, bytes):
            value = value.decode(errors="surrogateescape")
        return value
Java
// AnyChatCallCenterServerDlg.h : header file // #if !defined(AFX_ANYCHATCALLCENTERSERVERDLG_H__69ADA4B7_BCD7_435B_A14D_20271C905BA1__INCLUDED_) #define AFX_ANYCHATCALLCENTERSERVERDLG_H__69ADA4B7_BCD7_435B_A14D_20271C905BA1__INCLUDED_ #if _MSC_VER > 1000 #pragma once #endif // _MSC_VER > 1000 #include <list> class CAnyChatCallCenterServerDlg : public CDialog { // Construction public: CAnyChatCallCenterServerDlg(CWnd* pParent = NULL); // standard constructor public: CString m_strLogInfo; ///< ±£´æÈÕÖ¾ÐÅÏ¢ // ÏÔʾÈÕÖ¾ÐÅÏ¢ void AppendLogString(CString logstr); // ³õʼ»¯ÒµÎñ¶ÓÁÐ void InitAnyChatQueue(void); // Dialog Data //{{AFX_DATA(CAnyChatCallCenterServerDlg) enum { IDD = IDD_ANYCHATCALLCENTERSERVER_DIALOG }; CEdit m_ctrlEditLog; CComboBox m_ComboStyle; int m_iTargetId; BOOL m_bShowUserLog; //}}AFX_DATA // ClassWizard generated virtual function overrides //{{AFX_VIRTUAL(CAnyChatCallCenterServerDlg) protected: virtual void DoDataExchange(CDataExchange* pDX); // DDX/DDV support //}}AFX_VIRTUAL // Implementation protected: HICON m_hIcon; // Generated message map functions //{{AFX_MSG(CAnyChatCallCenterServerDlg) virtual BOOL OnInitDialog(); afx_msg void OnPaint(); afx_msg HCURSOR OnQueryDragIcon(); afx_msg void OnDestroy(); afx_msg void OnButtonSendbuf(); afx_msg void OnButtonTransFile(); afx_msg void OnButtonTransBufferEx(); afx_msg void OnButtonTransBuffer(); afx_msg void OnButtonStartRecord(); afx_msg void OnButtonStopRecord(); afx_msg void OnCheckShowLog(); afx_msg void OnTimer(UINT nIDEvent); afx_msg void OnButtonKickOut(); afx_msg void OnButtonHangUp(); //}}AFX_MSG DECLARE_MESSAGE_MAP() }; //{{AFX_INSERT_LOCATION}} // Microsoft Visual C++ will insert additional declarations immediately before the previous line. #endif // !defined(AFX_ANYCHATCALLCENTERSERVERDLG_H__69ADA4B7_BCD7_435B_A14D_20271C905BA1__INCLUDED_)
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_09-icedtea) on Sat Mar 30 09:57:52 CET 2013 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>org.togglz.core.proxy (Togglz 1.1.1.Final API)</title>
<meta name="date" content="2013-03-30">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
</head>
<body>
<h1 class="bar"><a href="../../../../org/togglz/core/proxy/package-summary.html" target="classFrame">org.togglz.core.proxy</a></h1>
<div class="indexContainer">
<h2 title="Classes">Classes</h2>
<ul title="Classes">
<li><a href="FeatureProxyInvocationHandler.html" title="class in org.togglz.core.proxy" target="classFrame">FeatureProxyInvocationHandler</a></li>
</ul>
</div>
</body>
</html>
Java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.platform; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteAtomicSequence; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteDataStreamer; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.cluster.BaselineNode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.NearCacheConfiguration; import org.apache.ignite.configuration.PlatformConfiguration; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.binary.BinaryRawReaderEx; import org.apache.ignite.internal.binary.BinaryRawWriterEx; import org.apache.ignite.internal.cluster.DetachedClusterNode; import org.apache.ignite.internal.logger.platform.PlatformLogger; import org.apache.ignite.internal.processors.GridProcessorAdapter; import org.apache.ignite.internal.processors.cache.IgniteCacheProxy; import org.apache.ignite.internal.processors.datastreamer.DataStreamerImpl; import org.apache.ignite.internal.processors.datastructures.GridCacheAtomicLongImpl; import 
org.apache.ignite.internal.processors.platform.binary.PlatformBinaryProcessor; import org.apache.ignite.internal.processors.platform.cache.PlatformCache; import org.apache.ignite.internal.processors.platform.cache.PlatformCacheExtension; import org.apache.ignite.internal.processors.platform.cache.affinity.PlatformAffinity; import org.apache.ignite.internal.processors.platform.cache.store.PlatformCacheStore; import org.apache.ignite.internal.processors.platform.cluster.PlatformClusterGroup; import org.apache.ignite.internal.processors.platform.datastreamer.PlatformDataStreamer; import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicLong; import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicReference; import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicSequence; import org.apache.ignite.internal.processors.platform.dotnet.PlatformDotNetCacheStore; import org.apache.ignite.internal.processors.platform.memory.PlatformMemory; import org.apache.ignite.internal.processors.platform.memory.PlatformOutputStream; import org.apache.ignite.internal.processors.platform.transactions.PlatformTransactions; import org.apache.ignite.internal.processors.platform.utils.PlatformConfigurationUtils; import org.apache.ignite.internal.processors.platform.utils.PlatformUtils; import org.apache.ignite.internal.util.typedef.CI1; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteFuture; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import static 
org.apache.ignite.internal.processors.platform.PlatformAbstractTarget.FALSE; import static org.apache.ignite.internal.processors.platform.PlatformAbstractTarget.TRUE; import static org.apache.ignite.internal.processors.platform.client.ClientConnectionContext.CURRENT_VER; /** * GridGain platform processor. */ @SuppressWarnings({"unchecked"}) public class PlatformProcessorImpl extends GridProcessorAdapter implements PlatformProcessor, PlatformTarget { /** */ private static final int OP_GET_CACHE = 1; /** */ private static final int OP_CREATE_CACHE = 2; /** */ private static final int OP_GET_OR_CREATE_CACHE = 3; /** */ private static final int OP_CREATE_CACHE_FROM_CONFIG = 4; /** */ private static final int OP_GET_OR_CREATE_CACHE_FROM_CONFIG = 5; /** */ private static final int OP_DESTROY_CACHE = 6; /** */ private static final int OP_GET_AFFINITY = 7; /** */ private static final int OP_GET_DATA_STREAMER = 8; /** */ private static final int OP_GET_TRANSACTIONS = 9; /** */ private static final int OP_GET_CLUSTER_GROUP = 10; /** */ private static final int OP_GET_EXTENSION = 11; /** */ private static final int OP_GET_ATOMIC_LONG = 12; /** */ private static final int OP_GET_ATOMIC_REFERENCE = 13; /** */ private static final int OP_GET_ATOMIC_SEQUENCE = 14; /** */ private static final int OP_GET_IGNITE_CONFIGURATION = 15; /** */ private static final int OP_GET_CACHE_NAMES = 16; /** */ private static final int OP_CREATE_NEAR_CACHE = 17; /** */ private static final int OP_GET_OR_CREATE_NEAR_CACHE = 18; /** */ private static final int OP_LOGGER_IS_LEVEL_ENABLED = 19; /** */ private static final int OP_LOGGER_LOG = 20; /** */ private static final int OP_GET_BINARY_PROCESSOR = 21; /** */ private static final int OP_RELEASE_START = 22; /** */ private static final int OP_ADD_CACHE_CONFIGURATION = 23; /** */ private static final int OP_SET_BASELINE_TOPOLOGY_VER = 24; /** */ private static final int OP_SET_BASELINE_TOPOLOGY_NODES = 25; /** */ private static final int 
OP_GET_BASELINE_TOPOLOGY = 26; /** */ private static final int OP_DISABLE_WAL = 27; /** */ private static final int OP_ENABLE_WAL = 28; /** */ private static final int OP_IS_WAL_ENABLED = 29; /** */ private static final int OP_SET_TX_TIMEOUT_ON_PME = 30; /** Start latch. */ private final CountDownLatch startLatch = new CountDownLatch(1); /** Stores pending initialization. */ private final Collection<StoreInfo> pendingStores = Collections.newSetFromMap(new ConcurrentHashMap<StoreInfo, Boolean>()); /** Lock for store lifecycle operations. */ private final ReadWriteLock storeLock = new ReentrantReadWriteLock(); /** Logger. */ @SuppressWarnings("FieldCanBeLocal") private final IgniteLogger log; /** Context. */ private final PlatformContext platformCtx; /** Interop configuration. */ private final PlatformConfigurationEx interopCfg; /** Extensions. */ private final PlatformPluginExtension[] extensions; /** Whether processor is started. */ private boolean started; /** Whether processor if stopped (or stopping). */ private volatile boolean stopped; /** Cache extensions. */ private final PlatformCacheExtension[] cacheExts; /** Cluster restart flag for the reconnect callback. */ private volatile boolean clusterRestarted; /** * Constructor. * * @param ctx Kernal context. 
*/ public PlatformProcessorImpl(GridKernalContext ctx) { super(ctx); log = ctx.log(PlatformProcessorImpl.class); PlatformConfiguration interopCfg0 = ctx.config().getPlatformConfiguration(); assert interopCfg0 != null : "Must be checked earlier during component creation."; if (!(interopCfg0 instanceof PlatformConfigurationEx)) throw new IgniteException("Unsupported platform configuration: " + interopCfg0.getClass().getName()); interopCfg = (PlatformConfigurationEx)interopCfg0; if (!F.isEmpty(interopCfg.warnings())) { for (String w : interopCfg.warnings()) U.warn(log, w); } platformCtx = new PlatformContextImpl(ctx, interopCfg.gate(), interopCfg.memory(), interopCfg.platform()); // Initialize cache extensions (if any). cacheExts = prepareCacheExtensions(interopCfg.cacheExtensions()); if (interopCfg.logger() != null) interopCfg.logger().setContext(platformCtx); // Initialize extensions (if any). extensions = prepareExtensions(ctx.plugins().extensions(PlatformPluginExtension.class)); } /** {@inheritDoc} */ @Override public void start() throws IgniteCheckedException { try (PlatformMemory mem = platformCtx.memory().allocate()) { PlatformOutputStream out = mem.output(); BinaryRawWriterEx writer = platformCtx.writer(out); writer.writeString(ctx.igniteInstanceName()); out.synchronize(); platformCtx.gateway().onStart(new PlatformTargetProxyImpl(this, platformCtx), mem.pointer()); } // At this moment all necessary native libraries must be loaded, so we can process with store creation. storeLock.writeLock().lock(); try { for (StoreInfo store : pendingStores) registerStore0(store.store, store.convertBinary); pendingStores.clear(); started = true; } finally { storeLock.writeLock().unlock(); } // Add Interop node attributes. 
ctx.addNodeAttribute(PlatformUtils.ATTR_PLATFORM, interopCfg.platform()); } /** {@inheritDoc} */ @Override public void onKernalStop(boolean cancel) { startLatch.countDown(); } /** {@inheritDoc} */ @Override public void stop(boolean cancel) throws IgniteCheckedException { if (platformCtx != null) { stopped = true; platformCtx.gateway().onStop(); } } /** {@inheritDoc} */ @Override public Ignite ignite() { return ctx.grid(); } /** {@inheritDoc} */ @Override public long environmentPointer() { return platformCtx.gateway().environmentPointer(); } /** {@inheritDoc} */ @Override public void releaseStart() { startLatch.countDown(); } /** {@inheritDoc} */ @Override public void awaitStart() throws IgniteCheckedException { U.await(startLatch); } /** {@inheritDoc} */ @Override public PlatformContext context() { return platformCtx; } /** {@inheritDoc} */ @Override public void registerStore(PlatformCacheStore store, boolean convertBinary) throws IgniteCheckedException { storeLock.readLock().lock(); try { if (stopped) throw new IgniteCheckedException("Failed to initialize interop store because node is stopping: " + store); if (started) registerStore0(store, convertBinary); else pendingStores.add(new StoreInfo(store, convertBinary)); } finally { storeLock.readLock().unlock(); } } /** {@inheritDoc} */ @Override public void onDisconnected(IgniteFuture<?> reconnectFut) throws IgniteCheckedException { platformCtx.gateway().onClientDisconnected(); // 1) onReconnected is called on all grid components. // 2) After all of grid components have completed their reconnection, reconnectFut is completed. reconnectFut.listen(new CI1<IgniteFuture<?>>() { @Override public void apply(IgniteFuture<?> future) { platformCtx.gateway().onClientReconnected(clusterRestarted); } }); } /** {@inheritDoc} */ @Override public IgniteInternalFuture<?> onReconnected(boolean clusterRestarted) throws IgniteCheckedException { // Save the flag value for callback of reconnectFut. 
this.clusterRestarted = clusterRestarted; return null; } /** * Creates new platform cache. */ private PlatformTarget createPlatformCache(IgniteCacheProxy cache) { assert cache != null; return new PlatformCache(platformCtx, cache, false, cacheExts); } /** * Checks whether logger level is enabled. * * @param level Level. * @return Result. */ private boolean loggerIsLevelEnabled(int level) { IgniteLogger log = ctx.grid().log(); switch (level) { case PlatformLogger.LVL_TRACE: return log.isTraceEnabled(); case PlatformLogger.LVL_DEBUG: return log.isDebugEnabled(); case PlatformLogger.LVL_INFO: return log.isInfoEnabled(); case PlatformLogger.LVL_WARN: return true; case PlatformLogger.LVL_ERROR: return true; default: assert false; } return false; } /** * Logs to the Ignite logger. * * @param level Level. * @param message Message. * @param category Category. * @param errorInfo Exception. */ private void loggerLog(int level, String message, String category, String errorInfo) { IgniteLogger log = ctx.grid().log(); if (category != null) log = log.getLogger(category); Throwable err = errorInfo == null ? null : new IgniteException("Platform error:" + errorInfo); switch (level) { case PlatformLogger.LVL_TRACE: log.trace(message); break; case PlatformLogger.LVL_DEBUG: log.debug(message); break; case PlatformLogger.LVL_INFO: log.info(message); break; case PlatformLogger.LVL_WARN: log.warning(message, err); break; case PlatformLogger.LVL_ERROR: log.error(message, err); break; default: assert false; } } /** {@inheritDoc} */ @Override public long processInLongOutLong(int type, long val) throws IgniteCheckedException { switch (type) { case OP_LOGGER_IS_LEVEL_ENABLED: { return loggerIsLevelEnabled((int) val) ? 
TRUE : FALSE; } case OP_RELEASE_START: { releaseStart(); return 0; } case OP_SET_BASELINE_TOPOLOGY_VER: { ctx.grid().cluster().setBaselineTopology(val); return 0; } } return PlatformAbstractTarget.throwUnsupported(type); } /** {@inheritDoc} */ @Override public long processInStreamOutLong(int type, BinaryRawReaderEx reader) throws IgniteCheckedException { switch (type) { case OP_DESTROY_CACHE: { ctx.grid().destroyCache(reader.readString()); return 0; } case OP_LOGGER_LOG: { loggerLog(reader.readInt(), reader.readString(), reader.readString(), reader.readString()); return 0; } case OP_SET_BASELINE_TOPOLOGY_NODES: { int cnt = reader.readInt(); Collection<BaselineNode> nodes = new ArrayList<>(cnt); for (int i = 0; i < cnt; i++) { Object consId = reader.readObjectDetached(); Map<String, Object> attrs = PlatformUtils.readNodeAttributes(reader); nodes.add(new DetachedClusterNode(consId, attrs)); } ctx.grid().cluster().setBaselineTopology(nodes); return 0; } case OP_ADD_CACHE_CONFIGURATION: CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader, CURRENT_VER); ctx.grid().addCacheConfiguration(cfg); return 0; case OP_DISABLE_WAL: ctx.grid().cluster().disableWal(reader.readString()); return 0; case OP_ENABLE_WAL: ctx.grid().cluster().enableWal(reader.readString()); return 0; case OP_SET_TX_TIMEOUT_ON_PME: ctx.grid().cluster().setTxTimeoutOnPartitionMapExchange(reader.readLong()); return 0; case OP_IS_WAL_ENABLED: return ctx.grid().cluster().isWalEnabled(reader.readString()) ? 
TRUE : FALSE; } return PlatformAbstractTarget.throwUnsupported(type); } /** {@inheritDoc} */ @Override public long processInStreamOutLong(int type, BinaryRawReaderEx reader, PlatformMemory mem) throws IgniteCheckedException { return processInStreamOutLong(type, reader); } /** {@inheritDoc} */ @Override public void processInStreamOutStream(int type, BinaryRawReaderEx reader, BinaryRawWriterEx writer) throws IgniteCheckedException { PlatformAbstractTarget.throwUnsupported(type); } /** {@inheritDoc} */ @Override public PlatformTarget processInStreamOutObject(int type, BinaryRawReaderEx reader) throws IgniteCheckedException { switch (type) { case OP_GET_CACHE: { String name = reader.readString(); IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().cache(name); if (cache == null) throw new IllegalArgumentException("Cache doesn't exist: " + name); return createPlatformCache(cache); } case OP_CREATE_CACHE: { String name = reader.readString(); IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().createCache(name); return createPlatformCache(cache); } case OP_GET_OR_CREATE_CACHE: { String name = reader.readString(); IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().getOrCreateCache(name); return createPlatformCache(cache); } case OP_CREATE_CACHE_FROM_CONFIG: { CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader, CURRENT_VER); IgniteCacheProxy cache = reader.readBoolean() ? (IgniteCacheProxy)ctx.grid().createCache(cfg, PlatformConfigurationUtils.readNearConfiguration(reader)) : (IgniteCacheProxy)ctx.grid().createCache(cfg); return createPlatformCache(cache); } case OP_GET_OR_CREATE_CACHE_FROM_CONFIG: { CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader, CURRENT_VER); IgniteCacheProxy cache = reader.readBoolean() ? 
(IgniteCacheProxy)ctx.grid().getOrCreateCache(cfg, PlatformConfigurationUtils.readNearConfiguration(reader)) : (IgniteCacheProxy)ctx.grid().getOrCreateCache(cfg); return createPlatformCache(cache); } case OP_GET_AFFINITY: { return new PlatformAffinity(platformCtx, ctx, reader.readString()); } case OP_GET_DATA_STREAMER: { String cacheName = reader.readString(); boolean keepBinary = reader.readBoolean(); IgniteDataStreamer ldr = ctx.dataStream().dataStreamer(cacheName); ldr.keepBinary(true); return new PlatformDataStreamer(platformCtx, cacheName, (DataStreamerImpl)ldr, keepBinary); } case OP_GET_EXTENSION: { int id = reader.readInt(); if (extensions != null && id < extensions.length) { PlatformPluginExtension ext = extensions[id]; if (ext != null) { return ext.createTarget(); } } throw new IgniteException("Platform extension is not registered [id=" + id + ']'); } case OP_GET_ATOMIC_LONG: { String name = reader.readString(); long initVal = reader.readLong(); boolean create = reader.readBoolean(); GridCacheAtomicLongImpl atomicLong = (GridCacheAtomicLongImpl)ignite().atomicLong(name, initVal, create); if (atomicLong == null) return null; return new PlatformAtomicLong(platformCtx, atomicLong); } case OP_GET_ATOMIC_REFERENCE: { String name = reader.readString(); Object initVal = reader.readObjectDetached(); boolean create = reader.readBoolean(); return PlatformAtomicReference.createInstance(platformCtx, name, initVal, create); } case OP_GET_ATOMIC_SEQUENCE: { String name = reader.readString(); long initVal = reader.readLong(); boolean create = reader.readBoolean(); IgniteAtomicSequence atomicSeq = ignite().atomicSequence(name, initVal, create); if (atomicSeq == null) return null; return new PlatformAtomicSequence(platformCtx, atomicSeq); } case OP_CREATE_NEAR_CACHE: { String cacheName = reader.readString(); NearCacheConfiguration cfg = PlatformConfigurationUtils.readNearConfiguration(reader); IgniteCacheProxy cache = 
(IgniteCacheProxy)ctx.grid().createNearCache(cacheName, cfg); return createPlatformCache(cache); } case OP_GET_OR_CREATE_NEAR_CACHE: { String cacheName = reader.readString(); NearCacheConfiguration cfg = PlatformConfigurationUtils.readNearConfiguration(reader); IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().getOrCreateNearCache(cacheName, cfg); return createPlatformCache(cache); } case OP_GET_TRANSACTIONS: { String lbl = reader.readString(); return new PlatformTransactions(platformCtx, lbl); } } return PlatformAbstractTarget.throwUnsupported(type); } /** {@inheritDoc} */ @Override public PlatformTarget processInObjectStreamOutObjectStream(int type, @Nullable PlatformTarget arg, BinaryRawReaderEx reader, BinaryRawWriterEx writer) throws IgniteCheckedException { return PlatformAbstractTarget.throwUnsupported(type); } /** {@inheritDoc} */ @Override public void processOutStream(int type, BinaryRawWriterEx writer) throws IgniteCheckedException { switch (type) { case OP_GET_IGNITE_CONFIGURATION: { PlatformConfigurationUtils.writeIgniteConfiguration(writer, ignite().configuration(), CURRENT_VER); return; } case OP_GET_CACHE_NAMES: { Collection<String> names = ignite().cacheNames(); writer.writeInt(names.size()); for (String name : names) writer.writeString(name); return; } case OP_GET_BASELINE_TOPOLOGY: { Collection<BaselineNode> blt = ignite().cluster().currentBaselineTopology(); writer.writeInt(blt.size()); for (BaselineNode n : blt) { writer.writeObjectDetached(n.consistentId()); PlatformUtils.writeNodeAttributes(writer, n.attributes()); } return; } } PlatformAbstractTarget.throwUnsupported(type); } /** {@inheritDoc} */ @Override public PlatformTarget processOutObject(int type) throws IgniteCheckedException { switch (type) { case OP_GET_TRANSACTIONS: return new PlatformTransactions(platformCtx); case OP_GET_CLUSTER_GROUP: return new PlatformClusterGroup(platformCtx, ctx.grid().cluster()); case OP_GET_BINARY_PROCESSOR: { return new 
PlatformBinaryProcessor(platformCtx); } } return PlatformAbstractTarget.throwUnsupported(type); } /** {@inheritDoc} */ @Override public PlatformAsyncResult processInStreamAsync(int type, BinaryRawReaderEx reader) throws IgniteCheckedException { return PlatformAbstractTarget.throwUnsupported(type); } /** {@inheritDoc} */ @Override public Exception convertException(Exception e) { return e; } /** * Internal store initialization routine. * * @param store Store. * @param convertBinary Convert binary flag. * @throws IgniteCheckedException If failed. */ private void registerStore0(PlatformCacheStore store, boolean convertBinary) throws IgniteCheckedException { if (store instanceof PlatformDotNetCacheStore) { PlatformDotNetCacheStore store0 = (PlatformDotNetCacheStore)store; store0.initialize(ctx, convertBinary); } else throw new IgniteCheckedException("Unsupported interop store: " + store); } /** * Prepare cache extensions. * * @param cacheExts Original extensions. * @return Prepared extensions. 
*/ private static PlatformCacheExtension[] prepareCacheExtensions(Collection<PlatformCacheExtension> cacheExts) { if (!F.isEmpty(cacheExts)) { int maxExtId = 0; Map<Integer, PlatformCacheExtension> idToExt = new HashMap<>(); for (PlatformCacheExtension cacheExt : cacheExts) { if (cacheExt == null) throw new IgniteException("Platform cache extension cannot be null."); if (cacheExt.id() < 0) throw new IgniteException("Platform cache extension ID cannot be negative: " + cacheExt); PlatformCacheExtension oldCacheExt = idToExt.put(cacheExt.id(), cacheExt); if (oldCacheExt != null) throw new IgniteException("Platform cache extensions cannot have the same ID [" + "id=" + cacheExt.id() + ", first=" + oldCacheExt + ", second=" + cacheExt + ']'); if (cacheExt.id() > maxExtId) maxExtId = cacheExt.id(); } PlatformCacheExtension[] res = new PlatformCacheExtension[maxExtId + 1]; for (PlatformCacheExtension cacheExt : cacheExts) res[cacheExt.id()]= cacheExt; return res; } else //noinspection ZeroLengthArrayAllocation return new PlatformCacheExtension[0]; } /** * Prepare extensions. * * @param exts Original extensions. * @return Prepared extensions. 
*/
    private static PlatformPluginExtension[] prepareExtensions(PlatformPluginExtension[] exts) {
        if (!F.isEmpty(exts)) {
            int maxExtId = 0;

            // Used only to detect duplicate IDs while validating; discarded afterwards.
            Map<Integer, PlatformPluginExtension> idToExt = new HashMap<>();

            for (PlatformPluginExtension ext : exts) {
                // Validate each extension: non-null, non-negative ID, unique ID.
                if (ext == null)
                    throw new IgniteException("Platform extension cannot be null.");

                if (ext.id() < 0)
                    throw new IgniteException("Platform extension ID cannot be negative: " + ext);

                // Renamed local from 'oldCacheExt' (copy-paste from the cache-extension
                // variant) to 'oldExt': this method deals with plugin extensions.
                PlatformPluginExtension oldExt = idToExt.put(ext.id(), ext);

                if (oldExt != null)
                    throw new IgniteException("Platform extensions cannot have the same ID [" +
                        "id=" + ext.id() + ", first=" + oldExt + ", second=" + ext + ']');

                if (ext.id() > maxExtId)
                    maxExtId = ext.id();
            }

            // Sparse array indexed by extension ID gives O(1) lookup; slots for
            // unused IDs stay null.
            PlatformPluginExtension[] res = new PlatformPluginExtension[maxExtId + 1];

            for (PlatformPluginExtension ext : exts)
                res[ext.id()] = ext;

            return res;
        }
        else
            //noinspection ZeroLengthArrayAllocation
            return new PlatformPluginExtension[0];
    }

    /**
     * Store and manager pair: a platform cache store together with the flag
     * controlling binary conversion, kept until the store is initialized.
     */
    private static class StoreInfo {
        /** Store. */
        private final PlatformCacheStore store;

        /** Convert binary flag. */
        private final boolean convertBinary;

        /**
         * Constructor.
         *
         * @param store Store.
         * @param convertBinary Convert binary flag.
         */
        private StoreInfo(PlatformCacheStore store, boolean convertBinary) {
            this.store = store;
            this.convertBinary = convertBinary;
        }
    }
}
Java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.deploy

import java.io._
import java.util.jar.JarFile
import java.util.logging.Level
import java.util.zip.{ZipEntry, ZipOutputStream}

import scala.collection.JavaConversions._

import com.google.common.io.{ByteStreams, Files}

import org.apache.spark.{SparkException, Logging}
import org.apache.spark.api.r.RUtils
import org.apache.spark.util.{RedirectThread, Utils}

/**
 * Utilities for detecting, extracting and building R packages that are bundled
 * inside Spark Package jars, and for zipping the resulting R libraries for
 * distribution to the cluster.
 */
private[deploy] object RPackageUtils extends Logging {

  /** The key in the MANIFEST.mf that we look for, in case a jar contains R code. */
  private final val hasRPackage = "Spark-HasRPackage"

  /** Base of the shell command used in order to install R packages. */
  private final val baseInstallCmd = Seq("R", "CMD", "INSTALL", "-l")

  /** R source code should exist under R/pkg in a jar. */
  private final val RJarEntries = "R/pkg"

  /** Documentation on how the R source file layout should be in the jar. */
  private[deploy] final val RJarDoc =
    s"""In order for Spark to build R packages that are parts of Spark Packages, there are a few
      |requirements. The R source code must be shipped in a jar, with additional Java/Scala
      |classes. The jar must be in the following format:
      | 1- The Manifest (META-INF/MANIFEST.mf) must contain the key-value: $hasRPackage: true
      | 2- The standard R package layout must be preserved under R/pkg/ inside the jar. More
      | information on the standard R package layout can be found in:
      | http://cran.r-project.org/doc/contrib/Leisch-CreatingPackages.pdf
      | An example layout is given below. After running `jar tf $$JAR_FILE | sort`:
      |
      |META-INF/MANIFEST.MF
      |R/
      |R/pkg/
      |R/pkg/DESCRIPTION
      |R/pkg/NAMESPACE
      |R/pkg/R/
      |R/pkg/R/myRcode.R
      |org/
      |org/apache/
      |...
    """.stripMargin.trim

  /** Internal method for logging. We log to a printStream in tests, for debugging purposes. */
  private def print(
      msg: String,
      printStream: PrintStream,
      level: Level = Level.FINE,
      e: Throwable = null): Unit = {
    if (printStream != null) {
      // scalastyle:off println
      printStream.println(msg)
      // scalastyle:on println
      if (e != null) {
        e.printStackTrace(printStream)
      }
    } else {
      // No test stream supplied: route through the normal logger at the
      // requested level.
      level match {
        case Level.INFO => logInfo(msg)
        case Level.WARNING => logWarning(msg)
        case Level.SEVERE => logError(msg, e)
        case _ => logDebug(msg)
      }
    }
  }

  /**
   * Checks the manifest of the Jar whether there is any R source code bundled with it.
   * Exposed for testing.
   */
  private[deploy] def checkManifestForR(jar: JarFile): Boolean = {
    val manifest = jar.getManifest.getMainAttributes
    // The attribute must be present AND literally "true" (after trimming).
    manifest.getValue(hasRPackage) != null && manifest.getValue(hasRPackage).trim == "true"
  }

  /**
   * Runs the standard R package installation code to build the R package from source.
   * Multiple runs don't cause problems.
   *
   * @return true if `R CMD INSTALL` exited with status 0.
   */
  private def rPackageBuilder(
      dir: File,
      printStream: PrintStream,
      verbose: Boolean,
      libDir: String): Boolean = {
    // this code should be always running on the driver.
    val pathToPkg = Seq(dir, "R", "pkg").mkString(File.separator)
    val installCmd = baseInstallCmd ++ Seq(libDir, pathToPkg)
    if (verbose) {
      print(s"Building R package with the command: $installCmd", printStream)
    }
    try {
      val builder = new ProcessBuilder(installCmd)
      builder.redirectErrorStream(true)

      // Put the SparkR package directory into R library search paths in case this R package
      // may depend on SparkR.
      val env = builder.environment()
      val rPackageDir = RUtils.sparkRPackagePath(isDriver = true)
      env.put("SPARKR_PACKAGE_DIR", rPackageDir.mkString(","))
      env.put("R_PROFILE_USER",
        Seq(rPackageDir(0), "SparkR", "profile", "general.R").mkString(File.separator))

      val process = builder.start()
      // Forward the build tool's combined stdout/stderr to the caller's stream.
      new RedirectThread(process.getInputStream, printStream, "redirect R packaging").start()
      process.waitFor() == 0
    } catch {
      case e: Throwable =>
        print("Failed to build R package.", printStream, Level.SEVERE, e)
        false
    }
  }

  /**
   * Extracts the files under /R in the jar to a temporary directory for building.
   */
  private def extractRFolder(jar: JarFile, printStream: PrintStream, verbose: Boolean): File = {
    val tempDir = Utils.createTempDir(null)
    val jarEntries = jar.entries()
    while (jarEntries.hasMoreElements) {
      val entry = jarEntries.nextElement()
      val entryRIndex = entry.getName.indexOf(RJarEntries)
      if (entryRIndex > -1) {
        // Strip any prefix before "R/pkg" so the layout under tempDir starts there.
        val entryPath = entry.getName.substring(entryRIndex)
        if (entry.isDirectory) {
          val dir = new File(tempDir, entryPath)
          if (verbose) {
            print(s"Creating directory: $dir", printStream)
          }
          dir.mkdirs
        } else {
          val inStream = jar.getInputStream(entry)
          val outPath = new File(tempDir, entryPath)
          Files.createParentDirs(outPath)
          val outStream = new FileOutputStream(outPath)
          if (verbose) {
            print(s"Extracting $entry to $outPath", printStream)
          }
          Utils.copyStream(inStream, outStream, closeStreams = true)
        }
      }
    }
    tempDir
  }

  /**
   * Scans the given comma-separated list of jars; for every jar whose manifest
   * declares bundled R source code, extracts it and builds (installs) the R package.
   * (Doc fixed in review: it was a copy-paste of the extractRFolder comment.)
   */
  private[deploy] def checkAndBuildRPackage(
      jars: String,
      printStream: PrintStream = null,
      verbose: Boolean = false): Unit = {
    jars.split(",").foreach { jarPath =>
      val file = new File(Utils.resolveURI(jarPath))
      if (file.exists()) {
        val jar = new JarFile(file)
        if (checkManifestForR(jar)) {
          print(s"$file contains R source code. Now installing package.", printStream, Level.INFO)
          val rSource = extractRFolder(jar, printStream, verbose)
          if (RUtils.rPackages.isEmpty) {
            RUtils.rPackages = Some(Utils.createTempDir().getAbsolutePath)
          }
          try {
            if (!rPackageBuilder(rSource, printStream, verbose, RUtils.rPackages.get)) {
              print(s"ERROR: Failed to build R package in $file.", printStream)
              print(RJarDoc, printStream)
            }
          } finally {
            // NOTE(review): File.delete() does not remove non-empty directories,
            // so this cleanup is likely a no-op for the extracted tree — confirm.
            rSource.delete() // clean up
          }
        } else {
          if (verbose) {
            print(s"$file doesn't contain R source code, skipping...", printStream)
          }
        }
      } else {
        print(s"WARN: $file resolved as dependency, but not found.", printStream, Level.WARNING)
      }
    }
  }

  /**
   * Recursively lists the files under `dir`, skipping any entry whose name
   * contains one of `excludePatterns`.
   */
  private def listFilesRecursively(dir: File, excludePatterns: Seq[String]): Set[File] = {
    if (!dir.exists()) {
      Set.empty[File]
    } else {
      if (dir.isDirectory) {
        val subDir = dir.listFiles(new FilenameFilter {
          override def accept(dir: File, name: String): Boolean = {
            !excludePatterns.map(name.contains).reduce(_ || _) // exclude files with given pattern
          }
        })
        subDir.flatMap(listFilesRecursively(_, excludePatterns)).toSet
      } else {
        Set(dir)
      }
    }
  }

  /** Zips all the R libraries built for distribution to the cluster. */
  private[deploy] def zipRLibraries(dir: File, name: String): File = {
    // Exclude any previously produced .zip so the archive never contains itself.
    val filesToBundle = listFilesRecursively(dir, Seq(".zip"))
    // create a zip file from scratch, do not append to existing file.
    val zipFile = new File(dir, name)
    zipFile.delete()
    val zipOutputStream = new ZipOutputStream(new FileOutputStream(zipFile, false))
    try {
      filesToBundle.foreach { file =>
        // get the relative paths for proper naming in the zip file
        val relPath = file.getAbsolutePath.replaceFirst(dir.getAbsolutePath, "")
        val fis = new FileInputStream(file)
        val zipEntry = new ZipEntry(relPath)
        zipOutputStream.putNextEntry(zipEntry)
        ByteStreams.copy(fis, zipOutputStream)
        zipOutputStream.closeEntry()
        fis.close()
      }
    } finally {
      zipOutputStream.close()
    }
    zipFile
  }
}
Java
//-----------------------------------------------------------------------
// <copyright file="NUnitAssertions.cs" company="Akka.NET Project">
// Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------

using System;
using NUnit.Framework;

namespace Akka.TestKit.NUnit
{
    /// <summary>
    /// Assertions for NUnit: adapts the TestKit's <see cref="ITestKitAssertions"/>
    /// contract onto NUnit's <see cref="Assert"/> API.
    /// </summary>
    public class NUnitAssertions : ITestKitAssertions
    {
        /// <summary>Fails the test unconditionally with the formatted message.</summary>
        public void Fail(string format = "", params object[] args)
        {
            Assert.Fail(format, args);
        }

        /// <summary>Asserts that <paramref name="condition"/> is true.</summary>
        public void AssertTrue(bool condition, string format = "", params object[] args)
        {
            Assert.IsTrue(condition, format, args);
        }

        /// <summary>Asserts that <paramref name="condition"/> is false.</summary>
        public void AssertFalse(bool condition, string format = "", params object[] args)
        {
            Assert.IsFalse(condition, format, args);
        }

        /// <summary>Asserts equality using NUnit's default equality semantics.</summary>
        public void AssertEqual<T>(T expected, T actual, string format = "", params object[] args)
        {
            Assert.AreEqual(expected, actual, format, args);
        }

        /// <summary>
        /// Asserts equality using a caller-supplied comparer; throws an NUnit
        /// <see cref="AssertionException"/> (with both values formatted) on mismatch.
        /// </summary>
        public void AssertEqual<T>(T expected, T actual, Func<T, T, bool> comparer, string format = "", params object[] args)
        {
            if (!comparer(expected, actual))
                throw new AssertionException(string.Format("Assert.AreEqual failed. Expected [{0}]. Actual [{1}]. {2}",
                    FormatValue(expected), FormatValue(actual), string.Format(format, args)));
        }

        // Null-safe ToString used for failure messages.
        private static string FormatValue<T>(T expected)
        {
            return ReferenceEquals(expected, null) ? "null" : expected.ToString();
        }
    }
}
Java
#!/bin/bash
# Smoke check for a locally running service (presumably Gerrit, given the
# temp-file name — confirm against the surrounding CI setup): wait for it to
# answer on port 8080, then show the top of the response.

# Abort on the first failing command.
set -e

readonly url=http://localhost:8080
readonly tmp_file=gerrit

# Poll until the server accepts connections: retry refused connections,
# wait 5s between retries, 10s timeout per attempt, at most 20 attempts;
# save the response body to /tmp/$tmp_file.
wget --retry-connrefused --waitretry=5 --timeout=10 --tries=20 -O "/tmp/$tmp_file" "$url"

# Print the first lines of the fetched page as a sanity check.
head -n 4 "/tmp/$tmp_file"
Java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #pragma once #include <aws/eks/EKS_EXPORTS.h> #include <aws/eks/model/Cluster.h> #include <utility> namespace Aws { template<typename RESULT_TYPE> class AmazonWebServiceResult; namespace Utils { namespace Json { class JsonValue; } // namespace Json } // namespace Utils namespace EKS { namespace Model { class AWS_EKS_API RegisterClusterResult { public: RegisterClusterResult(); RegisterClusterResult(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result); RegisterClusterResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result); inline const Cluster& GetCluster() const{ return m_cluster; } inline void SetCluster(const Cluster& value) { m_cluster = value; } inline void SetCluster(Cluster&& value) { m_cluster = std::move(value); } inline RegisterClusterResult& WithCluster(const Cluster& value) { SetCluster(value); return *this;} inline RegisterClusterResult& WithCluster(Cluster&& value) { SetCluster(std::move(value)); return *this;} private: Cluster m_cluster; }; } // namespace Model } // namespace EKS } // namespace Aws
Java
/**
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * Copyright 2012-2017 the original author or authors.
 */
package org.assertj.core.util.diff;

import java.util.List;

/**
 * Initially copied from https://code.google.com/p/java-diff-utils/.
 * <p>
 * Describes the delete-delta between original and revised texts.
 *
 * @author <a href="[email protected]">Dmitry Naumenko</a>
 * @param <T> The type of the compared elements in the 'lines'.
 */
public class DeleteDelta<T> extends Delta<T> {

  /**
   * Creates a delete delta with the two given chunks.
   * (Doc fixed in review: previously said "change delta" and described
   * {@code revised} as "The original chunk".)
   *
   * @param original
   *          The original chunk. Must not be {@code null}.
   * @param revised
   *          The revised chunk. Must not be {@code null}.
   */
  public DeleteDelta(Chunk<T> original, Chunk<T> revised) {
    super(original, revised);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public void applyTo(List<T> target) throws IllegalStateException {
    verify(target);
    int position = getOriginal().getPosition();
    int size = getOriginal().size();
    // Remove the whole original region in one bulk operation: subList is a
    // view backed by 'target', so clear() deletes [position, position + size)
    // without the repeated element shifting of size individual remove() calls.
    target.subList(position, position + size).clear();
  }

  @Override
  public TYPE getType() {
    return Delta.TYPE.DELETE;
  }

  @Override
  public void verify(List<T> target) throws IllegalStateException {
    getOriginal().verify(target);
  }
}
Java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.jms;

import java.util.HashMap;
import java.util.Map;

import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.Session;

import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.support.ExchangeHelper;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Before;
import org.junit.Test;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.core.MessageCreator;

import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge;

/**
 * Verifies that a JMS {@link MapMessage} is consumed by a Camel route and that
 * its entries arrive as a {@link Map} body, both when sent directly via
 * {@link JmsTemplate} and when routed through a Camel producer endpoint.
 */
public class ConsumeJmsMapMessageTest extends CamelTestSupport {
    protected JmsTemplate jmsTemplate;
    private MockEndpoint endpoint;

    @Test
    public void testConsumeMapMessage() throws Exception {
        endpoint.expectedMessageCount(1);

        // Send a raw MapMessage straight to the queue, bypassing Camel.
        jmsTemplate.setPubSubDomain(false);
        jmsTemplate.send("test.map", new MessageCreator() {
            public Message createMessage(Session session) throws JMSException {
                MapMessage mapMessage = session.createMapMessage();
                mapMessage.setString("foo", "abc");
                mapMessage.setString("bar", "xyz");
                return mapMessage;
            }
        });

        endpoint.assertIsSatisfied();
        assertCorrectMapReceived();
    }

    /**
     * Asserts that the first exchange received by the mock endpoint carries the
     * expected two-entry map and originated from a JMS MapMessage.
     */
    protected void assertCorrectMapReceived() {
        Exchange exchange = endpoint.getReceivedExchanges().get(0);
        // This should be a JMS Exchange
        assertNotNull(ExchangeHelper.getBinding(exchange, JmsBinding.class));
        JmsMessage in = (JmsMessage) exchange.getIn();
        assertNotNull(in);

        Map<?, ?> map = exchange.getIn().getBody(Map.class);
        log.info("Received map: " + map);

        assertNotNull("Should have received a map message!", map);
        assertIsInstanceOf(MapMessage.class, in.getJmsMessage());
        assertEquals("map.foo", "abc", map.get("foo"));
        assertEquals("map.bar", "xyz", map.get("bar"));
        assertEquals("map.size", 2, map.size());
    }

    @Test
    public void testSendMapMessage() throws Exception {
        endpoint.expectedMessageCount(1);

        // Send a plain Map through a Camel route; the JMS producer should
        // convert it to a MapMessage.
        Map<String, String> map = new HashMap<>();
        map.put("foo", "abc");
        map.put("bar", "xyz");

        template.sendBody("direct:test", map);

        endpoint.assertIsSatisfied();
        assertCorrectMapReceived();
    }

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        endpoint = getMockEndpoint("mock:result");
    }

    protected CamelContext createCamelContext() throws Exception {
        CamelContext camelContext = super.createCamelContext();

        // Wire an embedded connection factory into both the template used by
        // the tests and the Camel "activemq" component.
        ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
        jmsTemplate = new JmsTemplate(connectionFactory);
        camelContext.addComponent("activemq", jmsComponentAutoAcknowledge(connectionFactory));

        return camelContext;
    }

    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            public void configure() throws Exception {
                from("activemq:test.map").to("mock:result");
                from("direct:test").to("activemq:test.map");
            }
        };
    }
}
Java
/* * Copyright 2010 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.core.impl.constructionheuristic.greedyFit.decider; public enum ConstructionHeuristicPickEarlyType { NEVER, FIRST_LAST_STEP_SCORE_EQUAL_OR_IMPROVING; }
Java
# # Author:: Adam Jacob (<[email protected]>) # Author:: Tyler Cloke (<[email protected]>) # Copyright:: Copyright 2008-2017, Chef Software Inc. # License:: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # require "spec_helper" describe Chef::Resource::RemoteFile do let(:resource) { Chef::Resource::RemoteFile.new("fakey_fakerton") } describe "name_property" do it "the path property is the name_property" do expect(resource.path).to eql("fakey_fakerton") end end describe "Actions" do it "sets the default action as :create" do expect(resource.action).to eql([:create]) end it "supports :create, :create_if_missing, :delete, :touch actions" do expect { resource.action :create }.not_to raise_error expect { resource.action :create_if_missing }.not_to raise_error expect { resource.action :delete }.not_to raise_error expect { resource.action :touch }.not_to raise_error end end describe "initialize" do it "is a subclass of Chef::Resource::File" do expect(resource).to be_a_kind_of(Chef::Resource::File) end end it "says its provider is RemoteFile when the source is an absolute URI" do resource.source("http://www.google.com/robots.txt") expect(resource.provider_for_action(:create)).to be_kind_of(Chef::Provider::RemoteFile) end it "says its provider is RemoteFile when the source is a network share" do resource.source("\\\\fakey\\fakerton\\fake.txt") expect(resource.provider_for_action(:create)).to be_kind_of(Chef::Provider::RemoteFile) end describe "source" do it 
"does not have a default value for 'source'" do
      expect(resource.source).to eql([])
    end

    it "accepts a URI for the remote file source" do
      resource.source "http://opscode.com/"
      expect(resource.source).to eql([ "http://opscode.com/" ])
    end

    it "accepts a windows network share source" do
      resource.source "\\\\fakey\\fakerton\\fake.txt"
      expect(resource.source).to eql([ "\\\\fakey\\fakerton\\fake.txt" ])
    end

    it "accepts file URIs with spaces" do
      resource.source("file:///C:/foo bar")
      expect(resource.source).to eql(["file:///C:/foo bar"])
    end

    # NOTE(review): "evalutator" below is a typo in the example description
    # string; left untouched because descriptions are runtime strings.
    it "accepts a delayed evalutator (string) for the remote file source" do
      resource.source Chef::DelayedEvaluator.new { "http://opscode.com/" }
      expect(resource.source).to eql([ "http://opscode.com/" ])
    end

    it "accepts an array of URIs for the remote file source" do
      resource.source([ "http://opscode.com/", "http://puppetlabs.com/" ])
      expect(resource.source).to eql([ "http://opscode.com/", "http://puppetlabs.com/" ])
    end

    # NOTE(review): "delated" is another description typo, left as-is.
    it "accepts a delated evaluator (array) for the remote file source" do
      resource.source Chef::DelayedEvaluator.new { [ "http://opscode.com/", "http://puppetlabs.com/" ] }
      expect(resource.source).to eql([ "http://opscode.com/", "http://puppetlabs.com/" ])
    end

    it "accepts an multiple URIs as arguments for the remote file source" do
      resource.source("http://opscode.com/", "http://puppetlabs.com/")
      expect(resource.source).to eql([ "http://opscode.com/", "http://puppetlabs.com/" ])
    end

    # Mixing a DelayedEvaluator with other sources is rejected.
    it "only accept a single argument if a delayed evalutor is used" do
      expect do
        resource.source("http://opscode.com/", Chef::DelayedEvaluator.new { "http://opscode.com/" })
      end.to raise_error(Chef::Exceptions::InvalidRemoteFileURI)
    end

    it "only accept a single array item if a delayed evalutor is used" do
      expect do
        resource.source(["http://opscode.com/", Chef::DelayedEvaluator.new { "http://opscode.com/" }])
      end.to raise_error(Chef::Exceptions::InvalidRemoteFileURI)
    end

    it "does not accept a non-URI as the source" do
      expect {
resource.source("not-a-uri") }.to raise_error(Chef::Exceptions::InvalidRemoteFileURI)
    end

    # Validation of a delayed value happens on read, not on assignment.
    it "does not accept a non-URI as the source when read from a delayed evaluator" do
      expect do
        resource.source(Chef::DelayedEvaluator.new { "not-a-uri" })
        resource.source
      end.to raise_error(Chef::Exceptions::InvalidRemoteFileURI)
    end

    it "raises an exception when source is an empty array" do
      expect { resource.source([]) }.to raise_error(ArgumentError)
    end
  end

  describe "checksum" do
    it "accepts a string for the checksum object" do
      resource.checksum "asdf"
      expect(resource.checksum).to eql("asdf")
    end

    it "defaults to nil" do
      expect(resource.checksum).to eq(nil)
    end
  end

  describe "ftp_active_mode" do
    it "accepts a boolean for the ftp_active_mode object" do
      resource.ftp_active_mode true
      expect(resource.ftp_active_mode).to be_truthy
    end

    it "defaults to false" do
      expect(resource.ftp_active_mode).to be_falsey
    end
  end

  describe "conditional get options" do
    it "defaults to using etags and last modified" do
      expect(resource.use_etags).to be_truthy
      expect(resource.use_last_modified).to be_truthy
    end

    # use_conditional_get toggles both flags together...
    it "enable or disables etag and last modified options as a group" do
      resource.use_conditional_get(false)
      expect(resource.use_etags).to be_falsey
      expect(resource.use_last_modified).to be_falsey

      resource.use_conditional_get(true)
      expect(resource.use_etags).to be_truthy
      expect(resource.use_last_modified).to be_truthy
    end

    # ...while the individual setters leave the other flag untouched.
    it "disables etags indivdually" do
      resource.use_etags(false)
      expect(resource.use_etags).to be_falsey
      expect(resource.use_last_modified).to be_truthy
    end

    it "disables last modified individually" do
      resource.use_last_modified(false)
      expect(resource.use_last_modified).to be_falsey
      expect(resource.use_etags).to be_truthy
    end
  end

  describe "when it has group, mode, owner, source, and checksum" do
    before do
      if Chef::Platform.windows?
resource.path("C:/temp/origin/file.txt") resource.rights(:read, "Everyone") resource.deny_rights(:full_control, "Clumsy_Sam") else resource.path("/this/path/") resource.group("pokemon") resource.mode("0664") resource.owner("root") end resource.source("https://www.google.com/images/srpr/logo3w.png") resource.checksum("1" * 26) end it "describes its state" do state = resource.state_for_resource_reporter if Chef::Platform.windows? puts state expect(state[:rights]).to eq([{ :permissions => :read, :principals => "Everyone" }]) expect(state[:deny_rights]).to eq([{ :permissions => :full_control, :principals => "Clumsy_Sam" }]) else expect(state[:group]).to eq("pokemon") expect(state[:mode]).to eq("0664") expect(state[:owner]).to eq("root") expect(state[:checksum]).to eq("1" * 26) end end it "returns the path as its identity" do if Chef::Platform.windows? expect(resource.identity).to eq("C:/temp/origin/file.txt") else expect(resource.identity).to eq("/this/path/") end end end end
Java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.flink.translation.wrappers.streaming.io; import com.google.common.annotations.VisibleForTesting; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import org.apache.beam.runners.flink.metrics.FlinkMetricContainer; import org.apache.beam.runners.flink.metrics.ReaderInvocationUtil; import org.apache.beam.runners.flink.translation.types.CoderTypeInformation; import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.KvCoder; import org.apache.beam.sdk.coders.SerializableCoder; import org.apache.beam.sdk.io.UnboundedSource; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.TypeDescriptor; import org.apache.beam.sdk.values.ValueWithRecordId; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.functions.StoppableFunction; import 
org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.OperatorStateStore;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.CheckpointListener;
import org.apache.flink.runtime.state.DefaultOperatorStateBackend;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Wrapper for executing {@link UnboundedSource UnboundedSources} as a Flink Source.
 */
public class UnboundedSourceWrapper<
    OutputT, CheckpointMarkT extends UnboundedSource.CheckpointMark>
    extends RichParallelSourceFunction<WindowedValue<ValueWithRecordId<OutputT>>>
    implements ProcessingTimeCallback, StoppableFunction,
    CheckpointListener, CheckpointedFunction {

  private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceWrapper.class);

  /** Step name, used for metrics/reporting on this source. */
  private final String stepName;

  /**
   * Keep the options so that we can initialize the localReaders.
   */
  private final SerializedPipelineOptions serializedOptions;

  /**
   * For snapshot and restore. Null when the source provides no checkpoint-mark
   * coder (in which case no snapshots are taken).
   */
  private final KvCoder<
      ? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> checkpointCoder;

  /**
   * The split sources. We split them in the constructor to ensure that all parallel
   * sources are consistent about the split sources.
   */
  private final List<? extends UnboundedSource<OutputT, CheckpointMarkT>> splitSources;

  /**
   * The local split sources. Assigned at runtime when the wrapper is executed in parallel.
   */
  private transient List<UnboundedSource<OutputT, CheckpointMarkT>> localSplitSources;

  /**
   * The local split readers. Assigned at runtime when the wrapper is executed in parallel.
   * Make it a field so that we can access it in {@link #onProcessingTime(long)} for
   * emitting watermarks.
   */
  private transient List<UnboundedSource.UnboundedReader<OutputT>> localReaders;

  /**
   * Flag to indicate whether the source is running.
   * Initialize here and not in run() to prevent races where we cancel a job before run() is
   * ever called or run() is called after cancel().
   */
  private volatile boolean isRunning = true;

  /**
   * Make it a field so that we can access it in {@link #onProcessingTime(long)} for registering new
   * triggers.
   */
  private transient StreamingRuntimeContext runtimeContext;

  /**
   * Make it a field so that we can access it in {@link #onProcessingTime(long)} for emitting
   * watermarks.
   */
  private transient SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> context;

  /**
   * Pending checkpoints which have not been acknowledged yet.
   */
  private transient LinkedHashMap<Long, List<CheckpointMarkT>> pendingCheckpoints;

  /**
   * Keep a maximum of 32 checkpoints for {@code CheckpointMark.finalizeCheckpoint()}.
   */
  private static final int MAX_NUMBER_PENDING_CHECKPOINTS = 32;

  /** Flink operator state holding (source, checkpoint mark) pairs for restore. */
  private transient ListState<KV<? extends UnboundedSource<OutputT, CheckpointMarkT>,
      CheckpointMarkT>> stateForCheckpoint;

  /**
   * false if checkpointCoder is null or no restore state by starting first.
*/
  private transient boolean isRestored = false;

  @SuppressWarnings("unchecked")
  public UnboundedSourceWrapper(
      String stepName,
      PipelineOptions pipelineOptions,
      UnboundedSource<OutputT, CheckpointMarkT> source,
      int parallelism) throws Exception {
    this.stepName = stepName;
    this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);

    if (source.requiresDeduping()) {
      LOG.warn("Source {} requires deduping but Flink runner doesn't support this yet.", source);
    }

    Coder<CheckpointMarkT> checkpointMarkCoder = source.getCheckpointMarkCoder();
    if (checkpointMarkCoder == null) {
      LOG.info("No CheckpointMarkCoder specified for this source. Won't create snapshots.");
      checkpointCoder = null;
    } else {

      Coder<? extends UnboundedSource<OutputT, CheckpointMarkT>> sourceCoder =
          (Coder) SerializableCoder.of(new TypeDescriptor<UnboundedSource>() {
          });

      checkpointCoder = KvCoder.of(sourceCoder, checkpointMarkCoder);
    }

    // get the splits early. we assume that the generated splits are stable,
    // this is necessary so that the mapping of state to source is correct
    // when restoring
    splitSources = source.split(parallelism, pipelineOptions);
  }

  /**
   * Initialize and restore state before starting execution of the source.
   */
  @Override
  public void open(Configuration parameters) throws Exception {
    runtimeContext = (StreamingRuntimeContext) getRuntimeContext();

    // figure out which split sources we're responsible for
    int subtaskIndex = runtimeContext.getIndexOfThisSubtask();
    int numSubtasks = runtimeContext.getNumberOfParallelSubtasks();

    localSplitSources = new ArrayList<>();
    localReaders = new ArrayList<>();

    pendingCheckpoints = new LinkedHashMap<>();

    if (isRestored) {
      // restore the splitSources from the checkpoint to ensure consistent ordering
      for (KV<? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> restored:
          stateForCheckpoint.get()) {
        localSplitSources.add(restored.getKey());
        // Resume each reader from its restored checkpoint mark.
        localReaders.add(restored.getKey().createReader(
            serializedOptions.getPipelineOptions(), restored.getValue()));
      }
    } else {
      // initialize localReaders and localSources from scratch
      for (int i = 0; i < splitSources.size(); i++) {
        // round-robin assignment of splits to this subtask
        if (i % numSubtasks == subtaskIndex) {
          UnboundedSource<OutputT, CheckpointMarkT> source =
              splitSources.get(i);
          UnboundedSource.UnboundedReader<OutputT> reader =
              source.createReader(serializedOptions.getPipelineOptions(), null);
          localSplitSources.add(source);
          localReaders.add(reader);
        }
      }
    }

    LOG.info("Unbounded Flink Source {}/{} is reading from sources: {}",
        subtaskIndex,
        numSubtasks,
        localSplitSources);
  }

  @Override
  public void run(SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> ctx) throws Exception {

    context = ctx;

    FlinkMetricContainer metricContainer = new FlinkMetricContainer(getRuntimeContext());

    ReaderInvocationUtil<OutputT, UnboundedSource.UnboundedReader<OutputT>> readerInvoker =
        new ReaderInvocationUtil<>(
            stepName,
            serializedOptions.getPipelineOptions(),
            metricContainer);

    if (localReaders.size() == 0) {
      // do nothing, but still look busy ...
// also, output a Long.MAX_VALUE watermark since we know that we're not // going to emit anything // we can't return here since Flink requires that all operators stay up, // otherwise checkpointing would not work correctly anymore ctx.emitWatermark(new Watermark(Long.MAX_VALUE)); // wait until this is canceled final Object waitLock = new Object(); while (isRunning) { try { // Flink will interrupt us at some point //noinspection SynchronizationOnLocalVariableOrMethodParameter synchronized (waitLock) { // don't wait indefinitely, in case something goes horribly wrong waitLock.wait(1000); } } catch (InterruptedException e) { if (!isRunning) { // restore the interrupted state, and fall through the loop Thread.currentThread().interrupt(); } } } } else if (localReaders.size() == 1) { // the easy case, we just read from one reader UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(0); boolean dataAvailable = readerInvoker.invokeStart(reader); if (dataAvailable) { emitElement(ctx, reader); } setNextWatermarkTimer(this.runtimeContext); while (isRunning) { dataAvailable = readerInvoker.invokeAdvance(reader); if (dataAvailable) { emitElement(ctx, reader); } else { Thread.sleep(50); } } } else { // a bit more complicated, we are responsible for several localReaders // loop through them and sleep if none of them had any data int numReaders = localReaders.size(); int currentReader = 0; // start each reader and emit data if immediately available for (UnboundedSource.UnboundedReader<OutputT> reader : localReaders) { boolean dataAvailable = readerInvoker.invokeStart(reader); if (dataAvailable) { emitElement(ctx, reader); } } // a flag telling us whether any of the localReaders had data // if no reader had data, sleep for bit boolean hadData = false; while (isRunning) { UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(currentReader); boolean dataAvailable = readerInvoker.invokeAdvance(reader); if (dataAvailable) { emitElement(ctx, reader); hadData = 
true; } currentReader = (currentReader + 1) % numReaders; if (currentReader == 0 && !hadData) { Thread.sleep(50); } else if (currentReader == 0) { hadData = false; } } } } /** * Emit the current element from the given Reader. The reader is guaranteed to have data. */ private void emitElement( SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> ctx, UnboundedSource.UnboundedReader<OutputT> reader) { // make sure that reader state update and element emission are atomic // with respect to snapshots synchronized (ctx.getCheckpointLock()) { OutputT item = reader.getCurrent(); byte[] recordId = reader.getCurrentRecordId(); Instant timestamp = reader.getCurrentTimestamp(); WindowedValue<ValueWithRecordId<OutputT>> windowedValue = WindowedValue.of(new ValueWithRecordId<>(item, recordId), timestamp, GlobalWindow.INSTANCE, PaneInfo.NO_FIRING); ctx.collectWithTimestamp(windowedValue, timestamp.getMillis()); } } @Override public void close() throws Exception { super.close(); if (localReaders != null) { for (UnboundedSource.UnboundedReader<OutputT> reader: localReaders) { reader.close(); } } } @Override public void cancel() { isRunning = false; } @Override public void stop() { isRunning = false; } // ------------------------------------------------------------------------ // Checkpoint and restore // ------------------------------------------------------------------------ @Override public void snapshotState(FunctionSnapshotContext functionSnapshotContext) throws Exception { if (!isRunning) { LOG.debug("snapshotState() called on closed source"); } else { if (checkpointCoder == null) { // no checkpoint coder available in this source return; } stateForCheckpoint.clear(); long checkpointId = functionSnapshotContext.getCheckpointId(); // we checkpoint the sources along with the CheckpointMarkT to ensure // than we have a correct mapping of checkpoints to sources when // restoring List<CheckpointMarkT> checkpointMarks = new ArrayList<>(localSplitSources.size()); for (int i = 0; 
i < localSplitSources.size(); i++) { UnboundedSource<OutputT, CheckpointMarkT> source = localSplitSources.get(i); UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(i); @SuppressWarnings("unchecked") CheckpointMarkT mark = (CheckpointMarkT) reader.getCheckpointMark(); checkpointMarks.add(mark); KV<UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> kv = KV.of(source, mark); stateForCheckpoint.add(kv); } // cleanup old pending checkpoints and add new checkpoint int diff = pendingCheckpoints.size() - MAX_NUMBER_PENDING_CHECKPOINTS; if (diff >= 0) { for (Iterator<Long> iterator = pendingCheckpoints.keySet().iterator(); diff >= 0; diff--) { iterator.next(); iterator.remove(); } } pendingCheckpoints.put(checkpointId, checkpointMarks); } } @Override public void initializeState(FunctionInitializationContext context) throws Exception { if (checkpointCoder == null) { // no checkpoint coder available in this source return; } OperatorStateStore stateStore = context.getOperatorStateStore(); CoderTypeInformation< KV<? 
extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT>> typeInformation = (CoderTypeInformation) new CoderTypeInformation<>(checkpointCoder); stateForCheckpoint = stateStore.getOperatorState( new ListStateDescriptor<>(DefaultOperatorStateBackend.DEFAULT_OPERATOR_STATE_NAME, typeInformation.createSerializer(new ExecutionConfig()))); if (context.isRestored()) { isRestored = true; LOG.info("Having restore state in the UnbounedSourceWrapper."); } else { LOG.info("No restore state for UnbounedSourceWrapper."); } } @Override public void onProcessingTime(long timestamp) throws Exception { if (this.isRunning) { synchronized (context.getCheckpointLock()) { // find minimum watermark over all localReaders long watermarkMillis = Long.MAX_VALUE; for (UnboundedSource.UnboundedReader<OutputT> reader: localReaders) { Instant watermark = reader.getWatermark(); if (watermark != null) { watermarkMillis = Math.min(watermark.getMillis(), watermarkMillis); } } context.emitWatermark(new Watermark(watermarkMillis)); } setNextWatermarkTimer(this.runtimeContext); } } private void setNextWatermarkTimer(StreamingRuntimeContext runtime) { if (this.isRunning) { long watermarkInterval = runtime.getExecutionConfig().getAutoWatermarkInterval(); long timeToNextWatermark = getTimeToNextWatermark(watermarkInterval); runtime.getProcessingTimeService().registerTimer(timeToNextWatermark, this); } } private long getTimeToNextWatermark(long watermarkInterval) { return System.currentTimeMillis() + watermarkInterval; } /** * Visible so that we can check this in tests. Must not be used for anything else. */ @VisibleForTesting public List<? extends UnboundedSource<OutputT, CheckpointMarkT>> getSplitSources() { return splitSources; } /** * Visible so that we can check this in tests. Must not be used for anything else. */ @VisibleForTesting public List<? 
extends UnboundedSource<OutputT, CheckpointMarkT>> getLocalSplitSources() { return localSplitSources; } @Override public void notifyCheckpointComplete(long checkpointId) throws Exception { List<CheckpointMarkT> checkpointMarks = pendingCheckpoints.get(checkpointId); if (checkpointMarks != null) { // remove old checkpoints including the current one Iterator<Long> iterator = pendingCheckpoints.keySet().iterator(); long currentId; do { currentId = iterator.next(); iterator.remove(); } while (currentId != checkpointId); // confirm all marks for (CheckpointMarkT mark : checkpointMarks) { mark.finalizeCheckpoint(); } } } }
Java
/*
 * Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
'use strict';

// Attribute directive rendering a time-series line chart via nvd3.
angular.module('ui.widgets')
  .directive('wtNvd3LineChart', function ($filter) {
    return {
      restrict: 'A',
      replace: true,
      templateUrl: 'template/widgets/nvd3LineChart/nvd3LineChart.html',
      scope: {
        data: '=data',
        showLegend: '@',
        showTimeRange: '=?',
        timeAxisFormat: '=?'
      },
      controller: function ($scope) {
        var dateFilter = $filter('date');
        var numberFilter = $filter('number');

        // X ticks: render the timestamp with the configured date format.
        $scope.xAxisTickFormatFunction = function () {
          return function (d) {
            return dateFilter(d, $scope.timeAxisFormat);
          };
        };

        // Y ticks: abbreviate thousands as "k" and millions as "m".
        $scope.yAxisTickFormatFunction = function () {
          return function (d) {
            if (!(d > 999)) {
              return numberFilter(d);
            }
            var abbreviated;
            var suffix;
            if (d < 999999) {
              abbreviated = Math.round(d / 1000);
              suffix = 'k';
            } else {
              abbreviated = Math.round(d / 1000000);
              suffix = 'm';
            }
            return numberFilter(abbreviated) + suffix;
          };
        };

        // Accessors telling nvd3 where x and y live on each datum.
        $scope.xFunction = function () {
          return function (d) {
            return d.timestamp;
          };
        };
        $scope.yFunction = function () {
          return function (d) {
            return d.value;
          };
        };
      },
      link: function postLink(scope, element, attrs) {
        // Default the optional bindings when the attribute is absent.
        if (!_.has(attrs, 'showTimeRange')) {
          scope.showTimeRange = true;
        }
        scope.timeAxisFormat = scope.timeAxisFormat || 'HH:mm';

        // Track the visible time range from the first series (needs >= 2 points).
        scope.$watch('data', function (data) {
          if (!(data && data[0] && data[0].values && (data[0].values.length > 1))) {
            return;
          }
          var ordered = _.sortBy(data[0].values, function (item) {
            return item.timestamp;
          });
          scope.start = ordered[0].timestamp;
          scope.end = ordered[ordered.length - 1].timestamp;
        });
      }
    };
  });
Java
/* Copyright 2018 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package blb import ( "k8s.io/autoscaler/cluster-autoscaler/cloudprovider/baiducloud/baiducloud-sdk-go/bce" ) // Endpoint contains all endpoints of Baidu Cloud BCC. var Endpoint = map[string]string{ "bj": "blb.bj.baidubce.com", "gz": "blb.gz.baidubce.com", "su": "blb.su.baidubce.com", "hk": "blb.hkg.baidubce.com", "bd": "blb.bd.baidubce.com", } // Client is the BLB client implemention for Baidu Cloud BLB API. type Client struct { *bce.Client } // NewBLBClient new a client for BLB func NewBLBClient(config *bce.Config) *Client { bceClient := bce.NewClient(config) return &Client{bceClient} } // GetURL generates the full URL of http request for Baidu Cloud BLB API. func (c *Client) GetURL(version string, params map[string]string) string { host := c.Endpoint if host == "" { host = Endpoint[c.GetRegion()] } uriPath := version return c.Client.GetURL(host, uriPath, params) }
Java
/*
Copyright 2018 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Array.peek(): last element or undefined. Guarded: the property may already
// be defined (non-configurable) when this module loads twice.
try {
    Object.defineProperty(Array.prototype, "peek", { value: function () { return (this.length > 0 ? this[this.length - 1] : undefined); } });
} catch (e) { }
// String.replaceAll() polyfill for engines that lack it.
try {
    Object.defineProperty(String.prototype, "replaceAll", {
        value: function replaceAll(oldVal, newVal) {
            return (this.split(oldVal).join(newVal));
        }
    });
} catch (e) { }

// 'RSMB' firmware-table provider signature for Win32 GetSystemFirmwareTable.
var RSMB = 1381190978;

// SMBIOS type 16 "Location" field values (DMTF SMBIOS spec).
var memoryLocation = { 0x1: 'Other', 0x2: 'Unknown', 0x3: 'System Board', 0x4: 'ISA', 0x5: 'EISA', 0x6: 'PCI', 0x7: 'MCA', 0x8: 'PCMCIA', 0x9: 'Proprietary', 0xA: 'NuBus', 0xA0: 'PC-98/C20', 0xA1: 'PC-98/C24', 0xA2: 'PC-98/E', 0xA3: 'PC-98/LB' };
// SMBIOS type 1 "Wake-up Type" field values, indexed directly.
var wakeReason = ['Reserved', 'Other', 'Unknown', 'APM Timer', 'Modem Ring', 'LAN', 'Power Switch', 'PCI', 'AC Power'];

// Fill the left with zeros until the string is of a given length
function zeroLeftPad(str, len) {
    if ((len == null) && (typeof (len) != 'number')) {
        return null;
    }
    if (str == null) str = ''; // If null, this is to generate zero leftpad string
    var zlp = '';
    for (var i = 0; i < len - str.length; i++) {
        zlp += '0';
    }
    return zlp + str;
}

// Reads and decodes the raw SMBIOS firmware tables. On Windows the table is
// fetched via Kernel32 GetSystemFirmwareTable; on Linux it is reconstructed
// from `dmidecode -u` output.
function SMBiosTables() {
    this._ObjectID = 'SMBiosTable';
    if (process.platform == 'win32') {
        this._marshal = require('_GenericMarshal');
        this._native = this._marshal.CreateNativeProxy("Kernel32.dll");
        this._native.CreateMethod('EnumSystemFirmwareTables');
        this._native.CreateMethod('GetSystemFirmwareTable');
    }
    if (process.platform == 'linux') {
        // Turns `dmidecode -u` text back into the raw binary table stream:
        // hex "Header and Data" bytes, followed by the raw string area.
        this._canonicalizeData = function _canonicalizeData(data) {
            var lines = data.toString().split('Header and Data:\x0A');
            var MemoryStream = require('MemoryStream');
            var ms = new MemoryStream();

            for (var i = 1; i < lines.length; ++i) {
                var tokens = lines[i].split('Strings:\x0A');
                var header = tokens[0].split('\x0A\x0A')[0].replaceAll('\x0A', '').trim().replaceAll(' ', '').replaceAll('\x09', '');
                ms.write(Buffer.from(header, 'hex'));
                if (tokens.length > 1) {
                    var strings = tokens[1].split('\x0A\x0A')[0].split('\x0A');
                    var stringsFinal = [];
                    for (var strx in strings) {
                        var tmp = strings[strx].trim().replaceAll(' ', '').replaceAll('\x09', '');
                        // dmidecode echoes each string twice: hex bytes, then
                        // the quoted text; keep only the hex lines.
                        if (!(tmp[0] == '"')) { stringsFinal.push(tmp); }
                    }
                    ms.write(Buffer.from(stringsFinal.join(''), 'hex'));
                    ms.write(Buffer.from('00', 'hex'));
                } else {
                    // No string area: structure terminates with double NUL.
                    ms.write(Buffer.from('0000', 'hex'));
                }
            }
            var retVal = ms.buffer;
            retVal.ms = ms;
            return (retVal);
        };
    }

    // Splits the raw table blob into structures grouped by SMBIOS type.
    // Returns { type: [Buffer, ...] }; each buffer carries a `_strings` array
    // with the structure's trailing string area.
    this._parse = function _parse(SMData) {
        var ret = {};
        var pbyte;
        var i = 0;
        var SMData;
        var structcount = 0;

        while (SMData && i < SMData.length) {
            var SMtype = SMData[i];
            var SMlength = SMData[i + 1]; // formatted-area length incl. 4-byte header

            if (!ret[SMtype]) { ret[SMtype] = []; }
            ret[SMtype].push(SMData.slice(i + 4, i + SMlength));
            if (process.platform == 'win32') { ret[SMtype].peek()._ext = pbyte; }
            i += SMlength;

            ret[SMtype].peek()._strings = [];

            while (SMData[i] != 0 && i <= SMData.length) {
                var strstart = i;

                // Start of String, find end of string
                while (SMData[i++] != 0 && i <= SMData.length);
                try {
                    ret[SMtype].peek()._strings.push(SMData.slice(strstart, i).toString().trim());
                } catch (ee) { }
            }
            // Empty string area ends with two NULs; otherwise one extra NUL.
            i += (ret[SMtype].peek()._strings.length == 0) ? 2 : 1;
            ++structcount;
            //console.log('End of Table[' + SMtype + ']: ' + i);
        }
        //console.log('Struct Count = ' + structcount);
        return (ret);
    };

    // Fetches the raw tables for this platform and hands the parsed result to
    // `callback` (or returns it synchronously on win32 when no callback given).
    this.get = function get(callback) {
        if (process.platform == 'win32') {
            var size = this._native.GetSystemFirmwareTable(RSMB, 0, 0, 0).Val;
            //console.log('Table Size: ' + size);

            var PtrSize = this._marshal.CreatePointer()._size;
            var buffer = this._marshal.CreateVariable(size);
            var written = this._native.GetSystemFirmwareTable(RSMB, 0, buffer, size).Val;
            //console.log('Written Size: ' + written);

            var rawBuffer = buffer.toBuffer();
            var length = buffer.Deref(4, 4).toBuffer().readUInt32LE(0);

            // Skip the 8-byte RawSMBIOSData header; `length` is the table size.
            pbyte = buffer.Deref(8, length);
            SMData = pbyte.toBuffer();

            if (callback) { callback.apply(this, [this._parse(SMData)]); return; } else { return (this._parse(SMData)); }
        }
        if (process.platform == 'linux') {
            var MemoryStream = require('MemoryStream');
            this.child = require('child_process').execFile('/usr/sbin/dmidecode', ['dmidecode', '-u']);
            this.child.SMBiosTable = this;
            this.child.ms = new MemoryStream();
            this.child.ms.callback = callback;
            this.child.ms.child = this.child;
            this.child.stdout.on('data', function (buffer) { this.parent.ms.write(buffer); });
            this.child.on('exit', function () { this.ms.end(); });
            this.child.ms.on('end', function () {
                //console.log('read ' + this.buffer.length + ' bytes');
                if (this.buffer.length < 300) {
                    // Too short to be a real table: typically a permission error.
                    //console.log('Not enough permission to read SMBiosTable');
                    if (this.callback) { this.callback.apply(this.child.SMBiosTable, []); }
                } else {
                    var SMData = this.child.SMBiosTable._canonicalizeData(this.buffer);
                    var j = this.child.SMBiosTable._parse(SMData);
                    if (this.callback) { this.callback.apply(this.child.SMBiosTable, [j]); }
                }
            });
            return;
        }
        // Unsupported platform.
        if (callback) { callback.apply(this, [null]); return; } else { return (null); }
    };

    // Builds the summary report from parsed tables. Each section is
    // best-effort: a malformed table must not break the others.
    this.parse = function parse(data) {
        var r = {};
        try { r.processorInfo = this.processorInfo(data); } catch (e) { }
        try { r.memoryInfo = this.memoryInfo(data); } catch (e) { }
        try { r.systemInfo = this.systemInfo(data); } catch (e) { }
        // FIX: previously called this.systemInfo(data), so systemSlots held a
        // duplicate of systemInfo and systemSlots() was never invoked.
        try { r.systemSlots = this.systemSlots(data); } catch (e) { }
        try { r.amtInfo = this.amtInfo(data); } catch (e) { }
        try { if (JSON.stringify(r).length > 65535) { r = {}; } } catch (ee) { }
        return r;
    }

    // Decodes SMBIOS type 4 (Processor Information) structures.
    this.processorInfo = function processorInfo(data) {
        if (!data) { throw ('no data'); }
        var ret = [];
        var ptype = ['ERROR', 'Other', 'Unknown', 'CPU', 'ALU', 'DSP', 'GPU'];
        var statusString = ['Unknown', 'Enabled', 'Disabled by user', 'Disabled by BIOS', 'Idle', 'Reserved', 'Reserved', 'Other'];
        var cpuid = 0;
        while (data[4] && data[4].length > 0) {
            var p = data[4].pop();
            var populated = p[20] & 0x40;
            var status = p[20] & 0x07;
            if (populated) {
                var j = { _ObjectID: 'SMBiosTables.processorInfo' };
                j.Processor = ptype[p[1]];
                j.MaxSpeed = p.readUInt16LE(16) + ' Mhz';
                if (p[31]) { j.Cores = p[31]; }
                if (p[33]) { j.Threads = p[33]; }
                j.Populated = 1;
                j.Status = statusString[status];
                j.Socket = p._strings[p[0] - 1];
                j.Manufacturer = p._strings[p[3] - 1];
                j.Version = p._strings[p[12] - 1];
                ret.push(j);
            }
        }
        return (ret);
    };

    // Decodes SMBIOS type 16 (Physical Memory Array).
    this.memoryInfo = function memoryInfo(data) {
        if (!data) { throw ('no data'); }
        var retVal = { _ObjectID: 'SMBiosTables.memoryInfo' };
        if (data[16]) {
            var m = data[16].peek();
            retVal.location = memoryLocation[m[0]];
            // 0x80000000 is the spec's "capacity unknown/overflow" sentinel.
            if ((retVal.maxCapacityKb = m.readUInt32LE(3)) == 0x80000000) { retVal.maxCapacityKb = 'A really big number'; }
        }
        return (retVal);
    };

    // Decodes SMBIOS type 1 (System Information): UUID and wake reason.
    this.systemInfo = function systemInfo(data) {
        if (!data) { throw ('no data'); }
        var retVal = { _ObjectID: 'SMBiosTables.systemInfo' };
        if (data[1]) {
            var si = data[1].peek();
            var uuid = si.slice(4, 20);
            // First three UUID fields are little-endian per SMBIOS >= 2.6.
            retVal.uuid = [zeroLeftPad(uuid.readUInt32LE(0).toString(16), 8),
                zeroLeftPad(uuid.readUInt16LE(4).toString(16), 4),
                zeroLeftPad(uuid.readUInt16LE(6).toString(16), 4),
                zeroLeftPad(uuid.readUInt16BE(8).toString(16), 4),
                zeroLeftPad(uuid.slice(10).toString('hex').toLowerCase(), 12)].join('-');
            retVal.wakeReason = wakeReason[si[20]];
        }
        return (retVal);
    };

    // Decodes SMBIOS type 9 (System Slots): slot designation strings.
    this.systemSlots = function systemSlots(data) {
        if (!data) { throw ('no data'); }
        var retVal = [];
        if (data[9]) {
            while (data[9].length > 0) {
                var ss = data[9].pop();
                retVal.push({ name: ss._strings[ss[0] - 1] });
            }
        }
        return (retVal);
    };

    // Decodes the Intel OEM tables 130 ('$AMT') and 131 ('vPro' settings).
    this.amtInfo = function amtInfo(data) {
        if (!data) { throw ('no data'); }
        var retVal = { AMT: false };
        if (data[130] && data[130].peek().slice(0, 4).toString() == '$AMT') {
            var amt = data[130].peek();
            retVal.AMT = amt[4] ? true : false;
            if (retVal.AMT) {
                retVal.enabled = amt[5] ? true : false;
                retVal.storageRedirection = amt[6] ? true : false;
                retVal.serialOverLan = amt[7] ? true : false;
                retVal.kvm = amt[14] ? true : false;

                if (data[131].peek() && data[131].peek().slice(52, 56).toString() == 'vPro') {
                    var settings = data[131].peek();
                    if (settings[0] & 0x04) { retVal.TXT = (settings[0] & 0x08) ? true : false; }
                    if (settings[0] & 0x10) { retVal.VMX = (settings[0] & 0x20) ? true : false; }
                    retVal.MEBX = settings.readUInt16LE(4).toString() + '.' + settings.readUInt16LE(6).toString() + '.' + settings.readUInt16LE(8).toString() + '.' + settings.readUInt16LE(10).toString();

                    var mecap = settings.slice(20, 32);
                    retVal.ManagementEngine = mecap.readUInt16LE(6).toString() + '.' + mecap.readUInt16LE(4).toString() + '.' + mecap.readUInt16LE(10).toString() + '.' + mecap.readUInt16LE(8).toString();

                    //var lan = settings.slice(36, 48);
                    //console.log(lan.toString('hex'));
                    //retVal.LAN = (lan.readUInt16LE(10) & 0x03).toString() + '/' + ((lan.readUInt16LE(10) & 0xF8) >> 3).toString();

                    //console.log(lan.readUInt16LE(3));
                    //retVal.WLAN = (lan.readUInt16LE(3) & 0x07).toString() + '/' + ((lan.readUInt16LE(3) & 0xF8) >> 3).toString() + '/' + (lan.readUInt16LE(3) >> 8).toString();
                }
            }
        }
        return (retVal);
    };

    // Human-readable names for SMBIOS structure types (for diagnostics).
    this.smTableTypes = {
        0: 'BIOS information',
        1: 'System information',
        2: 'Baseboard (or Module) information',
        4: 'Processor information',
        5: 'memory controller information',
        6: 'Memory module information',
        7: 'Cache information',
        8: 'Port connector information',
        9: 'System slots',
        10: 'On board devices information',
        11: 'OEM strings',
        12: 'System configuration options',
        13: 'BIOS language information',
        14: 'Group associations',
        15: 'System event log',
        16: 'Physical memory array',
        17: 'Memory device',
        18: '32bit memory error information',
        19: 'Memory array mapped address',
        20: 'Memory device mapped address',
        21: 'Built-in pointing device',
        22: 'Portable battery',
        23: 'System reset',
        24: 'Hardware security',
        25: 'System power controls',
        26: 'Voltage probe',
        27: 'Cooling device',
        28: 'Temperature probe',
        29: 'Electrical current probe',
        30: 'Out-of-band remote access',
        31: 'Boot integrity services (BIS) entry point',
        32: 'System boot information',
        33: '64bit memory error information',
        34: 'Management device',
        35: 'Management device component',
        36: 'Management device threshold data',
        37: 'Memory channel',
        38: 'IPMI device information',
        39: 'System power supply',
        40: 'Additional information',
        41: 'Onboard devices extended information',
        42: 'Management controller host interface',
        126: 'Inactive',
        127: 'End-of-table'
    }
}

module.exports = new SMBiosTables();
Java
/* * Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.mss.internal.mime; import org.junit.Assert; import org.junit.Test; /** * Test the functionality of MimeMapper */ public class MimeMapperTest { @Test public void testMimeMappingForKnownExtension() throws MimeMappingException { String mimeType = MimeMapper.getMimeType("png"); Assert.assertEquals("image/png", mimeType); } @Test(expected = MimeMappingException.class) public void testMimeMappingForUnknownExtension() throws MimeMappingException { MimeMapper.getMimeType("unknownext"); } }
Java
form.style { clear: both; } form.style label { width:100px; display:block; float:left; padding-top:4px; font-size:14px; color:#FFF; text-align:right; padding-right:30px; } form.style label.long { width: auto; display: inline; float:none; padding: 0; } form.style input, form.style textarea { padding:3px 6px 3px 6px; font-size:14px; background-color:#EEE; border:2px solid #999; font-family:"Trebuchet MS"; color:#0099FF; width:200px; } form.style textarea { width: 300px} form.style input.radio {width: 30px;} form.style input:focus, form.style textarea:focus { border-color:#00A8FF } form.style input.submit { color:#FFF; background-color:#0F414F; border-color:#000000; width:100px; } form.style fieldset { border:0 } form.style h2 { margin-bottom:25px; margin-top:10px; width: 60%; border-bottom: 1px dashed #00A8FF; padding-bottom: 7px; padding-left:10px; font-size:20px } .contact_l { margin-left:80px !important; margin-left: 50px; width: 250px; float:left } .contact_l img { vertical-align: middle;} .contact_d { width: 500px; float:left }
Java
# AUTOGENERATED FILE FROM balenalib/nitrogen6x-debian:stretch-run

# A few reasons for installing distribution-provided OpenJDK:
#
#  1. Oracle.  Licensing prevents us from redistributing the official JDK.
#
#  2. Compiling OpenJDK also requires the JDK to be installed, and it gets
#     really hairy.
#
#     For some sample build times, see Debian's buildd logs:
#       https://buildd.debian.org/status/logs.php?pkg=openjdk-11

# Tools needed to unpack assorted archive formats during builds.
RUN apt-get update && apt-get install -y --no-install-recommends \
		bzip2 \
		unzip \
		xz-utils \
	&& rm -rf /var/lib/apt/lists/*

# openjdk-11 is only available in stretch via backports.
RUN echo 'deb http://deb.debian.org/debian stretch-backports main' > /etc/apt/sources.list.d/stretch-backports.list

# Default to UTF-8 file.encoding
ENV LANG C.UTF-8

# add a simple script that can auto-detect the appropriate JAVA_HOME value
# based on whether the JDK or only the JRE is installed
RUN { \
		echo '#!/bin/sh'; \
		echo 'set -e'; \
		echo; \
		echo 'dirname "$(dirname "$(readlink -f "$(which javac || which java)")")"'; \
	} > /usr/local/bin/docker-java-home \
	&& chmod +x /usr/local/bin/docker-java-home

# do some fancy footwork to create a JAVA_HOME that's cross-architecture-safe
RUN ln -svT "/usr/lib/jvm/java-11-openjdk-$(dpkg --print-architecture)" /docker-java-home
ENV JAVA_HOME /docker-java-home

RUN set -ex; \
	\
# deal with slim variants not having man page directories (which causes "update-alternatives" to fail)
	if [ ! -d /usr/share/man/man1 ]; then \
		mkdir -p /usr/share/man/man1; \
	fi; \
	\
	apt-get update; \
	apt-get install -y --no-install-recommends \
		openjdk-11-jre-headless \
	; \
	rm -rf /var/lib/apt/lists/*; \
	\
	rm -vf /usr/local/bin/java; \
	\
# ca-certificates-java does not work on src:openjdk-11: (https://bugs.debian.org/914424, https://bugs.debian.org/894979, https://salsa.debian.org/java-team/ca-certificates-java/commit/813b8c4973e6c4bb273d5d02f8d4e0aa0b226c50#d4b95d176f05e34cd0b718357c532dc5a6d66cd7_54_56)
	keytool -importkeystore -srckeystore /etc/ssl/certs/java/cacerts -destkeystore /etc/ssl/certs/java/cacerts.jks -deststoretype JKS -srcstorepass changeit -deststorepass changeit -noprompt; \
	mv /etc/ssl/certs/java/cacerts.jks /etc/ssl/certs/java/cacerts; \
	/var/lib/dpkg/info/ca-certificates-java.postinst configure; \
	\
# verify that "docker-java-home" returns what we expect
	[ "$(readlink -f "$JAVA_HOME")" = "$(docker-java-home)" ]; \
	\
# update-alternatives so that future installs of other OpenJDK versions don't change /usr/bin/java
	update-alternatives --get-selections | awk -v home="$(readlink -f "$JAVA_HOME")" 'index($3, home) == 1 { $2 = "manual"; print | "update-alternatives --set-selections" }'; \
# ... and verify that it actually worked for one of the alternatives we care about
	update-alternatives --query java | grep -q 'Status: manual'

# Placeholder CMD: images derived from this base are expected to set their own.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

# Record image details for the `balena-info` helper.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Debian Stretch \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nOpenJDK v11-jre \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info

# One-shot /bin/sh shim: prints balena-info on first shell invocation, then
# restores the real shell so subsequent invocations behave normally.
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh
Java
/** * vue app * Created by HC on 2016/7/19. */ var header = Vue.extend({ template: '#header' }); // 全局注册组件 Vue.component('my-header', header); var footer = Vue.extend({ template: '#footer' }); // 全局注册组件 Vue.component('my-footer', footer); var index = Vue.extend({ template: '#index' }); var App = Vue.extend({}); var router = new VueRouter(); router.map({ '/': { component: index }, '/bar': { component: footer } }); // Now we can start the app! // The router will create an instance of App and mount to // the element matching the selector #app. router.start(App, '#app');
Java
<data name="commentPage" th:currentPage="${param.currentPage}" th:moduleType="${param.moduleType}" th:moduleId="${param.moduleId}" th:mode="${param.mode}" th:asc="${param.asc}"/> <fragment name="评论" />
Java
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using System.Web.UI.WebControls;
using System.Xml;

namespace OpenRiaServices.DomainServices.Server
{
    /// <summary>
    /// Represents a domain operation method within a DomainService
    /// </summary>
    public abstract class DomainOperationEntry
    {
        private DomainOperation _operation;
        // Parameters exposed to callers: the trailing "out int" count parameter
        // (if present) is stripped and tracked via _hasOutCountParameter.
        private ReadOnlyCollection<DomainOperationParameter> _effectiveParameters;
        private bool _hasOutCountParameter;
        private string _methodName;
        private Attribute _operationAttribute;
        private AttributeCollection _attributes;
        private Type _associatedType;
        // _actualReturnType is the declared CLR return type (possibly Task<T>);
        // _returnType is the unwrapped logical return type.
        private Type _actualReturnType;
        private Type _returnType;
        private Type _domainServiceType;
        // Lazily computed flags (see RequiresValidation / RequiresAuthorization).
        private bool? _requiresValidation;
        private bool? _requiresAuthorization;
        private Func<object, object> _unwrapTaskResultFunc;

        /// <summary>
        /// Initializes a new instance of the DomainOperationEntry class
        /// </summary>
        /// <param name="domainServiceType">The <see cref="DomainService"/> Type this operation is a member of.</param>
        /// <param name="name">The name of the operation</param>
        /// <param name="operation">The <see cref="DomainOperation"/></param>
        /// <param name="returnType">The return Type of the operation</param>
        /// <param name="parameters">The parameter definitions for the operation</param>
        /// <param name="attributes">The method level attributes for the operation</param>
        protected DomainOperationEntry(Type domainServiceType, string name, DomainOperation operation, Type returnType, IEnumerable<DomainOperationParameter> parameters, AttributeCollection attributes)
        {
            if (string.IsNullOrEmpty(name))
            {
                throw new ArgumentNullException("name");
            }

            if (returnType == null)
            {
                throw new ArgumentNullException("returnType");
            }

            if (parameters == null)
            {
                throw new ArgumentNullException("parameters");
            }

            if (attributes == null)
            {
                throw new ArgumentNullException("attributes");
            }

            if (domainServiceType == null)
            {
                throw new ArgumentNullException("domainServiceType");
            }

            if (operation == DomainOperation.None)
            {
                throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, Resource.InvalidDomainOperationEntryType, Enum.GetName(typeof(DomainOperation), operation)));
            }

            // Async operations ("FooAsync" returning Task/Task<T>) are exposed
            // under the synchronous name with the unwrapped return type.
            bool isTaskType = TypeUtility.IsTaskType(returnType);

            this._methodName = isTaskType ? RemoveAsyncFromName(name) : name;
            this._actualReturnType = returnType;
            this._returnType = isTaskType ? TypeUtility.GetTaskReturnType(returnType) : returnType;
            this._attributes = attributes;
            this._operation = operation;
            this._domainServiceType = domainServiceType;

            List<DomainOperationParameter> effectiveParameters = parameters.ToList();
            int paramCount = effectiveParameters.Count;
            if (paramCount > 0)
            {
                // A trailing "out int" parameter is a total-count output, not a
                // client-visible parameter; drop it from the effective list.
                DomainOperationParameter lastParameter = effectiveParameters[paramCount - 1];
                if (lastParameter.IsOut && lastParameter.ParameterType.HasElementType && lastParameter.ParameterType.GetElementType() == typeof(int))
                {
                    this._hasOutCountParameter = true;
                    effectiveParameters = effectiveParameters.Take(paramCount - 1).ToList();
                }
            }
            this._effectiveParameters = effectiveParameters.AsReadOnly();
        }

        /// <summary>
        /// Removes any trailing "Async" from the specific name.
        /// </summary>
        /// <param name="name">A name.</param>
        /// <returns>name, but without "Async" at the end</returns>
        private static string RemoveAsyncFromName(string name)
        {
            const string async = "Async";
            // Length check keeps a method literally named "Async" unchanged.
            if (name.EndsWith(async) && name.Length > async.Length)
                return name.Substring(0, name.Length - async.Length);
            else
                return name;
        }

        /// <summary>
        /// Gets a string value indicating the logical operation type
        /// corresponding to the current <see cref="Operation"/> value.
        /// </summary>
        /// <value>
        /// The value returned by this property is used in <see cref="System.ComponentModel.DataAnnotations.AuthorizationContext.OperationType"/>
        /// to describe the category of operation being authorized.
        /// <para>This helper property exists to avoid the overhead of <see cref="Enum.GetName"/> and
        /// to map"Custom" into "Update".  These strings are not localized because they are meant
        /// to be used in authorization rules that work independent of culture.
        /// </para>
        /// </value>
        internal string OperationType
        {
            get
            {
                switch (this.Operation)
                {
                    case DomainOperation.Query: return "Query";
                    case DomainOperation.Insert: return "Insert";
                    case DomainOperation.Update:
                    case DomainOperation.Custom: return "Update";
                    case DomainOperation.Delete: return "Delete";
                    case DomainOperation.Invoke: return "Invoke";
                    default:
                        System.Diagnostics.Debug.Fail("Unknown DomainOperation type");
                        return "Unknown";
                }
            }
        }

        /// <summary>
        /// Gets the <see cref="DomainService"/> Type this operation is a member of.
        /// </summary>
        public Type DomainServiceType
        {
            get
            {
                return this._domainServiceType;
            }
        }

        /// <summary>
        /// Gets the name of the operation
        /// </summary>
        public string Name
        {
            get
            {
                return this._methodName;
            }
        }

        /// <summary>
        /// Gets the attribute that contains metadata about the operation.
        /// </summary>
        public Attribute OperationAttribute
        {
            get
            {
                this.InitializeOperationAttribute();
                return this._operationAttribute;
            }
        }

        /// <summary>
        /// Gets a value indicating whether this operation requires validation.
        /// </summary>
        internal bool RequiresValidation
        {
            get
            {
                // Computed once and cached: checks method-level validation
                // attributes, then parameter-level, then complex-type parameters.
                if (!this._requiresValidation.HasValue)
                {
                    // Determine whether this operation requires validation.
                    this._requiresValidation = this._attributes[typeof(ValidationAttribute)] != null;
                    if (!this._requiresValidation.Value)
                    {
                        this._requiresValidation = this.Parameters.Any(p => p.Attributes[typeof(ValidationAttribute)] != null);
                    }
                    if (!this._requiresValidation.Value)
                    {
                        this._requiresValidation = this.Parameters.Any(p =>
                        {
                            // Complex parameters need to be validated if validation occurs on the
                            // type itself.
                            if (TypeUtility.IsSupportedComplexType(p.ParameterType))
                            {
                                Type complexType = TypeUtility.GetElementType(p.ParameterType);
                                MetaType metaType = MetaType.GetMetaType(complexType);
                                return metaType.RequiresValidation;
                            }
                            return false;
                        });
                    }
                }

                return this._requiresValidation.Value;
            }
        }

        /// <summary>
        /// Gets a value indicating whether this operation requires authorization.
        /// </summary>
        internal bool RequiresAuthorization
        {
            get
            {
                if (!this._requiresAuthorization.HasValue)
                {
                    // Determine whether this operation requires authorization. AuthorizationAttributes may appear on
                    // the DomainService type as well as the DomainOperationEntry method.
                    this._requiresAuthorization = this._attributes[typeof(AuthorizationAttribute)] != null;
                    if (!this._requiresAuthorization.Value)
                    {
                        this._requiresAuthorization = DomainServiceDescription.GetDescription(this._domainServiceType).Attributes[typeof(AuthorizationAttribute)] != null;
                    }
                }
                return this._requiresAuthorization.Value;
            }
        }

        /// <summary>
        /// Based on the operation type specified, create the default corresponding attribute
        /// if it hasn't been specified explicitly, and add it to the attributes collection.
/// </summary> private void InitializeOperationAttribute() { if (this._operationAttribute != null) { return; } bool attributeCreated = false; switch (this._operation) { case DomainOperation.Query: this._operationAttribute = this._attributes[typeof(QueryAttribute)]; if (this._operationAttribute == null) { QueryAttribute qa = new QueryAttribute(); // singleton returning query methods aren't composable qa.IsComposable = TypeUtility.FindIEnumerable(this.ReturnType) != null; this._operationAttribute = qa; attributeCreated = true; } break; case DomainOperation.Insert: this._operationAttribute = this._attributes[typeof(InsertAttribute)]; if (this._operationAttribute == null) { this._operationAttribute = new InsertAttribute(); attributeCreated = true; } break; case DomainOperation.Update: this._operationAttribute = this._attributes[typeof(UpdateAttribute)]; if (this._operationAttribute == null) { this._operationAttribute = new UpdateAttribute(); attributeCreated = true; } break; case DomainOperation.Delete: this._operationAttribute = this._attributes[typeof(DeleteAttribute)]; if (this._operationAttribute == null) { this._operationAttribute = new DeleteAttribute(); attributeCreated = true; } break; case DomainOperation.Invoke: this._operationAttribute = this._attributes[typeof(InvokeAttribute)]; if (this._operationAttribute == null) { this._operationAttribute = new InvokeAttribute(); attributeCreated = true; } break; case DomainOperation.Custom: this._operationAttribute = this._attributes[typeof(EntityActionAttribute)]; if (this._operationAttribute == null) { this._operationAttribute = new EntityActionAttribute(); attributeCreated = true; } break; default: break; } if (attributeCreated) { if (this._attributes == null) { this._attributes = new AttributeCollection(this._operationAttribute); } else { this._attributes = AttributeCollection.FromExisting(this._attributes, this._operationAttribute); } } } /// <summary> /// Gets the attributes for the operation /// </summary> 
public AttributeCollection Attributes { get { this.InitializeOperationAttribute(); return this._attributes; } internal set { this._attributes = value; // need to reset computed flags that are based // on operation attributes so they will be recomputed this._requiresValidation = null; this._requiresAuthorization = null; } } /// <summary> /// Gets the return Type of the operation /// </summary> public Type ReturnType { get { return this._returnType; } } /// <summary> /// Gets a value indicating whether the actual return type is a Task or Task{T}. /// </summary> public bool IsTaskAsync { get { return TypeUtility.IsTaskType(this._actualReturnType); } } /// <summary> /// Gets the parameters of the operation /// </summary> public ReadOnlyCollection<DomainOperationParameter> Parameters { get { return this._effectiveParameters; } } /// <summary> /// Invokes this <see cref="DomainOperationEntry" />. /// </summary> /// <param name="domainService">The <see cref="DomainService"/> instance the operation is being invoked on.</param> /// <param name="parameters">The parameters to pass to the method.</param> /// <returns>The return value of the invoked method.</returns> public abstract object Invoke(DomainService domainService, object[] parameters); /// <summary> /// Gets the type of domain operation implemented by the method. /// </summary> public DomainOperation Operation { get { return this._operation; } internal set { this._operation = value; } } /// <summary> /// Returns the associated Type this DomainOperation operates on. For query methods /// this will be the element type of the return type (or the singleton return Type), /// and for all other methods this will be the Type of the first method parameter. 
/// </summary> public Type AssociatedType { get { if (this._associatedType == null) { if (this.Operation == DomainOperation.Query) { Type entityType = TypeUtility.FindIEnumerable(this.ReturnType); if (entityType != null) { entityType = entityType.GetGenericArguments()[0]; } else { entityType = this.ReturnType; } this._associatedType = entityType; } else { if (this.Parameters.Count > 0) { this._associatedType = this.Parameters[0].ParameterType; } } } return this._associatedType; } } private bool HasOutCountParameter { get { return this._hasOutCountParameter; } } /// <summary> /// Invokes this <see cref="DomainOperationEntry" />. /// </summary> /// <param name="domainService">The <see cref="DomainService"/> instance the operation is being invoked on.</param> /// <param name="parameters">The parameters to pass to the method.</param> /// <param name="totalCount">The total number of rows for the input query without any paging applied to it.</param> /// <returns>The return value of the invoked method.</returns> internal object Invoke(DomainService domainService, object[] parameters, out int totalCount) { if (this.HasOutCountParameter) { object[] parametersWithCount = new object[parameters.Length + 1]; parameters.CopyTo(parametersWithCount, 0); parametersWithCount[parameters.Length] = 0; object result = this.Invoke(domainService, parametersWithCount); totalCount = (int)parametersWithCount[parameters.Length]; return result; } else { totalCount = DomainService.TotalCountUndefined; return this.Invoke(domainService, parameters); } } internal object UnwrapTaskResult(object result) { if (!IsTaskAsync) return result; if (_unwrapTaskResultFunc == null) { if (ReturnType == typeof (void)) _unwrapTaskResultFunc = UnwrapVoidResult; else { _unwrapTaskResultFunc = (Func<object, object>)Delegate.CreateDelegate(typeof(Func<object, object>), typeof(DomainOperationEntry).GetMethod("UnwrapGenericResult", BindingFlags.Static | BindingFlags.NonPublic) .MakeGenericMethod(this.ReturnType)); } } 
return _unwrapTaskResultFunc(result); } private static object UnwrapVoidResult(object result) { if(result == null) throw new InvalidOperationException("Task method returned null"); ((Task) result).Wait(); return null; } private static object UnwrapGenericResult<T>(object result) { if(result == null) throw new InvalidOperationException("Task method returned null"); return ((Task<T>) result).Result; } /// <summary> /// Returns a textual description of the <see cref="DomainOperationEntry"/>. /// </summary> /// <returns>A string representation of the <see cref="DomainOperationEntry"/>.</returns> public override string ToString() { StringBuilder output = new StringBuilder(); output.AppendFormat(CultureInfo.InvariantCulture, "{0} {1}(", this.ReturnType, this.Name); for (int i = 0; i < this.Parameters.Count; i++) { if (i > 0) { output.Append(", "); } output.Append(this.Parameters[i].ToString()); } output.Append(')'); return output.ToString(); } } }
Java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.shardingsphere.elasticjob.lite.spring.namespace.job; import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.elasticjob.infra.concurrent.BlockUtils; import org.apache.shardingsphere.elasticjob.lite.api.bootstrap.impl.OneOffJobBootstrap; import org.apache.shardingsphere.elasticjob.lite.internal.schedule.JobRegistry; import org.apache.shardingsphere.elasticjob.lite.spring.namespace.fixture.job.DataflowElasticJob; import org.apache.shardingsphere.elasticjob.lite.spring.namespace.fixture.job.FooSimpleElasticJob; import org.apache.shardingsphere.elasticjob.lite.spring.namespace.test.AbstractZookeeperJUnit4SpringContextTests; import org.apache.shardingsphere.elasticjob.reg.base.CoordinatorRegistryCenter; import org.junit.After; import org.junit.Before; import org.junit.Test; import javax.annotation.Resource; import static org.junit.Assert.assertTrue; @RequiredArgsConstructor public abstract class AbstractOneOffJobSpringIntegrateTest extends AbstractZookeeperJUnit4SpringContextTests { private final String simpleJobName; private final String throughputDataflowJobName; @Resource private CoordinatorRegistryCenter regCenter; @Before @After public void reset() { 
FooSimpleElasticJob.reset(); DataflowElasticJob.reset(); } @After public void tearDown() { JobRegistry.getInstance().shutdown(simpleJobName); JobRegistry.getInstance().shutdown(throughputDataflowJobName); } @Test public void assertSpringJobBean() { assertSimpleElasticJobBean(); assertThroughputDataflowElasticJobBean(); } private void assertSimpleElasticJobBean() { OneOffJobBootstrap bootstrap = applicationContext.getBean(simpleJobName, OneOffJobBootstrap.class); bootstrap.execute(); while (!FooSimpleElasticJob.isCompleted()) { BlockUtils.waitingShortTime(); } assertTrue(FooSimpleElasticJob.isCompleted()); assertTrue(regCenter.isExisted("/" + simpleJobName + "/sharding")); } private void assertThroughputDataflowElasticJobBean() { OneOffJobBootstrap bootstrap = applicationContext.getBean(throughputDataflowJobName, OneOffJobBootstrap.class); bootstrap.execute(); while (!DataflowElasticJob.isCompleted()) { BlockUtils.waitingShortTime(); } assertTrue(DataflowElasticJob.isCompleted()); assertTrue(regCenter.isExisted("/" + throughputDataflowJobName + "/sharding")); } }
Java
/*- * #%L * ELK Reasoner Core * $Id:$ * $HeadURL:$ * %% * Copyright (C) 2011 - 2016 Department of Computer Science, University of Oxford * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.semanticweb.elk.reasoner.entailments.impl; import java.util.Collections; import java.util.List; import org.semanticweb.elk.owl.interfaces.ElkObjectPropertyAssertionAxiom; import org.semanticweb.elk.reasoner.entailments.model.DerivedClassInclusionEntailsObjectPropertyAssertionAxiom; import org.semanticweb.elk.reasoner.entailments.model.Entailment; import org.semanticweb.elk.reasoner.entailments.model.EntailmentInference; import org.semanticweb.elk.reasoner.entailments.model.ObjectPropertyAssertionAxiomEntailment; import org.semanticweb.elk.reasoner.saturation.conclusions.model.SubClassInclusionComposed; public class DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl extends AbstractAxiomEntailmentInference<ElkObjectPropertyAssertionAxiom, ObjectPropertyAssertionAxiomEntailment> implements DerivedClassInclusionEntailsObjectPropertyAssertionAxiom { private final SubClassInclusionComposed reason_; public DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl( final ObjectPropertyAssertionAxiomEntailment conclusion, final SubClassInclusionComposed reason) { super(conclusion); this.reason_ = reason; } @Override public List<? 
extends Entailment> getPremises() { return Collections.emptyList(); } @Override public SubClassInclusionComposed getReason() { return reason_; } @Override public <O> O accept(final EntailmentInference.Visitor<O> visitor) { return visitor.visit(this); } }
Java
// Copyright (C) 2004 Davis E. King ([email protected]) // License: Boost Software License See LICENSE.txt for the full license. #ifndef DLIB_MISC_API_KERNEl_1_ #include "misc_api_kernel_2.h" #endif
Java
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Test bootstrap for this plugin: loads the Rails environment, resets the
# test database, and configures Test::Unit fixtures. Load order matters —
# the Rails environment must be loaded before ActiveRecord is configured.

# Put the plugin's lib directory on the load path.
$:.unshift(File.dirname(__FILE__) + '/../lib')

require 'test/unit'
require File.expand_path(File.join(File.dirname(__FILE__), '../../../../config/environment.rb'))
require 'rubygems'
require 'active_record/fixtures'

# Per-adapter connection settings keyed by adapter name ('sqlite3', 'mysql', ...).
config = YAML::load(IO.read( File.join(File.dirname(__FILE__),'database.yml')))

# cleanup logs and databases between test runs
#FileUtils.rm File.join(File.dirname(__FILE__), "debug.log"), :force => true
FileUtils.rm File.join(RAILS_ROOT, config['sqlite3'][:dbfile]), :force => true

ActiveRecord::Base.logger = Logger.new(File.join(File.dirname(__FILE__), "debug.log"))
# DB env var selects the adapter; defaults to sqlite3.
ActiveRecord::Base.establish_connection(config[ENV['DB'] || 'sqlite3'])
# Rebuild the schema from scratch on every test run.
load(File.join(File.dirname(__FILE__), "schema.rb"))

Test::Unit::TestCase.fixture_path = File.dirname(__FILE__) + "/fixtures/"
$LOAD_PATH.unshift(Test::Unit::TestCase.fixture_path)

class Test::Unit::TestCase #:nodoc:
  # Load the named fixture tables into the test database, optionally
  # scoped to the given block.
  def create_fixtures(*table_names)
    if block_given?
      Fixtures.create_fixtures(Test::Unit::TestCase.fixture_path, table_names) { yield }
    else
      Fixtures.create_fixtures(Test::Unit::TestCase.fixture_path, table_names)
    end
  end

  # Turn off transactional fixtures if you're working with MyISAM tables in MySQL
  self.use_transactional_fixtures = true

  # Instantiated fixtures are slow, but give you @david where you otherwise would need people(:david)
  self.use_instantiated_fixtures  = false
end
Java
/** * Copyright (C) 2009-2014 Dell, Inc. * See annotations for authorship information * * ==================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== */ package org.dasein.cloud; import org.dasein.cloud.admin.AdminServices; import org.dasein.cloud.ci.CIServices; import org.dasein.cloud.compute.ComputeServices; import org.dasein.cloud.identity.IdentityServices; import org.dasein.cloud.network.NetworkServices; import org.dasein.cloud.platform.PlatformServices; import javax.annotation.Nonnull; import javax.annotation.Nullable; /** * Simple base implementation of a cloud provider bootstrap object that defaults all services to <code>null</code>. * @author George Reese * @version 2013.07 added javadoc, fixed annotations on data center services, made it return an NPE * @since unknown */ public abstract class AbstractCloud extends CloudProvider { /** * Constructs a cloud provider instance. */ public AbstractCloud() { } @Override public @Nullable AdminServices getAdminServices() { return null; } @Override public @Nullable ComputeServices getComputeServices() { CloudProvider compute = getComputeCloud(); return (compute == null ? 
null : compute.getComputeServices()); } @Override public @Nonnull ContextRequirements getContextRequirements() { return new ContextRequirements( new ContextRequirements.Field("apiKeys", ContextRequirements.FieldType.KEYPAIR), new ContextRequirements.Field("x509", ContextRequirements.FieldType.KEYPAIR, false) ); } @Override public @Nullable CIServices getCIServices() { CloudProvider compute = getComputeCloud(); return (compute == null ? null : compute.getCIServices()); } @Override public @Nullable IdentityServices getIdentityServices() { CloudProvider compute = getComputeCloud(); return (compute == null ? null : compute.getIdentityServices()); } @Override public @Nullable NetworkServices getNetworkServices() { CloudProvider compute = getComputeCloud(); return (compute == null ? null : compute.getNetworkServices()); } @Override public @Nullable PlatformServices getPlatformServices() { CloudProvider compute = getComputeCloud(); return ( compute == null ? null : compute.getPlatformServices() ); } }
Java
package migrations import "github.com/BurntSushi/migration" func ReplaceStepLocationWithPlanID(tx migration.LimitedTx) error { _, err := tx.Exec(` ALTER TABLE containers DROP COLUMN step_location; `) if err != nil { return err } _, err = tx.Exec(` ALTER TABLE containers ADD COLUMN plan_id text; `) return err }
Java
#--
# Author:: Daniel DeLeo (<[email protected]>)
# Copyright:: Copyright 2012-2018, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

require "chef/node/common_api"
require "chef/node/mixin/state_tracking"

class Chef
  class Node

    # == AttrArray
    # AttrArray is identical to Array, except that it keeps a reference to the
    # "root" (Chef::Node::Attribute) object, and will trigger a cache
    # invalidation on that object when mutated.
    class AttrArray < Array
      MUTATOR_METHODS = [
        :<<,
        :[]=,
        :clear,
        :collect!,
        :compact!,
        :default=,
        :default_proc=,
        :delete_at,
        :delete_if,
        :fill,
        :flatten!,
        :insert,
        :keep_if,
        :map!,
        :merge!,
        :pop,
        :push,
        :update,
        :reject!,
        :reverse!,
        :replace,
        :select!,
        :shift,
        :slice!,
        :sort!,
        :sort_by!,
        :uniq!,
        :unshift,
      ]

      # For all of the methods that may mutate an Array, we override them to
      # also invalidate the cached merged_attributes on the root
      # Node::Attribute object.
      MUTATOR_METHODS.each do |mutator|
        define_method(mutator) do |*args, &block|
          # Perform the mutation first, then invalidate the cache.
          ret = super(*args, &block)
          send_reset_cache
          ret
        end
      end

      # Unlike the generated mutators above, #delete resets the cache *before*
      # delegating, and passes the path/key so the reset can be scoped.
      def delete(key, &block)
        send_reset_cache(__path__, key)
        super
      end

      def initialize(data = [])
        super(data)
        # Deep-convert nested Hash/Array elements into VividMash/AttrArray.
        map! { |e| convert_value(e) }
      end

      # For elements like Fixnums, true, nil...
      # (objects that cannot be dup'd are returned unchanged)
      def safe_dup(e)
        e.dup
      rescue TypeError
        e
      end

      # Returns a plain Array copy (shallow, element-wise safe_dup),
      # detached from the attribute tree and its cache tracking.
      def dup
        Array.new(map { |e| safe_dup(e) })
      end

      private

      # Wrap raw Hash/Array values so the whole subtree participates in
      # cache invalidation; already-wrapped values pass through untouched.
      def convert_value(value)
        case value
        when VividMash
          value
        when AttrArray
          value
        when Hash
          VividMash.new(value, __root__, __node__, __precedence__)
        when Array
          AttrArray.new(value, __root__, __node__, __precedence__)
        else
          value
        end
      end

      # needed for __path__
      def convert_key(key)
        key
      end

      prepend Chef::Node::Mixin::StateTracking
    end

    # == VividMash
    # VividMash is identical to a Mash, with a few exceptions:
    # * It has a reference to the root Chef::Node::Attribute to which it
    #   belongs, and will trigger cache invalidation on that object when
    #   mutated.
    # * It auto-vivifies, that is a reference to a missing element will result
    #   in the creation of a new VividMash for that key. (This only works when
    #   using the element reference method, `[]` -- other methods, such as
    #   #fetch, work as normal).
    # * attr_accessor style element set and get are supported via method_missing
    class VividMash < Mash
      include CommonAPI

      # Methods that mutate a VividMash. Each of them is overridden so that it
      # also invalidates the cached merged_attributes on the root Attribute
      # object.
      MUTATOR_METHODS = [
        :clear,
        :delete_if,
        :keep_if,
        :merge!,
        :update,
        :reject!,
        :replace,
        :select!,
        :shift,
      ]

      # For all of the mutating methods on Mash, override them so that they
      # also invalidate the cached `merged_attributes` on the root Attribute
      # object.

      # As in AttrArray, #delete resets the cache (scoped by path/key)
      # before delegating to Mash#delete.
      def delete(key, &block)
        send_reset_cache(__path__, key)
        super
      end

      MUTATOR_METHODS.each do |mutator|
        define_method(mutator) do |*args, &block|
          # Note: here the cache is reset before the mutation (the generated
          # AttrArray mutators reset after); preserved as-is.
          send_reset_cache
          super(*args, &block)
        end
      end

      def initialize(data = {})
        super(data)
      end

      # Auto-vivifying reader: a miss creates (and stores) an empty VividMash
      # under the key, so chained references like node['a']['b']['c'] work.
      def [](key)
        value = super
        if !key?(key)
          value = self.class.new({}, __root__)
          self[key] = value
        else
          value
        end
      end

      # Writer: store first, then invalidate the cache for this path/key.
      def []=(key, value)
        ret = super
        send_reset_cache(__path__, key)
        ret # rubocop:disable Lint/Void
      end

      alias :attribute? :has_key?

      def convert_key(key)
        super
      end

      # Mash uses #convert_value to mashify values on input.
      # We override it here to convert hash or array values to VividMash or
      # AttrArray for consistency and to ensure that the added parts of the
      # attribute tree will have the correct cache invalidation behavior.
      def convert_value(value)
        case value
        when VividMash
          value
        when AttrArray
          value
        when Hash
          VividMash.new(value, __root__, __node__, __precedence__)
        when Array
          AttrArray.new(value, __root__, __node__, __precedence__)
        else
          value
        end
      end

      # Returns a plain Mash copy, detached from the attribute tree.
      def dup
        Mash.new(self)
      end

      prepend Chef::Node::Mixin::StateTracking
    end
  end
end
Java
//////////////////////////////////////////////////////////////////////////////// /// DISCLAIMER /// /// Copyright 2014-2020 ArangoDB GmbH, Cologne, Germany /// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany /// /// Licensed under the Apache License, Version 2.0 (the "License"); /// you may not use this file except in compliance with the License. /// You may obtain a copy of the License at /// /// http://www.apache.org/licenses/LICENSE-2.0 /// /// Unless required by applicable law or agreed to in writing, software /// distributed under the License is distributed on an "AS IS" BASIS, /// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. /// See the License for the specific language governing permissions and /// limitations under the License. /// /// Copyright holder is ArangoDB GmbH, Cologne, Germany /// /// @author Lars Maier //////////////////////////////////////////////////////////////////////////////// #pragma once #include <iostream> #include <memory> #include <utility> #include "velocypack/Buffer.h" #include "velocypack/Iterator.h" #include "velocypack/Slice.h" namespace arangodb::tests::deserializer { struct slice_access { enum class type { GET, HAS_KEY, COPY_STRING, IS_NUMBER, IS_ARRAY, IS_OBJECT, IS_NONE, LENGTH, AT, GET_NUMBER, IS_STRING, IS_BOOL, GET_BOOL, GET_NUMERIC_VALUE, IS_EQUAL_STRING, STRING_VIEW, ARRAY_ITER_ACCESS, OBJECT_ITER_ACCESS, }; std::string key; std::string parameter; type what; slice_access(std::string key, type what) : key(std::move(key)), what(what) {} slice_access(std::string key, type what, std::string parameter) : key(std::move(key)), parameter(std::move(parameter)), what(what) {} }; struct slice_access_tape { std::vector<slice_access> tape; template<typename... S> void record(S&&... 
s) { tape.emplace_back(slice_access{std::forward<S>(s)...}); } }; static std::ostream& operator<<(std::ostream& os, slice_access::type type) { #define enum_to_string(s) \ case slice_access::type::s: \ os << #s; \ break; switch (type) { enum_to_string(GET) enum_to_string(HAS_KEY) enum_to_string(COPY_STRING) enum_to_string(IS_NUMBER) enum_to_string(IS_ARRAY) enum_to_string(IS_OBJECT) enum_to_string(IS_NONE) enum_to_string(LENGTH) enum_to_string(AT) enum_to_string(GET_NUMBER) enum_to_string(IS_STRING) enum_to_string(IS_BOOL) enum_to_string(GET_BOOL) enum_to_string(GET_NUMERIC_VALUE) enum_to_string(IS_EQUAL_STRING) enum_to_string(STRING_VIEW) enum_to_string(ARRAY_ITER_ACCESS) enum_to_string(OBJECT_ITER_ACCESS) } return os; #undef enum_to_string } static inline std::ostream& operator<<(std::ostream& os, slice_access_tape const& tape) { for (auto const& e : tape.tape) { os << e.key << ' ' << e.what << ' ' << e.parameter << std::endl; } return os; } struct recording_slice { explicit recording_slice() = default; explicit recording_slice(arangodb::velocypack::Slice slice, std::shared_ptr<slice_access_tape> tape) : tape(std::move(tape)), slice(slice) {} explicit recording_slice(arangodb::velocypack::Slice slice, std::shared_ptr<slice_access_tape> tape, std::string prefix) : tape(std::move(tape)), slice(slice), prefix(std::move(prefix)) {} std::shared_ptr<slice_access_tape> tape; arangodb::velocypack::Slice slice; std::string prefix = "$"; bool isNumber() const; bool isArray() const { tape->record(prefix, slice_access::type::IS_ARRAY); return slice.isArray(); } bool isString() const { tape->record(prefix, slice_access::type::IS_STRING); return slice.isString(); } bool isBool() const { tape->record(prefix, slice_access::type::IS_BOOL); return slice.isBool(); } bool isObject() const { tape->record(prefix, slice_access::type::IS_OBJECT); return slice.isObject(); } auto length() const { tape->record(prefix, slice_access::type::LENGTH); return slice.length(); } template<typename 
T> auto at(T t) const { tape->record(prefix + '[' + std::to_string(t) + ']', slice_access::type::AT); return recording_slice(slice.at(t), tape, prefix + '[' + std::to_string(t) + ']'); } template<typename T> auto hasKey(T&& t) const { tape->record(prefix, slice_access::type::HAS_KEY); return slice.hasKey(std::forward<T>(t)); } template<typename T> auto getNumber() const { tape->record(prefix, slice_access::type::GET_NUMBER); return slice.getNumber<T>(); } auto copyString() const { tape->record(prefix, slice_access::type::COPY_STRING); return slice.copyString(); } auto getBool() const { tape->record(prefix, slice_access::type::GET_BOOL); return slice.getBool(); } template<typename T> auto isNumber() const { tape->record(prefix, slice_access::type::IS_NUMBER); return slice.isNumber<T>(); } template<typename... Ts> auto isEqualString(Ts&&... ts) const { tape->record(prefix, slice_access::type::IS_EQUAL_STRING); return slice.isEqualString(std::forward<Ts>(ts)...); } constexpr static auto nullSlice = arangodb::velocypack::Slice::nullSlice; template<typename T> auto get(T&& t) const { tape->record(prefix, slice_access::type::GET, t); return recording_slice(slice.get(std::forward<T>(t)), tape, prefix + '.' 
+ t); } auto toJson() const { return slice.toJson(); } auto isNone() const { tape->record(prefix, slice_access::type::IS_NONE); return slice.isNone(); } auto stringView() const { tape->record(prefix, slice_access::type::STRING_VIEW); return slice.stringView(); } static recording_slice from_buffer( arangodb::velocypack::Buffer<uint8_t> const& b) { return recording_slice(arangodb::velocypack::Slice(b.data()), std::make_shared<slice_access_tape>()); } }; struct object_iterator { object_iterator(arangodb::velocypack::ObjectIterator const& o, std::shared_ptr<slice_access_tape> tape, std::string prefix) : iter(o), tape(std::move(tape)), prefix(std::move(prefix)) {} object_iterator(recording_slice& slice, bool useSequentialIteration = false) : iter(slice.slice, useSequentialIteration), tape(slice.tape), prefix(slice.prefix){}; struct pair { recording_slice key, value; }; object_iterator begin() const { return {iter.begin(), tape, prefix}; } object_iterator end() const { return {iter.end(), tape, prefix}; } object_iterator& operator++() { iter.operator++(); return *this; } bool operator!=(object_iterator const& other) const { return iter.operator!=(other.iter); } pair operator*() const { auto internal = iter.operator*(); tape->record(prefix, slice_access::type::OBJECT_ITER_ACCESS, internal.key.copyString()); return pair{ recording_slice(internal.key, tape, prefix + "@key[" + internal.key.copyString() + ']'), recording_slice(internal.value, tape, prefix + '.' 
+ internal.key.copyString())}; } arangodb::velocypack::ObjectIterator iter; std::shared_ptr<slice_access_tape> tape; std::string prefix; }; struct array_iterator { array_iterator(arangodb::velocypack::ArrayIterator const& o, std::shared_ptr<slice_access_tape> tape, std::string prefix) : iter(o), tape(std::move(tape)), prefix(std::move(prefix)), index(0) {} explicit array_iterator(recording_slice& slice) : iter(slice.slice), tape(slice.tape), prefix(slice.prefix), index(0){}; array_iterator begin() const { return {iter.begin(), tape, prefix}; } array_iterator end() const { return {iter.end(), tape, prefix}; } recording_slice operator*() const { tape->record(prefix, slice_access::type::ARRAY_ITER_ACCESS, std::to_string(index)); auto internal = iter.operator*(); return recording_slice(internal, tape, prefix + "[" + std::to_string(index) + ']'); } bool operator!=(array_iterator const& other) const { return iter.operator!=(other.iter); } array_iterator& operator++() { ++index; iter.operator++(); return *this; } array_iterator operator++(int) { array_iterator result(*this); ++(*this); return result; } arangodb::velocypack::ArrayIterator iter; std::shared_ptr<slice_access_tape> tape; std::string prefix; std::size_t index; }; } // namespace arangodb::tests::deserializer #ifdef DESERIALIZER_SET_TEST_TYPES namespace deserializer { using slice_type = ::deserializer::test::recording_slice; using object_iterator = ::deserializer::test::object_iterator; using array_iterator = ::deserializer::test::array_iterator; } // namespace deserializer #endif
Java
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_BASEPIN_H_
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_BASEPIN_H_

#include "BaseFilter.h"
#include "MediaType.h"
#include "dshow.h"
#include "strmif.h"

#include <string>

namespace mozilla {
namespace media {

_COM_SMARTPTR_TYPEDEF(IPin, __uuidof(IPin));

// Base class for DirectShow filter pins.
//
// Implements:
//  * IPin
//  * IQualityControl
//  * IUnknown
//
class DECLSPEC_UUID("199669c6-672a-4130-b13e-57aa830eae55")
  BasePin
    : public IPin
    , public IQualityControl
{
public:

  // aFilter is the owning filter (also provides this pin's ref count, see
  // AddRef()/Release() below); aLock is the shared state lock; aName is
  // copied into mName; aDirection records whether this is an input or
  // output pin. NOTE(review): aFilter and aLock are presumably required to
  // outlive the pin — confirm with BaseFilter's ownership model.
  BasePin(BaseFilter* aFilter,
          CriticalSection* aLock,
          const wchar_t* aName,
          PIN_DIRECTION aDirection);

  virtual ~BasePin() {}

  // Reference count of the pin is actually stored on the owning filter.
  // So don't AddRef() the filter from the pin, else you'll create a cycle.
  STDMETHODIMP QueryInterface(REFIID aIId, void **aInterface);
  STDMETHODIMP_(ULONG) AddRef() { return mFilter->AddRef(); }
  STDMETHODIMP_(ULONG) Release() { return mFilter->Release(); }

  // IPin overrides.

  // Connects the pin to another pin. The pmt parameter can be NULL or a
  // partial media type.
  STDMETHODIMP Connect(IPin* aReceivePin, const AM_MEDIA_TYPE* aMediaType);

  //Accepts a connection from another pin.
  STDMETHODIMP ReceiveConnection(IPin* aConnector,
                                 const AM_MEDIA_TYPE* aMediaType);

  // Breaks the current pin connection.
  STDMETHODIMP Disconnect();

  // Retrieves the pin connected to this pin.
  STDMETHODIMP ConnectedTo(IPin** aPin);

  // Retrieves the media type for the current pin connection.
  STDMETHODIMP ConnectionMediaType(AM_MEDIA_TYPE* aMediaType);

  // Retrieves information about the pin, such as the name, the owning filter,
  // and the direction.
  STDMETHODIMP QueryPinInfo(PIN_INFO* aInfo);

  // Retrieves the direction of the pin (input or output).
  STDMETHODIMP QueryDirection(PIN_DIRECTION* aDirection);

  // Retrieves the pin identifier.
  STDMETHODIMP QueryId(LPWSTR* Id);

  // Determines whether the pin accepts a specified media type.
  STDMETHODIMP QueryAccept(const AM_MEDIA_TYPE* aMediaType);

  // Enumerates the pin's preferred media types.
  STDMETHODIMP EnumMediaTypes(IEnumMediaTypes** aEnum);

  // Retrieves the pins that are connected internally to this pin
  // (within the filter).
  STDMETHODIMP QueryInternalConnections(IPin** apPin, ULONG* aPin);

  // Notifies the pin that no additional data is expected.
  STDMETHODIMP EndOfStream(void);

  // IPin::BeginFlush() and IPin::EndFlush() are still pure virtual,
  // and must be implemented in a subclass.

  // Notifies the pin that media samples received after this call
  // are grouped as a segment.
  STDMETHODIMP NewSegment(
          REFERENCE_TIME aStartTime,
          REFERENCE_TIME aStopTime,
          double aRate);

  // IQualityControl overrides.

  // Notifies the recipient that a quality change is requested.
  STDMETHODIMP Notify(IBaseFilter * aSender, Quality aQuality);

  // Sets the IQualityControl object that will receive quality messages.
  STDMETHODIMP SetSink(IQualityControl* aQualitySink);

  // Other methods.

  // Sets the media type of the connection.
  virtual HRESULT SetMediaType(const MediaType *aMediaType);

  // check if the pin can support this specific proposed type and format
  virtual HRESULT CheckMediaType(const MediaType *) = 0;

  // This is called to release any resources needed for a connection.
  virtual HRESULT BreakConnect();

  // Called when we've made a connection to another pin. Returning failure
  // triggers the caller to break the connection. Subclasses may want to
  // override this.
  virtual HRESULT CompleteConnect(IPin *pReceivePin);

  // Checks if this pin can connect to |aPin|. We expect sub classes to
  // override this method to support their own needs. Default implementation
  // simply checks that the directions of the pins do not match.
  virtual HRESULT CheckConnect(IPin *);

  // Check if our filter is currently stopped
  BOOL IsStopped() {
    return mFilter->mState == State_Stopped;
  };

  // Moves pin to active state (running or paused). Subclasses will
  // override to prepare to handle data.
  virtual HRESULT Active(void);

  // Moves pin into inactive state (stopped). Releases resources associated
  // with handling data. Subclasses should override this.
  virtual HRESULT Inactive(void);

  // Called when Run() is called on the parent filter. Subclasses may want to
  // override this.
  virtual HRESULT Run(REFERENCE_TIME aStartTime);

  // Gets the supported media types for this pin.
  virtual HRESULT GetMediaType(int aIndex, MediaType *aMediaType);

  // Access name.
  const std::wstring& Name() { return mName; };

  // Connection state queries; mConnectedPin is only set while connected.
  bool IsConnected() { return mConnectedPin != NULL; }

  IPin* GetConnected() { return mConnectedPin; }

protected:

  // The pin's name, as returned by QueryPinInfo().
  std::wstring mName;

  // Event sink for quality messages. NOTE(review): stored as a raw pointer;
  // ownership/lifetime is managed by the caller of SetSink() — confirm.
  IQualityControl *mQualitySink;

  // The pin which this one is connected to.
  IPinPtr mConnectedPin;

  // Direction of data flow through this pin.
  PIN_DIRECTION mDirection;

  // Media type of the pin's connection.
  MediaType mMediaType;

  // Our state lock. All state should be accessed while this is locked.
  mozilla::CriticalSection *mLock;

  // Our owning filter.
  BaseFilter *mFilter;

  // This pin attempts to connect to |aPin| with media type |aMediaType|.
  // If |aMediaType| is fully specified, we must attempt to connect with
  // that, else we just enumerate our types, then the other pin's type and
  // try them, filtering them using |aMediaType| if it's paritally
  // specificed. Used by Connect().
  HRESULT AttemptConnection(IPin* aPin, const MediaType* aMediaType);

  // Tries to form a connection using all media types in the enumeration.
  HRESULT TryMediaTypes(IPin *aPin,
                        const MediaType *aMediaType,
                        IEnumMediaTypes *aEnum);
};

_COM_SMARTPTR_TYPEDEF(BasePin, __uuidof(BasePin));

}
}

#endif
Java
'use strict';

/* global describe, it */

var fs = require('fs');
var expect = require('chai').expect;
var bigrig = require('../');

describe('Big Rig', function () {

  // Reads the fixture at |path| as UTF-8, passes its contents to |body|,
  // then signals mocha via |done|. Read errors are thrown, matching the
  // behavior of the original inline callbacks. Extracted because the same
  // readFile boilerplate was repeated in every async test below.
  function withFixture (path, done, body) {
    fs.readFile(path, 'utf8', function (err, data) {
      if (err) {
        throw err;
      }
      body(data);
      done();
    });
  }

  it ('throws if no processes are found', function () {
    expect(function () {
      bigrig.analyze(null);
    }).to.throw('Zero processes (tabs) found.');
  });

  it ('throws if given invalid input data is given', function () {
    expect(function () {
      bigrig.analyze('wobble');
    }).to.throw('Invalid trace contents; not JSON');
  });

  it ('throws if given a trace with extensions and strict mode is enabled',
      function (done) {
        withFixture('./test/data/load-extensions.json', done,
            function (data) {
              var error = 'Extensions running during capture; ' +
                  'see http://bit.ly/bigrig-extensions';
              expect(function () {
                bigrig.analyze(data, { strict: true });
              }).to.throw(error);
            });
      });

  // TODO(paullewis) Add multiprocess test.

  it ('returns JSON for a file with a single process', function (done) {
    withFixture('./test/data/load.json', done, function (data) {
      var jsonData = bigrig.analyze(data);
      expect(jsonData).to.be.an('array');
      expect(jsonData[0]).to.be.an('object');
    });
  });

  it ('generates valid JSON', function (done) {
    withFixture('./test/data/load.json', done, function (data) {
      var jsonData = bigrig.analyze(data);
      // Round-trip through JSON to prove the result is serializable.
      jsonData = JSON.parse(JSON.stringify(jsonData));
      expect(jsonData).to.be.an('array');
    });
  });

  it ('supports timed ranges', function (done) {
    withFixture('./test/data/animation.json', done, function (data) {
      var jsonData = bigrig.analyze(data);
      expect(jsonData[0]).to.be.an('object');
      expect(jsonData[0].title).to.equal('sideNavAnimation');
      expect(jsonData[0].start).to.be.above(0);
      expect(jsonData[0].end).to.be.within(1179, 1180);
    });
  });

  it ('correctly applies RAIL type when time range is specified',
      function (done) {
        withFixture('./test/data/animation.json', done, function (data) {
          var jsonData = bigrig.analyze(data, {
            types: { 'sideNavAnimation': bigrig.ANIMATION }
          });
          expect(jsonData[0].type).to.equal(bigrig.ANIMATION);
        });
      });

  it ('correctly infers RAIL Load when time range not specified',
      function (done) {
        withFixture('./test/data/load.json', done, function (data) {
          var jsonData = bigrig.analyze(data);
          expect(jsonData[0].type).to.equal(bigrig.LOAD);
          expect(jsonData[0].title).to.equal('Load');
        });
      });

  it ('correctly infers RAIL Response when time range not specified',
      function (done) {
        withFixture('./test/data/response.json', done, function (data) {
          var jsonData = bigrig.analyze(data);
          expect(jsonData[0].type).to.equal(bigrig.RESPONSE);
          expect(jsonData[0].title).to.equal('sideNavResponse');
        });
      });

  it ('correctly infers RAIL Animation when time range not specified',
      function (done) {
        withFixture('./test/data/animation.json', done, function (data) {
          var jsonData = bigrig.analyze(data);
          expect(jsonData[0].type).to.equal(bigrig.ANIMATION);
          expect(jsonData[0].title).to.equal('sideNavAnimation');
        });
      });

  it ('correctly infers multiple RAIL regions', function (done) {
    withFixture('./test/data/response-animation.json', done, function (data) {
      var jsonData = bigrig.analyze(data);
      expect(jsonData.length).to.equal(2);
      expect(jsonData[0].type).to.equal(bigrig.RESPONSE);
      expect(jsonData[0].title).to.equal('sideNavResponse');
      expect(jsonData[1].type).to.equal(bigrig.ANIMATION);
      expect(jsonData[1].title).to.equal('sideNavAnimation');
    });
  });

  it ('returns the correct fps for animations', function (done) {
    withFixture('./test/data/animation.json', done, function (data) {
      var jsonData = bigrig.analyze(data);
      expect(jsonData[0].fps).to.be.within(59, 61);
    });
  });

  it ('returns the correct JS breakdown', function (done) {
    withFixture('./test/data/load.json', done, function (data) {
      var jsonData = bigrig.analyze(data);
      expect(
          jsonData[0].extendedInfo.javaScript['localhost:11080']
      ).to.be.within(245, 246);
      expect(
          jsonData[0].extendedInfo.javaScript['www.google-analytics.com']
      ).to.be.within(59, 60);
    });
  });

  it ('correctly captures forced layouts and recalcs', function (done) {
    withFixture('./test/data/forced-recalc-layout.json', done,
        function (data) {
          var jsonData = bigrig.analyze(data);
          expect(jsonData[0].extendedInfo.forcedRecalcs).to.equal(1);
          expect(jsonData[0].extendedInfo.forcedLayouts).to.equal(1);
        });
  });
});
Java
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """This module is deprecated. Please use :mod:`airflow.providers.qubole.operators.qubole`.""" import warnings # pylint: disable=unused-import from airflow.providers.qubole.operators.qubole import QuboleOperator # noqa warnings.warn( "This module is deprecated. Please use `airflow.providers.qubole.operators.qubole`.", DeprecationWarning, stacklevel=2, )
Java
package io.katharsis.jpa.meta;

import java.io.Serializable;
import java.util.UUID;

import org.junit.Assert;
import org.junit.Test;

import io.katharsis.meta.model.MetaPrimitiveType;

/**
 * Smoke tests for {@link MetaPrimitiveType#setImplementationType(Class)}:
 * each test verifies that a supported primitive-like implementation type is
 * accepted without throwing.
 */
public class MetaPrimitiveTypeTest {

	@Test
	public void testString() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(String.class);
	}

	@Test
	public void testInteger() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Integer.class);
	}

	@Test
	public void testShort() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Short.class);
	}

	@Test
	public void testLong() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Long.class);
	}

	@Test
	public void testFloat() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Float.class);
	}

	@Test
	public void testDouble() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Double.class);
	}

	@Test
	public void testBoolean() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Boolean.class);
	}

	@Test
	public void testByte() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Byte.class);
	}

	@Test
	public void testUUID() {
		// FIX: removed an unused local UUID instance that exercised nothing.
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(UUID.class);
	}

	enum TestEnum {
		A
	}

	@Test
	public void testEnum() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(TestEnum.class);
	}

	/** Value type constructible from a String via a static {@code parse} method. */
	public static class TestObjectWithParse {

		int value;

		public static TestObjectWithParse parse(String value) {
			TestObjectWithParse parser = new TestObjectWithParse();
			parser.value = Integer.parseInt(value);
			return parser;
		}

		// FIX: hashCode added alongside equals to honor the Object contract
		// (equal objects must have equal hash codes).
		@Override
		public int hashCode() {
			return value;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			TestObjectWithParse other = (TestObjectWithParse) obj;
			if (value != other.value)
				return false;
			return true;
		}
	}

	/** Value type constructible from a String via a single-argument constructor. */
	public static class TestObjectWithConstructor implements Serializable {

		int value;

		public TestObjectWithConstructor() {
		}

		public TestObjectWithConstructor(String value) {
			this.value = Integer.parseInt(value);
		}

		// FIX: hashCode added alongside equals to honor the Object contract.
		@Override
		public int hashCode() {
			return value;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			TestObjectWithConstructor other = (TestObjectWithConstructor) obj;
			if (value != other.value)
				return false;
			return true;
		}
	}

	@Test
	public void testParse() {
		// FIX: removed an unused local TestObjectWithParse that exercised nothing.
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(TestObjectWithParse.class);
	}

	@Test
	public void testOther() {
		// FIX: removed an unused local TestObjectWithConstructor that exercised nothing.
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(TestObjectWithConstructor.class);
	}
}
Java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.ml.clustering import org.apache.hadoop.fs.Path import org.json4s.DefaultFormats import org.json4s.JsonAST.JObject import org.json4s.jackson.JsonMethods._ import org.apache.spark.annotation.{DeveloperApi, Since} import org.apache.spark.internal.Logging import org.apache.spark.ml.{Estimator, Model} import org.apache.spark.ml.linalg.{Matrix, Vector, Vectors, VectorUDT} import org.apache.spark.ml.param._ import org.apache.spark.ml.param.shared.{HasCheckpointInterval, HasFeaturesCol, HasMaxIter, HasSeed} import org.apache.spark.ml.util._ import org.apache.spark.ml.util.DefaultParamsReader.Metadata import org.apache.spark.mllib.clustering.{DistributedLDAModel => OldDistributedLDAModel, EMLDAOptimizer => OldEMLDAOptimizer, LDA => OldLDA, LDAModel => OldLDAModel, LDAOptimizer => OldLDAOptimizer, LocalLDAModel => OldLocalLDAModel, OnlineLDAOptimizer => OldOnlineLDAOptimizer} import org.apache.spark.mllib.impl.PeriodicCheckpointer import org.apache.spark.mllib.linalg.{Vector => OldVector, Vectors => OldVectors} import org.apache.spark.mllib.linalg.MatrixImplicits._ import org.apache.spark.mllib.linalg.VectorImplicits._ import org.apache.spark.mllib.util.MLUtils import 
org.apache.spark.rdd.RDD import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession} import org.apache.spark.sql.functions.{col, monotonically_increasing_id, udf} import org.apache.spark.sql.types.StructType import org.apache.spark.util.VersionUtils private[clustering] trait LDAParams extends Params with HasFeaturesCol with HasMaxIter with HasSeed with HasCheckpointInterval { /** * Param for the number of topics (clusters) to infer. Must be &gt; 1. Default: 10. * * @group param */ @Since("1.6.0") final val k = new IntParam(this, "k", "The number of topics (clusters) to infer. " + "Must be > 1.", ParamValidators.gt(1)) /** @group getParam */ @Since("1.6.0") def getK: Int = $(k) /** * Concentration parameter (commonly named "alpha") for the prior placed on documents' * distributions over topics ("theta"). * * This is the parameter to a Dirichlet distribution, where larger values mean more smoothing * (more regularization). * * If not set by the user, then docConcentration is set automatically. If set to * singleton vector [alpha], then alpha is replicated to a vector of length k in fitting. * Otherwise, the [[docConcentration]] vector must be length k. * (default = automatic) * * Optimizer-specific parameter settings: * - EM * - Currently only supports symmetric distributions, so all values in the vector should be * the same. * - Values should be greater than 1.0 * - default = uniformly (50 / k) + 1, where 50/k is common in LDA libraries and +1 follows * from Asuncion et al. (2009), who recommend a +1 adjustment for EM. * - Online * - Values should be greater than or equal to 0 * - default = uniformly (1.0 / k), following the implementation from * <a href="https://github.com/Blei-Lab/onlineldavb">here</a>. 
* * @group param */ @Since("1.6.0") final val docConcentration = new DoubleArrayParam(this, "docConcentration", "Concentration parameter (commonly named \"alpha\") for the prior placed on documents'" + " distributions over topics (\"theta\").", (alpha: Array[Double]) => alpha.forall(_ >= 0.0)) /** @group getParam */ @Since("1.6.0") def getDocConcentration: Array[Double] = $(docConcentration) /** Get docConcentration used by spark.mllib LDA */ protected def getOldDocConcentration: Vector = { if (isSet(docConcentration)) { Vectors.dense(getDocConcentration) } else { Vectors.dense(-1.0) } } /** * Concentration parameter (commonly named "beta" or "eta") for the prior placed on topics' * distributions over terms. * * This is the parameter to a symmetric Dirichlet distribution. * * Note: The topics' distributions over terms are called "beta" in the original LDA paper * by Blei et al., but are called "phi" in many later papers such as Asuncion et al., 2009. * * If not set by the user, then topicConcentration is set automatically. * (default = automatic) * * Optimizer-specific parameter settings: * - EM * - Value should be greater than 1.0 * - default = 0.1 + 1, where 0.1 gives a small amount of smoothing and +1 follows * Asuncion et al. (2009), who recommend a +1 adjustment for EM. * - Online * - Value should be greater than or equal to 0 * - default = (1.0 / k), following the implementation from * <a href="https://github.com/Blei-Lab/onlineldavb">here</a>. 
* * @group param */ @Since("1.6.0") final val topicConcentration = new DoubleParam(this, "topicConcentration", "Concentration parameter (commonly named \"beta\" or \"eta\") for the prior placed on topic'" + " distributions over terms.", ParamValidators.gtEq(0)) /** @group getParam */ @Since("1.6.0") def getTopicConcentration: Double = $(topicConcentration) /** Get topicConcentration used by spark.mllib LDA */ protected def getOldTopicConcentration: Double = { if (isSet(topicConcentration)) { getTopicConcentration } else { -1.0 } } /** Supported values for Param [[optimizer]]. */ @Since("1.6.0") final val supportedOptimizers: Array[String] = Array("online", "em") /** * Optimizer or inference algorithm used to estimate the LDA model. * Currently supported (case-insensitive): * - "online": Online Variational Bayes (default) * - "em": Expectation-Maximization * * For details, see the following papers: * - Online LDA: * Hoffman, Blei and Bach. "Online Learning for Latent Dirichlet Allocation." * Neural Information Processing Systems, 2010. * See <a href="http://www.cs.columbia.edu/~blei/papers/HoffmanBleiBach2010b.pdf">here</a> * - EM: * Asuncion et al. "On Smoothing and Inference for Topic Models." * Uncertainty in Artificial Intelligence, 2009. * See <a href="http://arxiv.org/pdf/1205.2662.pdf">here</a> * * @group param */ @Since("1.6.0") final val optimizer = new Param[String](this, "optimizer", "Optimizer or inference" + " algorithm used to estimate the LDA model. Supported: " + supportedOptimizers.mkString(", "), (o: String) => ParamValidators.inArray(supportedOptimizers).apply(o.toLowerCase)) /** @group getParam */ @Since("1.6.0") def getOptimizer: String = $(optimizer) /** * Output column with estimates of the topic mixture distribution for each document (often called * "theta" in the literature). Returns a vector of zeros for an empty document. * * This uses a variational approximation following Hoffman et al. 
(2010), where the approximate * distribution is called "gamma." Technically, this method returns this approximation "gamma" * for each document. * * @group param */ @Since("1.6.0") final val topicDistributionCol = new Param[String](this, "topicDistributionCol", "Output column" + " with estimates of the topic mixture distribution for each document (often called \"theta\"" + " in the literature). Returns a vector of zeros for an empty document.") setDefault(topicDistributionCol -> "topicDistribution") /** @group getParam */ @Since("1.6.0") def getTopicDistributionCol: String = $(topicDistributionCol) /** * For Online optimizer only: [[optimizer]] = "online". * * A (positive) learning parameter that downweights early iterations. Larger values make early * iterations count less. * This is called "tau0" in the Online LDA paper (Hoffman et al., 2010) * Default: 1024, following Hoffman et al. * * @group expertParam */ @Since("1.6.0") final val learningOffset = new DoubleParam(this, "learningOffset", "(For online optimizer)" + " A (positive) learning parameter that downweights early iterations. Larger values make early" + " iterations count less.", ParamValidators.gt(0)) /** @group expertGetParam */ @Since("1.6.0") def getLearningOffset: Double = $(learningOffset) /** * For Online optimizer only: [[optimizer]] = "online". * * Learning rate, set as an exponential decay rate. * This should be between (0.5, 1.0] to guarantee asymptotic convergence. * This is called "kappa" in the Online LDA paper (Hoffman et al., 2010). * Default: 0.51, based on Hoffman et al. * * @group expertParam */ @Since("1.6.0") final val learningDecay = new DoubleParam(this, "learningDecay", "(For online optimizer)" + " Learning rate, set as an exponential decay rate. 
This should be between (0.5, 1.0] to" + " guarantee asymptotic convergence.", ParamValidators.gt(0)) /** @group expertGetParam */ @Since("1.6.0") def getLearningDecay: Double = $(learningDecay) /** * For Online optimizer only: [[optimizer]] = "online". * * Fraction of the corpus to be sampled and used in each iteration of mini-batch gradient descent, * in range (0, 1]. * * Note that this should be adjusted in synch with `LDA.maxIter` * so the entire corpus is used. Specifically, set both so that * maxIterations * miniBatchFraction greater than or equal to 1. * * Note: This is the same as the `miniBatchFraction` parameter in * [[org.apache.spark.mllib.clustering.OnlineLDAOptimizer]]. * * Default: 0.05, i.e., 5% of total documents. * * @group param */ @Since("1.6.0") final val subsamplingRate = new DoubleParam(this, "subsamplingRate", "(For online optimizer)" + " Fraction of the corpus to be sampled and used in each iteration of mini-batch" + " gradient descent, in range (0, 1].", ParamValidators.inRange(0.0, 1.0, lowerInclusive = false, upperInclusive = true)) /** @group getParam */ @Since("1.6.0") def getSubsamplingRate: Double = $(subsamplingRate) /** * For Online optimizer only (currently): [[optimizer]] = "online". * * Indicates whether the docConcentration (Dirichlet parameter for * document-topic distribution) will be optimized during training. * Setting this to true will make the model more expressive and fit the training data better. * Default: false * * @group expertParam */ @Since("1.6.0") final val optimizeDocConcentration = new BooleanParam(this, "optimizeDocConcentration", "(For online optimizer only, currently) Indicates whether the docConcentration" + " (Dirichlet parameter for document-topic distribution) will be optimized during training.") /** @group expertGetParam */ @Since("1.6.0") def getOptimizeDocConcentration: Boolean = $(optimizeDocConcentration) /** * For EM optimizer only: [[optimizer]] = "em". 
* * If using checkpointing, this indicates whether to keep the last * checkpoint. If false, then the checkpoint will be deleted. Deleting the checkpoint can * cause failures if a data partition is lost, so set this bit with care. * Note that checkpoints will be cleaned up via reference counting, regardless. * * See `DistributedLDAModel.getCheckpointFiles` for getting remaining checkpoints and * `DistributedLDAModel.deleteCheckpointFiles` for removing remaining checkpoints. * * Default: true * * @group expertParam */ @Since("2.0.0") final val keepLastCheckpoint = new BooleanParam(this, "keepLastCheckpoint", "(For EM optimizer) If using checkpointing, this indicates whether to keep the last" + " checkpoint. If false, then the checkpoint will be deleted. Deleting the checkpoint can" + " cause failures if a data partition is lost, so set this bit with care.") /** @group expertGetParam */ @Since("2.0.0") def getKeepLastCheckpoint: Boolean = $(keepLastCheckpoint) /** * Validates and transforms the input schema. * * @param schema input schema * @return output schema */ protected def validateAndTransformSchema(schema: StructType): StructType = { if (isSet(docConcentration)) { if (getDocConcentration.length != 1) { require(getDocConcentration.length == getK, s"LDA docConcentration was of length" + s" ${getDocConcentration.length}, but k = $getK. docConcentration must be an array of" + s" length either 1 (scalar) or k (num topics).") } getOptimizer match { case "online" => require(getDocConcentration.forall(_ >= 0), "For Online LDA optimizer, docConcentration values must be >= 0. Found values: " + getDocConcentration.mkString(",")) case "em" => require(getDocConcentration.forall(_ >= 0), "For EM optimizer, docConcentration values must be >= 1. Found values: " + getDocConcentration.mkString(",")) } } if (isSet(topicConcentration)) { getOptimizer match { case "online" => require(getTopicConcentration >= 0, s"For Online LDA optimizer, topicConcentration" + s" must be >= 0. 
Found value: $getTopicConcentration") case "em" => require(getTopicConcentration >= 0, s"For EM optimizer, topicConcentration" + s" must be >= 1. Found value: $getTopicConcentration") } } SchemaUtils.checkColumnType(schema, $(featuresCol), new VectorUDT) SchemaUtils.appendColumn(schema, $(topicDistributionCol), new VectorUDT) } private[clustering] def getOldOptimizer: OldLDAOptimizer = getOptimizer match { case "online" => new OldOnlineLDAOptimizer() .setTau0($(learningOffset)) .setKappa($(learningDecay)) .setMiniBatchFraction($(subsamplingRate)) .setOptimizeDocConcentration($(optimizeDocConcentration)) case "em" => new OldEMLDAOptimizer() .setKeepLastCheckpoint($(keepLastCheckpoint)) } } private object LDAParams { /** * Equivalent to [[DefaultParamsReader.getAndSetParams()]], but handles [[LDA]] and [[LDAModel]] * formats saved with Spark 1.6, which differ from the formats in Spark 2.0+. * * @param model [[LDA]] or [[LDAModel]] instance. This instance will be modified with * [[Param]] values extracted from metadata. * @param metadata Loaded model metadata */ def getAndSetParams(model: LDAParams, metadata: Metadata): Unit = { VersionUtils.majorMinorVersion(metadata.sparkVersion) match { case (1, 6) => implicit val format = DefaultFormats metadata.params match { case JObject(pairs) => pairs.foreach { case (paramName, jsonValue) => val origParam = if (paramName == "topicDistribution") "topicDistributionCol" else paramName val param = model.getParam(origParam) val value = param.jsonDecode(compact(render(jsonValue))) model.set(param, value) } case _ => throw new IllegalArgumentException( s"Cannot recognize JSON metadata: ${metadata.metadataJson}.") } case _ => // 2.0+ DefaultParamsReader.getAndSetParams(model, metadata) } } } /** * Model fitted by [[LDA]]. 
* * @param vocabSize Vocabulary size (number of terms or words in the vocabulary) * @param sparkSession Used to construct local DataFrames for returning query results */ @Since("1.6.0") abstract class LDAModel private[ml] ( @Since("1.6.0") override val uid: String, @Since("1.6.0") val vocabSize: Int, @Since("1.6.0") @transient private[ml] val sparkSession: SparkSession) extends Model[LDAModel] with LDAParams with Logging with MLWritable { // NOTE to developers: // This abstraction should contain all important functionality for basic LDA usage. // Specializations of this class can contain expert-only functionality. /** * Underlying spark.mllib model. * If this model was produced by Online LDA, then this is the only model representation. * If this model was produced by EM, then this local representation may be built lazily. */ @Since("1.6.0") private[clustering] def oldLocalModel: OldLocalLDAModel /** Returns underlying spark.mllib model, which may be local or distributed */ @Since("1.6.0") private[clustering] def getModel: OldLDAModel private[ml] def getEffectiveDocConcentration: Array[Double] = getModel.docConcentration.toArray private[ml] def getEffectiveTopicConcentration: Double = getModel.topicConcentration /** * The features for LDA should be a `Vector` representing the word counts in a document. * The vector should be of length vocabSize, with counts for each term (word). * * @group setParam */ @Since("1.6.0") def setFeaturesCol(value: String): this.type = set(featuresCol, value) @Since("2.2.0") def setTopicDistributionCol(value: String): this.type = set(topicDistributionCol, value) /** @group setParam */ @Since("1.6.0") def setSeed(value: Long): this.type = set(seed, value) /** * Transforms the input dataset. * * WARNING: If this model is an instance of [[DistributedLDAModel]] (produced when [[optimizer]] * is set to "em"), this involves collecting a large [[topicsMatrix]] to the driver. * This implementation may be changed in the future. 
*/ @Since("2.0.0") override def transform(dataset: Dataset[_]): DataFrame = { if ($(topicDistributionCol).nonEmpty) { // TODO: Make the transformer natively in ml framework to avoid extra conversion. val transformer = oldLocalModel.getTopicDistributionMethod(sparkSession.sparkContext) val t = udf { (v: Vector) => transformer(OldVectors.fromML(v)).asML } dataset.withColumn($(topicDistributionCol), t(col($(featuresCol)))).toDF() } else { logWarning("LDAModel.transform was called without any output columns. Set an output column" + " such as topicDistributionCol to produce results.") dataset.toDF() } } @Since("1.6.0") override def transformSchema(schema: StructType): StructType = { validateAndTransformSchema(schema) } /** * Value for [[docConcentration]] estimated from data. * If Online LDA was used and [[optimizeDocConcentration]] was set to false, * then this returns the fixed (given) value for the [[docConcentration]] parameter. */ @Since("2.0.0") def estimatedDocConcentration: Vector = getModel.docConcentration /** * Inferred topics, where each topic is represented by a distribution over terms. * This is a matrix of size vocabSize x k, where each column is a topic. * No guarantees are given about the ordering of the topics. * * WARNING: If this model is actually a [[DistributedLDAModel]] instance produced by * the Expectation-Maximization ("em") [[optimizer]], then this method could involve * collecting a large amount of data to the driver (on the order of vocabSize x k). */ @Since("2.0.0") def topicsMatrix: Matrix = oldLocalModel.topicsMatrix.asML /** Indicates whether this instance is of type [[DistributedLDAModel]] */ @Since("1.6.0") def isDistributed: Boolean /** * Calculates a lower bound on the log likelihood of the entire corpus. * * See Equation (16) in the Online LDA paper (Hoffman et al., 2010). 
* * WARNING: If this model is an instance of [[DistributedLDAModel]] (produced when [[optimizer]] * is set to "em"), this involves collecting a large [[topicsMatrix]] to the driver. * This implementation may be changed in the future. * * @param dataset test corpus to use for calculating log likelihood * @return variational lower bound on the log likelihood of the entire corpus */ @Since("2.0.0") def logLikelihood(dataset: Dataset[_]): Double = { val oldDataset = LDA.getOldDataset(dataset, $(featuresCol)) oldLocalModel.logLikelihood(oldDataset) } /** * Calculate an upper bound on perplexity. (Lower is better.) * See Equation (16) in the Online LDA paper (Hoffman et al., 2010). * * WARNING: If this model is an instance of [[DistributedLDAModel]] (produced when [[optimizer]] * is set to "em"), this involves collecting a large [[topicsMatrix]] to the driver. * This implementation may be changed in the future. * * @param dataset test corpus to use for calculating perplexity * @return Variational upper bound on log perplexity per token. */ @Since("2.0.0") def logPerplexity(dataset: Dataset[_]): Double = { val oldDataset = LDA.getOldDataset(dataset, $(featuresCol)) oldLocalModel.logPerplexity(oldDataset) } /** * Return the topics described by their top-weighted terms. * * @param maxTermsPerTopic Maximum number of terms to collect for each topic. * Default value of 10. 
* @return Local DataFrame with one topic per Row, with columns: * - "topic": IntegerType: topic index * - "termIndices": ArrayType(IntegerType): term indices, sorted in order of decreasing * term importance * - "termWeights": ArrayType(DoubleType): corresponding sorted term weights */ @Since("1.6.0") def describeTopics(maxTermsPerTopic: Int): DataFrame = { val topics = getModel.describeTopics(maxTermsPerTopic).zipWithIndex.map { case ((termIndices, termWeights), topic) => (topic, termIndices.toSeq, termWeights.toSeq) } sparkSession.createDataFrame(topics).toDF("topic", "termIndices", "termWeights") } @Since("1.6.0") def describeTopics(): DataFrame = describeTopics(10) } /** * * Local (non-distributed) model fitted by [[LDA]]. * * This model stores the inferred topics only; it does not store info about the training dataset. */ @Since("1.6.0") class LocalLDAModel private[ml] ( uid: String, vocabSize: Int, @Since("1.6.0") override private[clustering] val oldLocalModel: OldLocalLDAModel, sparkSession: SparkSession) extends LDAModel(uid, vocabSize, sparkSession) { @Since("1.6.0") override def copy(extra: ParamMap): LocalLDAModel = { val copied = new LocalLDAModel(uid, vocabSize, oldLocalModel, sparkSession) copyValues(copied, extra).setParent(parent).asInstanceOf[LocalLDAModel] } override private[clustering] def getModel: OldLDAModel = oldLocalModel @Since("1.6.0") override def isDistributed: Boolean = false @Since("1.6.0") override def write: MLWriter = new LocalLDAModel.LocalLDAModelWriter(this) } @Since("1.6.0") object LocalLDAModel extends MLReadable[LocalLDAModel] { private[LocalLDAModel] class LocalLDAModelWriter(instance: LocalLDAModel) extends MLWriter { private case class Data( vocabSize: Int, topicsMatrix: Matrix, docConcentration: Vector, topicConcentration: Double, gammaShape: Double) override protected def saveImpl(path: String): Unit = { DefaultParamsWriter.saveMetadata(instance, path, sc) val oldModel = instance.oldLocalModel val data = 
Data(instance.vocabSize, oldModel.topicsMatrix, oldModel.docConcentration, oldModel.topicConcentration, oldModel.gammaShape) val dataPath = new Path(path, "data").toString sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath) } } private class LocalLDAModelReader extends MLReader[LocalLDAModel] { private val className = classOf[LocalLDAModel].getName override def load(path: String): LocalLDAModel = { val metadata = DefaultParamsReader.loadMetadata(path, sc, className) val dataPath = new Path(path, "data").toString val data = sparkSession.read.parquet(dataPath) val vectorConverted = MLUtils.convertVectorColumnsToML(data, "docConcentration") val matrixConverted = MLUtils.convertMatrixColumnsToML(vectorConverted, "topicsMatrix") val Row(vocabSize: Int, topicsMatrix: Matrix, docConcentration: Vector, topicConcentration: Double, gammaShape: Double) = matrixConverted.select("vocabSize", "topicsMatrix", "docConcentration", "topicConcentration", "gammaShape").head() val oldModel = new OldLocalLDAModel(topicsMatrix, docConcentration, topicConcentration, gammaShape) val model = new LocalLDAModel(metadata.uid, vocabSize, oldModel, sparkSession) LDAParams.getAndSetParams(model, metadata) model } } @Since("1.6.0") override def read: MLReader[LocalLDAModel] = new LocalLDAModelReader @Since("1.6.0") override def load(path: String): LocalLDAModel = super.load(path) } /** * * Distributed model fitted by [[LDA]]. * This type of model is currently only produced by Expectation-Maximization (EM). * * This model stores the inferred topics, the full training dataset, and the topic distribution * for each training document. * * @param oldLocalModelOption Used to implement [[oldLocalModel]] as a lazy val, but keeping * `copy()` cheap. 
*/ @Since("1.6.0") class DistributedLDAModel private[ml] ( uid: String, vocabSize: Int, private val oldDistributedModel: OldDistributedLDAModel, sparkSession: SparkSession, private var oldLocalModelOption: Option[OldLocalLDAModel]) extends LDAModel(uid, vocabSize, sparkSession) { override private[clustering] def oldLocalModel: OldLocalLDAModel = { if (oldLocalModelOption.isEmpty) { oldLocalModelOption = Some(oldDistributedModel.toLocal) } oldLocalModelOption.get } override private[clustering] def getModel: OldLDAModel = oldDistributedModel /** * Convert this distributed model to a local representation. This discards info about the * training dataset. * * WARNING: This involves collecting a large [[topicsMatrix]] to the driver. */ @Since("1.6.0") def toLocal: LocalLDAModel = new LocalLDAModel(uid, vocabSize, oldLocalModel, sparkSession) @Since("1.6.0") override def copy(extra: ParamMap): DistributedLDAModel = { val copied = new DistributedLDAModel( uid, vocabSize, oldDistributedModel, sparkSession, oldLocalModelOption) copyValues(copied, extra).setParent(parent) copied } @Since("1.6.0") override def isDistributed: Boolean = true /** * Log likelihood of the observed tokens in the training set, * given the current parameter estimates: * log P(docs | topics, topic distributions for docs, Dirichlet hyperparameters) * * Notes: * - This excludes the prior; for that, use [[logPrior]]. * - Even with [[logPrior]], this is NOT the same as the data log likelihood given the * hyperparameters. * - This is computed from the topic distributions computed during training. If you call * `logLikelihood()` on the same training dataset, the topic distributions will be computed * again, possibly giving different results. 
*/ @Since("1.6.0") lazy val trainingLogLikelihood: Double = oldDistributedModel.logLikelihood /** * Log probability of the current parameter estimate: * log P(topics, topic distributions for docs | Dirichlet hyperparameters) */ @Since("1.6.0") lazy val logPrior: Double = oldDistributedModel.logPrior private var _checkpointFiles: Array[String] = oldDistributedModel.checkpointFiles /** * :: DeveloperApi :: * * If using checkpointing and `LDA.keepLastCheckpoint` is set to true, then there may be * saved checkpoint files. This method is provided so that users can manage those files. * * Note that removing the checkpoints can cause failures if a partition is lost and is needed * by certain [[DistributedLDAModel]] methods. Reference counting will clean up the checkpoints * when this model and derivative data go out of scope. * * @return Checkpoint files from training */ @DeveloperApi @Since("2.0.0") def getCheckpointFiles: Array[String] = _checkpointFiles /** * :: DeveloperApi :: * * Remove any remaining checkpoint files from training. 
* * @see [[getCheckpointFiles]] */ @DeveloperApi @Since("2.0.0") def deleteCheckpointFiles(): Unit = { val hadoopConf = sparkSession.sparkContext.hadoopConfiguration _checkpointFiles.foreach(PeriodicCheckpointer.removeCheckpointFile(_, hadoopConf)) _checkpointFiles = Array.empty[String] } @Since("1.6.0") override def write: MLWriter = new DistributedLDAModel.DistributedWriter(this) } @Since("1.6.0") object DistributedLDAModel extends MLReadable[DistributedLDAModel] { private[DistributedLDAModel] class DistributedWriter(instance: DistributedLDAModel) extends MLWriter { override protected def saveImpl(path: String): Unit = { DefaultParamsWriter.saveMetadata(instance, path, sc) val modelPath = new Path(path, "oldModel").toString instance.oldDistributedModel.save(sc, modelPath) } } private class DistributedLDAModelReader extends MLReader[DistributedLDAModel] { private val className = classOf[DistributedLDAModel].getName override def load(path: String): DistributedLDAModel = { val metadata = DefaultParamsReader.loadMetadata(path, sc, className) val modelPath = new Path(path, "oldModel").toString val oldModel = OldDistributedLDAModel.load(sc, modelPath) val model = new DistributedLDAModel(metadata.uid, oldModel.vocabSize, oldModel, sparkSession, None) LDAParams.getAndSetParams(model, metadata) model } } @Since("1.6.0") override def read: MLReader[DistributedLDAModel] = new DistributedLDAModelReader @Since("1.6.0") override def load(path: String): DistributedLDAModel = super.load(path) } /** * * Latent Dirichlet Allocation (LDA), a topic model designed for text documents. * * Terminology: * - "term" = "word": an element of the vocabulary * - "token": instance of a term appearing in a document * - "topic": multinomial distribution over terms representing some concept * - "document": one piece of text, corresponding to one row in the input data * * Original LDA paper (journal version): * Blei, Ng, and Jordan. "Latent Dirichlet Allocation." JMLR, 2003. 
 *
 * Input data (featuresCol):
 *  LDA is given a collection of documents as input data, via the featuresCol parameter.
 *  Each document is specified as a `Vector` of length vocabSize, where each entry is the
 *  count for the corresponding term (word) in the document. Feature transformers such as
 *  [[org.apache.spark.ml.feature.Tokenizer]] and [[org.apache.spark.ml.feature.CountVectorizer]]
 *  can be useful for converting text to word count vectors.
 *
 * @see <a href="http://en.wikipedia.org/wiki/Latent_Dirichlet_allocation">
 *        Latent Dirichlet allocation (Wikipedia)</a>
 */
@Since("1.6.0")
class LDA @Since("1.6.0") (
    @Since("1.6.0") override val uid: String)
  extends Estimator[LDAModel] with LDAParams with DefaultParamsWritable {

  @Since("1.6.0")
  def this() = this(Identifiable.randomUID("lda"))

  // Online-optimizer defaults (learningOffset/learningDecay/subsamplingRate) follow the
  // values documented on the corresponding Params above.
  setDefault(maxIter -> 20, k -> 10, optimizer -> "online", checkpointInterval -> 10,
    learningOffset -> 1024, learningDecay -> 0.51, subsamplingRate -> 0.05,
    optimizeDocConcentration -> true, keepLastCheckpoint -> true)

  /**
   * The features for LDA should be a `Vector` representing the word counts in a document.
   * The vector should be of length vocabSize, with counts for each term (word).
   *
   * @group setParam
   */
  @Since("1.6.0")
  def setFeaturesCol(value: String): this.type = set(featuresCol, value)

  /** @group setParam */
  @Since("1.6.0")
  def setMaxIter(value: Int): this.type = set(maxIter, value)

  /** @group setParam */
  @Since("1.6.0")
  def setSeed(value: Long): this.type = set(seed, value)

  /** @group setParam */
  @Since("1.6.0")
  def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)

  /** @group setParam */
  @Since("1.6.0")
  def setK(value: Int): this.type = set(k, value)

  /** @group setParam */
  @Since("1.6.0")
  def setDocConcentration(value: Array[Double]): this.type = set(docConcentration, value)

  /** @group setParam */
  @Since("1.6.0")
  def setDocConcentration(value: Double): this.type = set(docConcentration, Array(value))

  /** @group setParam */
  @Since("1.6.0")
  def setTopicConcentration(value: Double): this.type = set(topicConcentration, value)

  /** @group setParam */
  @Since("1.6.0")
  def setOptimizer(value: String): this.type = set(optimizer, value)

  /** @group setParam */
  @Since("1.6.0")
  def setTopicDistributionCol(value: String): this.type = set(topicDistributionCol, value)

  /** @group expertSetParam */
  @Since("1.6.0")
  def setLearningOffset(value: Double): this.type = set(learningOffset, value)

  /** @group expertSetParam */
  @Since("1.6.0")
  def setLearningDecay(value: Double): this.type = set(learningDecay, value)

  /** @group setParam */
  @Since("1.6.0")
  def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)

  /** @group expertSetParam */
  @Since("1.6.0")
  def setOptimizeDocConcentration(value: Boolean): this.type = set(optimizeDocConcentration, value)

  /** @group expertSetParam */
  @Since("2.0.0")
  def setKeepLastCheckpoint(value: Boolean): this.type = set(keepLastCheckpoint, value)

  @Since("1.6.0")
  override def copy(extra: ParamMap): LDA = defaultCopy(extra)

  @Since("2.0.0")
  override def fit(dataset: Dataset[_]): LDAModel = {
    transformSchema(dataset.schema, logging = true)
    val instr = Instrumentation.create(this, dataset)
    instr.logParams(featuresCol, topicDistributionCol, k, maxIter, subsamplingRate,
      checkpointInterval, keepLastCheckpoint, optimizeDocConcentration, topicConcentration,
      learningDecay, optimizer, learningOffset, seed)

    // Delegate training to the spark.mllib implementation, then wrap the result in the
    // appropriate spark.ml model class depending on which optimizer produced it.
    val oldLDA = new OldLDA()
      .setK($(k))
      .setDocConcentration(getOldDocConcentration)
      .setTopicConcentration(getOldTopicConcentration)
      .setMaxIterations($(maxIter))
      .setSeed($(seed))
      .setCheckpointInterval($(checkpointInterval))
      .setOptimizer(getOldOptimizer)
    // TODO: persist here, or in old LDA?
    val oldData = LDA.getOldDataset(dataset, $(featuresCol))
    val oldModel = oldLDA.run(oldData)
    val newModel = oldModel match {
      case m: OldLocalLDAModel =>
        new LocalLDAModel(uid, m.vocabSize, m, dataset.sparkSession)
      case m: OldDistributedLDAModel =>
        new DistributedLDAModel(uid, m.vocabSize, m, dataset.sparkSession, None)
    }
    instr.logNumFeatures(newModel.vocabSize)
    val model = copyValues(newModel).setParent(this)
    instr.logSuccess(model)
    model
  }

  @Since("1.6.0")
  override def transformSchema(schema: StructType): StructType = {
    validateAndTransformSchema(schema)
  }
}

@Since("2.0.0")
object LDA extends MLReadable[LDA] {

  /** Get dataset for spark.mllib LDA */
  private[clustering] def getOldDataset(
      dataset: Dataset[_],
      featuresCol: String): RDD[(Long, OldVector)] = {
    dataset
      .withColumn("docId", monotonically_increasing_id())
      .select("docId", featuresCol)
      .rdd
      .map { case Row(docId: Long, features: Vector) =>
        (docId, OldVectors.fromML(features))
      }
  }

  // Reader that also understands the Spark 1.6 param layout via LDAParams.getAndSetParams.
  private class LDAReader extends MLReader[LDA] {

    private val className = classOf[LDA].getName

    override def load(path: String): LDA = {
      val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
      val model = new LDA(metadata.uid)
      LDAParams.getAndSetParams(model, metadata)
      model
    }
  }

  override def read: MLReader[LDA] = new LDAReader

  @Since("2.0.0")
  override def load(path: String): LDA = super.load(path)
}
Java
/**
 * Builds the "Torneo Golf" window: a colored title bar (golf icon, title
 * label, close button) above a two-row table that navigates to the
 * Horarios and Mapa windows.
 *
 * Fixes over the original:
 *  - every view was assigned to an implicit global (no `var`); all views
 *    are now function-local,
 *  - `var row` was declared twice and the require/new/open navigation
 *    boilerplate was copy-pasted four times; both are factored into helpers.
 *
 * @param {Object} Window  Unused; kept for the app-wide window-constructor
 *                         calling convention (callers pass undefined).
 * @return {Ti.UI.Window}  The fully assembled window.
 */
function TorneoGolfWindow(Window) {
    // Row ids used to route table clicks to the right sub-window.
    var ROW_HORARIOS = 2;
    var ROW_MAPA = 3;

    var window1 = Titanium.UI.createWindow({
        tabBarHidden : true,
        backgroundColor : "white",
        width : '100%',
        height : '100%',
        layout : 'vertical'
    });

    var table = Ti.UI.createTableView({
        width : '90%',
        height : '100%'
    });

    var scrollView_1 = Titanium.UI.createView({
        id : "scrollView_1",
        backgroundImage : '/images/background.png',
        height : '100%',
        width : '100%',
        layout : 'vertical'
    });
    scrollView_1.add(table);

    // Title bar; background color comes from the app-wide theme property.
    var imageViewBar = Titanium.UI.createView({
        id : "imageViewBar",
        backgroundColor : Ti.App.Properties.getString('viewcolor'),
        height : 80,
        left : 0,
        top : 0,
        width : '100%',
        layout : 'horizontal'
    });

    var imageView = Titanium.UI.createImageView({
        id : "imageView",
        image : "/images/icongolf.png",
        width : 60,
        height : 60,
        top : 7,
        right : 3
    });
    imageViewBar.add(imageView);

    var labelTitulo = Titanium.UI.createLabel({
        id : "labelTitulo",
        height : 'auto',
        width : '70%',
        text : L('golf'),
        font : {
            fontSize : '22dp'
        },
        color : 'white',
        textAlign : Ti.UI.TEXT_ALIGNMENT_CENTER
    });
    imageViewBar.add(labelTitulo);

    var buttonClose = Titanium.UI.createImageView({
        id : "buttonClose",
        image : "/images/close.png",
        width : 30,
        height : 30,
        top : 25
    });
    imageViewBar.add(buttonClose);

    window1.add(imageViewBar);
    window1.add(scrollView_1);

    // Opens the CommonJS window module at `path`, passing the conventional
    // (unused) Window argument, exactly like the original inline copies did.
    function openWindow(path) {
        var WindowCtor = require(path);
        new WindowCtor(undefined).open();
    }

    // Creates one styled menu row; removes the duplicated `var row` blocks.
    function makeRow(id, title, icon) {
        return Titanium.UI.createTableViewRow({
            id : id,
            title : title,
            leftImage : icon,
            isparent : true,
            opened : false,
            hasChild : false,
            font : {
                fontSize : '22dp'
            },
            color : 'black'
        });
    }

    function populateTable() {
        table.setData([
            makeRow(ROW_HORARIOS, 'Horarios', '/images/horarios.png'),
            makeRow(ROW_MAPA, 'Mapa', '/images/mapa.png')
        ]);
    }
    populateTable();

    table.addEventListener('click', function(e) {
        if (e.rowData.id === ROW_HORARIOS) {
            openWindow("ui/handheld/golf/HorariosWindow");
        } else if (e.rowData.id === ROW_MAPA) {
            openWindow("ui/handheld/mapa/MapaWindow");
        }
    });

    // Close button and the Android hardware back button both return to the
    // main window, with a short vibration as tactile feedback.
    function goToMain() {
        Ti.Media.vibrate();
        openWindow("ui/handheld/MainWindow");
    }
    buttonClose.addEventListener('click', goToMain);
    window1.addEventListener('android:back', goToMain);

    return window1;
}

module.exports = TorneoGolfWindow;
Java
/* * File: NetWork.h * Author: guoxinhua * * Created on 2014年9月24日, 下午5:14 */ #ifndef CP_NETWORK_H #define CP_NETWORK_H #ifdef __cplusplus extern "C" { #endif #define CP_REACTOR_MAXEVENTS 4096 #define CP_MAX_EVENT 1024 #define CP_BUFFER_SIZE (1024*1024) #define CP_MAX_UINT 4294967295 #define EPOLL_CLOSE 10 #define CP_CLIENT_EOF_STR "\r\n^CON^eof\r\n" #define CP_TOO_MANY_CON "not enough con" #define CP_TOO_MANY_CON_ERR "ERROR!not enough con" #define CP_MULTI_PROCESS_ERR "ERROR!the connection object create in parent process and use in multi process,please create in every process" #define CP_CLIENT_EOF_LEN strlen(CP_CLIENT_EOF_STR) #define CP_HEADER_CON_SUCCESS "CON_SUCCESS!" #define CP_HEADER_ERROR "ERROR!" #define CP_PDO_HEADER_STATE "PDOStatement!" #define CP_RELEASE_HEADER "r" #define CP_RELEASE_HEADER_LEN 1 typedef int (*epoll_wait_handle)(int fd); int cpEpoll_add(int epfd, int fd, int fdtype); int cpEpoll_set(int fd, int fdtype); int cpEpoll_del(int epfd, int fd); int cpEpoll_wait(epoll_wait_handle*, struct timeval *timeo, int epfd); void cpEpoll_free(); CPINLINE int cpEpoll_event_set(int fdtype); #ifdef __cplusplus } #endif #endif /* NETWORK_H */
Java
<!-- Fail-over confirmation dialog. Three mutually exclusive bodies are shown
     depending on node state: (1) node up and holding data, (2) node down and
     holding data, (3) node without data. `status.backfill` means some data on
     the node has no replica copies yet. -->
<div id="failover_confirmation_dialog" style="width:711px">
  <h1 class="dialog_corner_radius">Confirm Node Fail Over for {{serversFailOverDialogCtl.node.hostname}}</h1>
  <div>
    <div mn-spinner="serversFailOverDialogCtl.viewLoading">
      <div class="pas_20">
        <div class="failover_warning pat_20">
          <!-- Case 1: node is reachable and holds data — offer graceful vs hard fail over. -->
          <div ng-show="!serversFailOverDialogCtl.status.down && !serversFailOverDialogCtl.status.dataless">
            <h2>Fail Over Options</h2>
            <label>
              <input type="radio" name="failOver"
                     ng-model="serversFailOverDialogCtl.status.failOver"
                     value="startGracefulFailover"
                     ng-disabled="!serversFailOverDialogCtl.status.gracefulFailoverPossible">
              <span>Graceful Fail Over (default).</span>
            </label>
            <label>
              <input type="radio" name="failOver"
                     ng-model="serversFailOverDialogCtl.status.failOver"
                     value="failOver">
              <span>Hard Fail Over - If you use hard failover option on a functioning node it may result in data loss. This is because failover will immediately remove the node from the cluster and any data that has not yet been replicated to other nodes may be permanently lost if it had not been persisted to disk.</span>
            </label>
            <!-- Shown when graceful fail over cannot be offered at all. -->
            <div class="warning js_gracefull_failover_message" style="margin-bottom: 15px;"
                 ng-if="!serversFailOverDialogCtl.status.gracefulFailoverPossible">
              <strong>Attention</strong> – Graceful fail over option is not available either because node is unreachable or replica vbucket cannot be activated gracefully.
            </div>
            <!-- Hard fail over chosen while un-replicated data exists: require an explicit confirmation checkbox. -->
            <div class="warning js_warning" style="margin-top: 15px;"
                 ng-show="serversFailOverDialogCtl.status.backfill && (serversFailOverDialogCtl.status.failOver === 'failOver')">
              <strong>Attention</strong> – A significant amount of data stored on this node does not yet have replica (backup) copies! Failing over the node now will irrecoverably lose that data when the incomplete replica is activated and this node is removed from the cluster. It is recommended to select "Remove Server" and rebalance to safely remove the node without any data loss.
              <label>
                <input type="checkbox" name="confirmation"
                       ng-model="serversFailOverDialogCtl.status.confirmation">
                Please confirm Failover.
              </label>
            </div>
            <!-- Hard fail over chosen with replicas intact: warn, no checkbox needed. -->
            <div class="warning js_warning" style="margin-top: 15px;"
                 ng-show="!serversFailOverDialogCtl.status.backfill && (serversFailOverDialogCtl.status.failOver === 'failOver')">
              <strong>Warning</strong> – Failing over the node will remove it from the cluster and activate a replica. Operations currently in flight and not yet replicated, will be lost. Rebalancing will be required to add the node back into the cluster. Consider using "Remove from Cluster" and rebalancing instead of Failover, to avoid any loss of data. Please confirm Failover.
            </div>
          </div>
          <!-- Case 2: node is down but holds data. -->
          <div ng-show="serversFailOverDialogCtl.status.down && !serversFailOverDialogCtl.status.dataless">
            <div class="warning" ng-show="serversFailOverDialogCtl.status.backfill">
              <strong>Attention</strong> – There are not replica (backup) copies of all data on this node! Failing over the node now will irrecoverably lose that data when the incomplete replica is activated and this node is removed from the cluster. If the node might come back online, it is recommended to wait. Check this box if you want to failover the node, despite the resulting data loss
              <label>
                <input type="checkbox" name="confirmation"
                       ng-model="serversFailOverDialogCtl.status.confirmation">
                Please confirm Failover.</label>
            </div>
            <div class="warning" ng-show="!serversFailOverDialogCtl.status.backfill">
              <strong>Warning</strong> – Failing over the node will remove it from the cluster and activate a replica. Operations not replicated before the node became unresponsive, will be lost. Rebalancing will be required to add the node back into the cluster. Please confirm Failover.
            </div>
          </div>
          <!-- Case 3: dataless node — fail over loses nothing, just note the rebalance. -->
          <div class="failover_warning pat_20" ng-if="serversFailOverDialogCtl.status.dataless">
            <div class="warning">
              <strong>Note</strong> – Failing over this node (which has no data) will remove it from the cluster. Rebalancing will be required to add the node back into the cluster. Please confirm Failover.
            </div>
          </div>
        </div>
      </div>
      <div class="right save_cancel">
        <button type="submit" class="save_button float_right"
                ng-click="serversFailOverDialogCtl.onSubmit()" ng-model="button"
                ng-disabled="serversFailOverDialogCtl.isFailOverBtnDisabled()">Fail Over</button>
        <a class="close casper_close_failover_confirmation_dialog cancel_button float_right"
           ng-click="$dismiss()">Cancel</a>
      </div>
    </div>
  </div>
</div>
Java
package eu.atos.sla.dao.jpa;

import java.util.Arrays;
import java.util.List;

import javax.persistence.EntityManager;
import javax.persistence.EntityNotFoundException;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import eu.atos.sla.dao.ITemplateDAO;
import eu.atos.sla.datamodel.ITemplate;
import eu.atos.sla.datamodel.bean.Template;

/**
 * JPA implementation of {@link ITemplateDAO} for SLA {@link Template} entities,
 * backed by the {@code slarepositoryDB} persistence unit.
 *
 * <p>All public methods run in a Spring-managed transaction with
 * {@code REQUIRED} propagation.
 * NOTE(review): the finder methods declare {@code readOnly = false}; this is
 * kept as-is to preserve existing behavior, but {@code readOnly = true} is
 * likely intended for pure reads — confirm before changing.
 */
@Repository("TemplateRepository")
public class TemplateDAOJpa implements ITemplateDAO {

    private static final Logger logger = LoggerFactory.getLogger(TemplateDAOJpa.class);

    private EntityManager entityManager;

    @PersistenceContext(unitName = "slarepositoryDB")
    public void setEntityManager(EntityManager entityManager) {
        this.entityManager = entityManager;
    }

    public EntityManager getEntityManager() {
        return entityManager;
    }

    /**
     * Returns the template with the given primary key.
     *
     * @param id database identifier
     * @return the matching template, or {@code null} if none exists
     */
    @Override
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public Template getById(Long id) {
        return entityManager.find(Template.class, id);
    }

    /**
     * Returns the template with the given business UUID.
     *
     * @param uuid template UUID
     * @return the matching template, or {@code null} if not found
     */
    @Override
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public Template getByUuid(String uuid) {
        try {
            TypedQuery<Template> query = entityManager.createNamedQuery(
                    Template.QUERY_FIND_BY_UUID, Template.class);
            query.setParameter("uuid", uuid);
            return query.getSingleResult();
        } catch (NoResultException e) {
            logger.debug("No Result found: {}", e.toString());
            return null;
        }
    }

    /**
     * Searches templates by provider and, optionally, by service identifiers.
     *
     * @param providerId provider to filter by
     * @param serviceIds service identifiers to filter by, or {@code null} for all
     * @return matching templates; never {@code null} (JPA getResultList never returns null)
     */
    @Override
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public List<ITemplate> search(String providerId, String[] serviceIds) {
        TypedQuery<ITemplate> query = entityManager.createNamedQuery(
                Template.QUERY_SEARCH, ITemplate.class);
        List<String> serviceIdList = (serviceIds != null) ? Arrays.asList(serviceIds) : null;
        query.setParameter("providerId", providerId);
        query.setParameter("serviceIds", serviceIdList);
        // "flagServiceIds" tells the named query whether the serviceIds filter is active.
        query.setParameter("flagServiceIds", (serviceIds != null) ? "flag" : null);
        logger.debug("providerId:{} - serviceIds:{}", providerId, serviceIdList);
        List<ITemplate> templates = query.getResultList();
        logger.debug("Number of templates:{}", templates.size());
        return templates;
    }

    /**
     * Returns the templates associated with the given agreement.
     *
     * @param agreement agreement identifier
     * @return matching templates; never {@code null}
     */
    @Override
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public List<ITemplate> getByAgreement(String agreement) {
        TypedQuery<ITemplate> query = entityManager.createNamedQuery(
                Template.QUERY_FIND_BY_AGREEMENT, ITemplate.class);
        query.setParameter("agreement", agreement);
        List<ITemplate> templates = query.getResultList();
        logger.debug("Number of templates:{}", templates.size());
        return templates;
    }

    /**
     * Returns all templates in the repository.
     *
     * @return all templates; never {@code null}
     */
    @Override
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public List<ITemplate> getAll() {
        TypedQuery<ITemplate> query = entityManager.createNamedQuery(
                Template.QUERY_FIND_ALL, ITemplate.class);
        List<ITemplate> templates = query.getResultList();
        logger.debug("Number of templates:{}", templates.size());
        return templates;
    }

    /**
     * Persists a new template and flushes immediately so the generated id is available.
     *
     * @param template template to persist
     * @return the persisted template
     */
    @Override
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public ITemplate save(ITemplate template) {
        logger.info("template.getUuid() {}", template.getUuid());
        entityManager.persist(template);
        entityManager.flush();
        return template;
    }

    /**
     * Updates the template identified by {@code uuid} with the given state.
     *
     * @param uuid UUID of the template to update
     * @param template new state; its id is overwritten with the persistent id
     * @return {@code true} if a template with that UUID existed and was merged,
     *         {@code false} otherwise
     */
    @Override
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public boolean update(String uuid, ITemplate template) {
        Template templateDB = null;
        try {
            TypedQuery<Template> query = entityManager.createNamedQuery(
                    Template.QUERY_FIND_BY_UUID, Template.class);
            query.setParameter("uuid", uuid);
            templateDB = query.getSingleResult();
        } catch (NoResultException e) {
            logger.debug("No Result found: {}", e.toString());
        }
        if (templateDB == null) {
            return false;
        }
        // Carry over the persistent id so merge() updates the existing row
        // instead of inserting a new one.
        template.setId(templateDB.getId());
        logger.info("template to update with id{}", template.getId());
        entityManager.merge(template);
        entityManager.flush();
        return true;
    }

    /**
     * Deletes the given template.
     *
     * @param template template whose id identifies the row to delete
     * @return {@code true} if the template existed and was removed,
     *         {@code false} if no row with that id was found
     */
    @Override
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public boolean delete(ITemplate template) {
        try {
            // getReference avoids loading the full entity just to delete it.
            Template templateDeleted = entityManager.getReference(Template.class, template.getId());
            entityManager.remove(templateDeleted);
            entityManager.flush();
            return true;
        } catch (EntityNotFoundException e) {
            logger.debug("Template[{}] not found", template.getId());
            return false;
        }
    }
}
Java
/*-------------------------------------------------------------------------- * linq.js - LINQ for JavaScript * ver 3.0.3-Beta4 (Oct. 9th, 2012) * * created and maintained by neuecc <[email protected]> * licensed under MIT License * http://linqjs.codeplex.com/ *------------------------------------------------------------------------*/ (function (root, undefined) { // ReadOnly Function var Functions = { Identity: function (x) { return x; }, True: function () { return true; }, Blank: function () { } }; // const Type var Types = { Boolean: typeof true, Number: typeof 0, String: typeof "", Object: typeof {}, Undefined: typeof undefined, Function: typeof function () { } }; // private utility methods var Utils = { // Create anonymous function from lambda expression string createLambda: function (expression) { if (expression == null) return Functions.Identity; if (typeof expression == Types.String) { if (expression == "") { return Functions.Identity; } else if (expression.indexOf("=>") == -1) { var regexp = new RegExp("[$]+", "g"); var maxLength = 0; var match; while (match = regexp.exec(expression)) { var paramNumber = match[0].length; if (paramNumber > maxLength) { maxLength = paramNumber; } } var argArray = []; for (var i = 1; i <= maxLength; i++) { var dollar = ""; for (var j = 0; j < i; j++) { dollar += "$"; } argArray.push(dollar); } var args = Array.prototype.join.call(argArray, ","); return new Function(args, "return " + expression); } else { var expr = expression.match(/^[(\s]*([^()]*?)[)\s]*=>(.*)/); return new Function(expr[1], "return " + expr[2]); } } return expression; }, isIEnumerable: function (obj) { if (typeof Enumerator !== Types.Undefined) { try { new Enumerator(obj); // check JScript(IE)'s Enumerator return true; } catch (e) { } } return false; }, // IE8's defineProperty is defined but cannot use, therefore check defineProperties defineProperty: (Object.defineProperties != null) ? 
function (target, methodName, value) { Object.defineProperty(target, methodName, { enumerable: false, configurable: true, writable: true, value: value }) } : function (target, methodName, value) { target[methodName] = value; }, compare: function (a, b) { return (a === b) ? 0 : (a > b) ? 1 : -1; }, dispose: function (obj) { if (obj != null) obj.dispose(); } }; // IEnumerator State var State = { Before: 0, Running: 1, After: 2 }; // "Enumerator" is conflict JScript's "Enumerator" var IEnumerator = function (initialize, tryGetNext, dispose) { var yielder = new Yielder(); var state = State.Before; this.current = yielder.current; this.moveNext = function () { try { switch (state) { case State.Before: state = State.Running; initialize(); // fall through case State.Running: if (tryGetNext.apply(yielder)) { return true; } else { this.dispose(); return false; } case State.After: return false; } } catch (e) { this.dispose(); throw e; } }; this.dispose = function () { if (state != State.Running) return; try { dispose(); } finally { state = State.After; } }; }; // for tryGetNext var Yielder = function () { var current = null; this.current = function () { return current; }; this.yieldReturn = function (value) { current = value; return true; }; this.yieldBreak = function () { return false; }; }; // Enumerable constuctor var Enumerable = function (getEnumerator) { this.getEnumerator = getEnumerator; }; // Utility Enumerable.Utils = {}; // container Enumerable.Utils.createLambda = function (expression) { return Utils.createLambda(expression); }; Enumerable.Utils.createEnumerable = function (getEnumerator) { return new Enumerable(getEnumerator); }; Enumerable.Utils.createEnumerator = function (initialize, tryGetNext, dispose) { return new IEnumerator(initialize, tryGetNext, dispose); }; Enumerable.Utils.extendTo = function (type) { var typeProto = type.prototype; var enumerableProto; if (type === Array) { enumerableProto = ArrayEnumerable.prototype; Utils.defineProperty(typeProto, 
"getSource", function () { return this; }); } else { enumerableProto = Enumerable.prototype; Utils.defineProperty(typeProto, "getEnumerator", function () { return Enumerable.from(this).getEnumerator(); }); } for (var methodName in enumerableProto) { var func = enumerableProto[methodName]; // already extended if (typeProto[methodName] == func) continue; // already defined(example Array#reverse/join/forEach...) if (typeProto[methodName] != null) { methodName = methodName + "ByLinq"; if (typeProto[methodName] == func) continue; // recheck } if (func instanceof Function) { Utils.defineProperty(typeProto, methodName, func); } } }; // Generator Enumerable.choice = function () // variable argument { var args = arguments; return new Enumerable(function () { return new IEnumerator( function () { args = (args[0] instanceof Array) ? args[0] : (args[0].getEnumerator != null) ? args[0].toArray() : args; }, function () { return this.yieldReturn(args[Math.floor(Math.random() * args.length)]); }, Functions.Blank); }); }; Enumerable.cycle = function () // variable argument { var args = arguments; return new Enumerable(function () { var index = 0; return new IEnumerator( function () { args = (args[0] instanceof Array) ? args[0] : (args[0].getEnumerator != null) ? args[0].toArray() : args; }, function () { if (index >= args.length) index = 0; return this.yieldReturn(args[index++]); }, Functions.Blank); }); }; Enumerable.empty = function () { return new Enumerable(function () { return new IEnumerator( Functions.Blank, function () { return false; }, Functions.Blank); }); }; Enumerable.from = function (obj) { if (obj == null) { return Enumerable.empty(); } if (obj instanceof Enumerable) { return obj; } if (typeof obj == Types.Number || typeof obj == Types.Boolean) { return Enumerable.repeat(obj, 1); } if (typeof obj == Types.String) { return new Enumerable(function () { var index = 0; return new IEnumerator( Functions.Blank, function () { return (index < obj.length) ? 
this.yieldReturn(obj.charAt(index++)) : false; }, Functions.Blank); }); } if (typeof obj != Types.Function) { // array or array like object if (typeof obj.length == Types.Number) { return new ArrayEnumerable(obj); } // JScript's IEnumerable if (!(obj instanceof Object) && Utils.isIEnumerable(obj)) { return new Enumerable(function () { var isFirst = true; var enumerator; return new IEnumerator( function () { enumerator = new Enumerator(obj); }, function () { if (isFirst) isFirst = false; else enumerator.moveNext(); return (enumerator.atEnd()) ? false : this.yieldReturn(enumerator.item()); }, Functions.Blank); }); } // WinMD IIterable<T> if (typeof Windows === Types.Object && typeof obj.first === Types.Function) { return new Enumerable(function () { var isFirst = true; var enumerator; return new IEnumerator( function () { enumerator = obj.first(); }, function () { if (isFirst) isFirst = false; else enumerator.moveNext(); return (enumerator.hasCurrent) ? this.yieldReturn(enumerator.current) : this.yieldBreak(); }, Functions.Blank); }); } } // case function/object : Create keyValuePair[] return new Enumerable(function () { var array = []; var index = 0; return new IEnumerator( function () { for (var key in obj) { var value = obj[key]; if (!(value instanceof Function) && Object.prototype.hasOwnProperty.call(obj, key)) { array.push({ key: key, value: value }); } } }, function () { return (index < array.length) ? this.yieldReturn(array[index++]) : false; }, Functions.Blank); }); }, Enumerable.make = function (element) { return Enumerable.repeat(element, 1); }; // Overload:function(input, pattern) // Overload:function(input, pattern, flags) Enumerable.matches = function (input, pattern, flags) { if (flags == null) flags = ""; if (pattern instanceof RegExp) { flags += (pattern.ignoreCase) ? "i" : ""; flags += (pattern.multiline) ? 
"m" : ""; pattern = pattern.source; } if (flags.indexOf("g") === -1) flags += "g"; return new Enumerable(function () { var regex; return new IEnumerator( function () { regex = new RegExp(pattern, flags); }, function () { var match = regex.exec(input); return (match) ? this.yieldReturn(match) : false; }, Functions.Blank); }); }; // Overload:function(start, count) // Overload:function(start, count, step) Enumerable.range = function (start, count, step) { if (step == null) step = 1; return new Enumerable(function () { var value; var index = 0; return new IEnumerator( function () { value = start - step; }, function () { return (index++ < count) ? this.yieldReturn(value += step) : this.yieldBreak(); }, Functions.Blank); }); }; // Overload:function(start, count) // Overload:function(start, count, step) Enumerable.rangeDown = function (start, count, step) { if (step == null) step = 1; return new Enumerable(function () { var value; var index = 0; return new IEnumerator( function () { value = start + step; }, function () { return (index++ < count) ? this.yieldReturn(value -= step) : this.yieldBreak(); }, Functions.Blank); }); }; // Overload:function(start, to) // Overload:function(start, to, step) Enumerable.rangeTo = function (start, to, step) { if (step == null) step = 1; if (start < to) { return new Enumerable(function () { var value; return new IEnumerator( function () { value = start - step; }, function () { var next = value += step; return (next <= to) ? this.yieldReturn(next) : this.yieldBreak(); }, Functions.Blank); }); } else { return new Enumerable(function () { var value; return new IEnumerator( function () { value = start + step; }, function () { var next = value -= step; return (next >= to) ? 
this.yieldReturn(next) : this.yieldBreak(); }, Functions.Blank); }); } }; // Overload:function(element) // Overload:function(element, count) Enumerable.repeat = function (element, count) { if (count != null) return Enumerable.repeat(element).take(count); return new Enumerable(function () { return new IEnumerator( Functions.Blank, function () { return this.yieldReturn(element); }, Functions.Blank); }); }; Enumerable.repeatWithFinalize = function (initializer, finalizer) { initializer = Utils.createLambda(initializer); finalizer = Utils.createLambda(finalizer); return new Enumerable(function () { var element; return new IEnumerator( function () { element = initializer(); }, function () { return this.yieldReturn(element); }, function () { if (element != null) { finalizer(element); element = null; } }); }); }; // Overload:function(func) // Overload:function(func, count) Enumerable.generate = function (func, count) { if (count != null) return Enumerable.generate(func).take(count); func = Utils.createLambda(func); return new Enumerable(function () { return new IEnumerator( Functions.Blank, function () { return this.yieldReturn(func()); }, Functions.Blank); }); }; // Overload:function() // Overload:function(start) // Overload:function(start, step) Enumerable.toInfinity = function (start, step) { if (start == null) start = 0; if (step == null) step = 1; return new Enumerable(function () { var value; return new IEnumerator( function () { value = start - step; }, function () { return this.yieldReturn(value += step); }, Functions.Blank); }); }; // Overload:function() // Overload:function(start) // Overload:function(start, step) Enumerable.toNegativeInfinity = function (start, step) { if (start == null) start = 0; if (step == null) step = 1; return new Enumerable(function () { var value; return new IEnumerator( function () { value = start + step; }, function () { return this.yieldReturn(value -= step); }, Functions.Blank); }); }; Enumerable.unfold = function (seed, func) { 
func = Utils.createLambda(func); return new Enumerable(function () { var isFirst = true; var value; return new IEnumerator( Functions.Blank, function () { if (isFirst) { isFirst = false; value = seed; return this.yieldReturn(value); } value = func(value); return this.yieldReturn(value); }, Functions.Blank); }); }; Enumerable.defer = function (enumerableFactory) { return new Enumerable(function () { var enumerator; return new IEnumerator( function () { enumerator = Enumerable.from(enumerableFactory()).getEnumerator(); }, function () { return (enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : this.yieldBreak(); }, function () { Utils.dispose(enumerator); }); }); }; // Extension Methods /* Projection and Filtering Methods */ // Overload:function(func) // Overload:function(func, resultSelector<element>) // Overload:function(func, resultSelector<element, nestLevel>) Enumerable.prototype.traverseBreadthFirst = function (func, resultSelector) { var source = this; func = Utils.createLambda(func); resultSelector = Utils.createLambda(resultSelector); return new Enumerable(function () { var enumerator; var nestLevel = 0; var buffer = []; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (true) { if (enumerator.moveNext()) { buffer.push(enumerator.current()); return this.yieldReturn(resultSelector(enumerator.current(), nestLevel)); } var next = Enumerable.from(buffer).selectMany(function (x) { return func(x); }); if (!next.any()) { return false; } else { nestLevel++; buffer = []; Utils.dispose(enumerator); enumerator = next.getEnumerator(); } } }, function () { Utils.dispose(enumerator); }); }); }; // Overload:function(func) // Overload:function(func, resultSelector<element>) // Overload:function(func, resultSelector<element, nestLevel>) Enumerable.prototype.traverseDepthFirst = function (func, resultSelector) { var source = this; func = Utils.createLambda(func); resultSelector = 
Utils.createLambda(resultSelector); return new Enumerable(function () { var enumeratorStack = []; var enumerator; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (true) { if (enumerator.moveNext()) { var value = resultSelector(enumerator.current(), enumeratorStack.length); enumeratorStack.push(enumerator); enumerator = Enumerable.from(func(enumerator.current())).getEnumerator(); return this.yieldReturn(value); } if (enumeratorStack.length <= 0) return false; Utils.dispose(enumerator); enumerator = enumeratorStack.pop(); } }, function () { try { Utils.dispose(enumerator); } finally { Enumerable.from(enumeratorStack).forEach(function (s) { s.dispose(); }); } }); }); }; Enumerable.prototype.flatten = function () { var source = this; return new Enumerable(function () { var enumerator; var middleEnumerator = null; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (true) { if (middleEnumerator != null) { if (middleEnumerator.moveNext()) { return this.yieldReturn(middleEnumerator.current()); } else { middleEnumerator = null; } } if (enumerator.moveNext()) { if (enumerator.current() instanceof Array) { Utils.dispose(middleEnumerator); middleEnumerator = Enumerable.from(enumerator.current()) .selectMany(Functions.Identity) .flatten() .getEnumerator(); continue; } else { return this.yieldReturn(enumerator.current()); } } return false; } }, function () { try { Utils.dispose(enumerator); } finally { Utils.dispose(middleEnumerator); } }); }); }; Enumerable.prototype.pairwise = function (selector) { var source = this; selector = Utils.createLambda(selector); return new Enumerable(function () { var enumerator; return new IEnumerator( function () { enumerator = source.getEnumerator(); enumerator.moveNext(); }, function () { var prev = enumerator.current(); return (enumerator.moveNext()) ? 
this.yieldReturn(selector(prev, enumerator.current())) : false; }, function () { Utils.dispose(enumerator); }); }); }; // Overload:function(func) // Overload:function(seed,func<value,element>) Enumerable.prototype.scan = function (seed, func) { var isUseSeed; if (func == null) { func = Utils.createLambda(seed); // arguments[0] isUseSeed = false; } else { func = Utils.createLambda(func); isUseSeed = true; } var source = this; return new Enumerable(function () { var enumerator; var value; var isFirst = true; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { if (isFirst) { isFirst = false; if (!isUseSeed) { if (enumerator.moveNext()) { return this.yieldReturn(value = enumerator.current()); } } else { return this.yieldReturn(value = seed); } } return (enumerator.moveNext()) ? this.yieldReturn(value = func(value, enumerator.current())) : false; }, function () { Utils.dispose(enumerator); }); }); }; // Overload:function(selector<element>) // Overload:function(selector<element,index>) Enumerable.prototype.select = function (selector) { selector = Utils.createLambda(selector); if (selector.length <= 1) { return new WhereSelectEnumerable(this, null, selector); } else { var source = this; return new Enumerable(function () { var enumerator; var index = 0; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { return (enumerator.moveNext()) ? 
this.yieldReturn(selector(enumerator.current(), index++)) : false; }, function () { Utils.dispose(enumerator); }); }); } }; // Overload:function(collectionSelector<element>) // Overload:function(collectionSelector<element,index>) // Overload:function(collectionSelector<element>,resultSelector) // Overload:function(collectionSelector<element,index>,resultSelector) Enumerable.prototype.selectMany = function (collectionSelector, resultSelector) { var source = this; collectionSelector = Utils.createLambda(collectionSelector); if (resultSelector == null) resultSelector = function (a, b) { return b; }; resultSelector = Utils.createLambda(resultSelector); return new Enumerable(function () { var enumerator; var middleEnumerator = undefined; var index = 0; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { if (middleEnumerator === undefined) { if (!enumerator.moveNext()) return false; } do { if (middleEnumerator == null) { var middleSeq = collectionSelector(enumerator.current(), index++); middleEnumerator = Enumerable.from(middleSeq).getEnumerator(); } if (middleEnumerator.moveNext()) { return this.yieldReturn(resultSelector(enumerator.current(), middleEnumerator.current())); } Utils.dispose(middleEnumerator); middleEnumerator = null; } while (enumerator.moveNext()); return false; }, function () { try { Utils.dispose(enumerator); } finally { Utils.dispose(middleEnumerator); } }); }); }; // Overload:function(predicate<element>) // Overload:function(predicate<element,index>) Enumerable.prototype.where = function (predicate) { predicate = Utils.createLambda(predicate); if (predicate.length <= 1) { return new WhereEnumerable(this, predicate); } else { var source = this; return new Enumerable(function () { var enumerator; var index = 0; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (enumerator.moveNext()) { if (predicate(enumerator.current(), index++)) { return 
this.yieldReturn(enumerator.current()); } } return false; }, function () { Utils.dispose(enumerator); }); }); } }; // Overload:function(selector<element>) // Overload:function(selector<element,index>) Enumerable.prototype.choose = function (selector) { selector = Utils.createLambda(selector); var source = this; return new Enumerable(function () { var enumerator; var index = 0; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (enumerator.moveNext()) { var result = selector(enumerator.current(), index++); if (result != null) { return this.yieldReturn(result); } } return this.yieldBreak(); }, function () { Utils.dispose(enumerator); }); }); }; Enumerable.prototype.ofType = function (type) { var typeName; switch (type) { case Number: typeName = Types.Number; break; case String: typeName = Types.String; break; case Boolean: typeName = Types.Boolean; break; case Function: typeName = Types.Function; break; default: typeName = null; break; } return (typeName === null) ? 
this.where(function (x) { return x instanceof type; }) : this.where(function (x) { return typeof x === typeName; }); }; // mutiple arguments, last one is selector, others are enumerable Enumerable.prototype.zip = function () { var args = arguments; var selector = Utils.createLambda(arguments[arguments.length - 1]); var source = this; // optimized case:argument is 2 if (arguments.length == 2) { var second = arguments[0]; return new Enumerable(function () { var firstEnumerator; var secondEnumerator; var index = 0; return new IEnumerator( function () { firstEnumerator = source.getEnumerator(); secondEnumerator = Enumerable.from(second).getEnumerator(); }, function () { if (firstEnumerator.moveNext() && secondEnumerator.moveNext()) { return this.yieldReturn(selector(firstEnumerator.current(), secondEnumerator.current(), index++)); } return false; }, function () { try { Utils.dispose(firstEnumerator); } finally { Utils.dispose(secondEnumerator); } }); }); } else { return new Enumerable(function () { var enumerators; var index = 0; return new IEnumerator( function () { var array = Enumerable.make(source) .concat(Enumerable.from(args).takeExceptLast().select(Enumerable.from)) .select(function (x) { return x.getEnumerator() }) .toArray(); enumerators = Enumerable.from(array); }, function () { if (enumerators.all(function (x) { return x.moveNext() })) { var array = enumerators .select(function (x) { return x.current() }) .toArray(); array.push(index++); return this.yieldReturn(selector.apply(null, array)); } else { return this.yieldBreak(); } }, function () { Enumerable.from(enumerators).forEach(Utils.dispose); }); }); } }; // mutiple arguments Enumerable.prototype.merge = function () { var args = arguments; var source = this; return new Enumerable(function () { var enumerators; var index = -1; return new IEnumerator( function () { enumerators = Enumerable.make(source) .concat(Enumerable.from(args).select(Enumerable.from)) .select(function (x) { return x.getEnumerator() }) 
.toArray(); }, function () { while (enumerators.length > 0) { index = (index >= enumerators.length - 1) ? 0 : index + 1; var enumerator = enumerators[index]; if (enumerator.moveNext()) { return this.yieldReturn(enumerator.current()); } else { enumerator.dispose(); enumerators.splice(index--, 1); } } return this.yieldBreak(); }, function () { Enumerable.from(enumerators).forEach(Utils.dispose); }); }); }; /* Join Methods */ // Overload:function (inner, outerKeySelector, innerKeySelector, resultSelector) // Overload:function (inner, outerKeySelector, innerKeySelector, resultSelector, compareSelector) Enumerable.prototype.join = function (inner, outerKeySelector, innerKeySelector, resultSelector, compareSelector) { outerKeySelector = Utils.createLambda(outerKeySelector); innerKeySelector = Utils.createLambda(innerKeySelector); resultSelector = Utils.createLambda(resultSelector); compareSelector = Utils.createLambda(compareSelector); var source = this; return new Enumerable(function () { var outerEnumerator; var lookup; var innerElements = null; var innerCount = 0; return new IEnumerator( function () { outerEnumerator = source.getEnumerator(); lookup = Enumerable.from(inner).toLookup(innerKeySelector, Functions.Identity, compareSelector); }, function () { while (true) { if (innerElements != null) { var innerElement = innerElements[innerCount++]; if (innerElement !== undefined) { return this.yieldReturn(resultSelector(outerEnumerator.current(), innerElement)); } innerElement = null; innerCount = 0; } if (outerEnumerator.moveNext()) { var key = outerKeySelector(outerEnumerator.current()); innerElements = lookup.get(key).toArray(); } else { return false; } } }, function () { Utils.dispose(outerEnumerator); }); }); }; // Overload:function (inner, outerKeySelector, innerKeySelector, resultSelector) // Overload:function (inner, outerKeySelector, innerKeySelector, resultSelector, compareSelector) Enumerable.prototype.groupJoin = function (inner, outerKeySelector, 
// --- tail of groupJoin (the method header begins above this chunk) ---
// Correlates each outer element with the Lookup group of matching inner
// elements and projects the pair through resultSelector. Exactly one result
// is yielded per outer element.
// NOTE(review): the `source.getEnumerator()` call in the factory body is
// immediately overwritten by the initializer below — looks redundant; confirm
// before removing.
innerKeySelector, resultSelector, compareSelector) {
    outerKeySelector = Utils.createLambda(outerKeySelector);
    innerKeySelector = Utils.createLambda(innerKeySelector);
    resultSelector = Utils.createLambda(resultSelector);
    compareSelector = Utils.createLambda(compareSelector);
    var source = this;

    return new Enumerable(function () {
        var enumerator = source.getEnumerator();
        var lookup = null;

        return new IEnumerator(
            function () {
                enumerator = source.getEnumerator();
                // Builds the inner index eagerly on first moveNext.
                lookup = Enumerable.from(inner).toLookup(innerKeySelector, Functions.Identity, compareSelector);
            },
            function () {
                if (enumerator.moveNext()) {
                    var innerElement = lookup.get(outerKeySelector(enumerator.current()));
                    return this.yieldReturn(resultSelector(enumerator.current(), innerElement));
                }
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

/* Set Methods */

// all: true iff predicate holds for every element; short-circuits on the
// first failing element (returning false from forEach breaks the loop).
Enumerable.prototype.all = function (predicate) {
    predicate = Utils.createLambda(predicate);

    var result = true;
    this.forEach(function (x) {
        if (!predicate(x)) {
            result = false;
            return false; // break
        }
    });
    return result;
};

// Overload:function()
// Overload:function(predicate)
// any: with no arguments, true iff the sequence is non-empty; with a
// predicate, true iff at least one element satisfies it.
Enumerable.prototype.any = function (predicate) {
    predicate = Utils.createLambda(predicate);

    var enumerator = this.getEnumerator();
    try {
        if (arguments.length == 0) return enumerator.moveNext(); // case:function()

        while (enumerator.moveNext()) // case:function(predicate)
        {
            if (predicate(enumerator.current())) return true;
        }
        return false;
    }
    finally { Utils.dispose(enumerator); }
};

// isEmpty: true iff the sequence contains no elements.
Enumerable.prototype.isEmpty = function () {
    return !this.any();
};

// multiple arguments
// concat: lazily appends one or more sequences after this one, in argument
// order. The single-argument form keeps a lighter two-enumerator fast path;
// the variadic form walks an array of enumerators, disposing each as it is
// exhausted.
Enumerable.prototype.concat = function () {
    var source = this;

    if (arguments.length == 1) {
        var second = arguments[0];

        return new Enumerable(function () {
            var firstEnumerator;
            var secondEnumerator;

            return new IEnumerator(
                function () { firstEnumerator = source.getEnumerator(); },
                function () {
                    // Drain the first sequence, then switch to the second.
                    if (secondEnumerator == null) {
                        if (firstEnumerator.moveNext()) return this.yieldReturn(firstEnumerator.current());
                        secondEnumerator = Enumerable.from(second).getEnumerator();
                    }
                    if (secondEnumerator.moveNext()) return this.yieldReturn(secondEnumerator.current());
                    return false;
                },
                function () {
                    try {
                        Utils.dispose(firstEnumerator);
                    }
                    finally {
                        Utils.dispose(secondEnumerator);
                    }
                });
        });
    }
    else {
        var args = arguments;

        return new Enumerable(function () {
            var enumerators;

            return new IEnumerator(
                function () {
                    // One enumerator per sequence, this source first.
                    enumerators = Enumerable.make(source)
                        .concat(Enumerable.from(args).select(Enumerable.from))
                        .select(function (x) { return x.getEnumerator() })
                        .toArray();
                },
                function () {
                    while (enumerators.length > 0) {
                        var enumerator = enumerators[0];

                        if (enumerator.moveNext()) {
                            return this.yieldReturn(enumerator.current());
                        }
                        else {
                            // Current sequence finished: drop it and move on.
                            enumerator.dispose();
                            enumerators.splice(0, 1);
                        }
                    }
                    return this.yieldBreak();
                },
                function () {
                    Enumerable.from(enumerators).forEach(Utils.dispose);
                });
        });
    }
};

// insert: splices `second` into this sequence at position `index`; when the
// source is shorter than `index`, the remainder of `second` is appended at
// the end instead (tracked by isEnumerated).
Enumerable.prototype.insert = function (index, second) {
    var source = this;

    return new Enumerable(function () {
        var firstEnumerator;
        var secondEnumerator;
        var count = 0;
        var isEnumerated = false;

        return new IEnumerator(
            function () {
                firstEnumerator = source.getEnumerator();
                secondEnumerator = Enumerable.from(second).getEnumerator();
            },
            function () {
                if (count == index && secondEnumerator.moveNext()) {
                    isEnumerated = true;
                    return this.yieldReturn(secondEnumerator.current());
                }
                if (firstEnumerator.moveNext()) {
                    count++;
                    return this.yieldReturn(firstEnumerator.current());
                }
                // Source exhausted before reaching index: append the rest.
                if (!isEnumerated && secondEnumerator.moveNext()) {
                    return this.yieldReturn(secondEnumerator.current());
                }
                return false;
            },
            function () {
                try {
                    Utils.dispose(firstEnumerator);
                }
                finally {
                    Utils.dispose(secondEnumerator);
                }
            });
    });
};

// alternate: interleaves the given value (or a frozen copy of the given
// sequence) between consecutive source elements, using a one-element
// lookahead buffer to decide whether another separator is needed.
// NOTE(review): the `buffer == null` lookahead test means a source that
// contains null elements will confuse this state machine — confirm whether
// null elements are expected here.
Enumerable.prototype.alternate = function (alternateValueOrSequence) {
    var source = this;

    return new Enumerable(function () {
        var buffer;
        var enumerator;
        var alternateSequence;
        var alternateEnumerator;

        return new IEnumerator(
            function () {
                // Sequence-like separators are snapshotted so they can be
                // re-enumerated between every pair of source elements.
                if (alternateValueOrSequence instanceof Array || alternateValueOrSequence.getEnumerator != null) {
                    alternateSequence = Enumerable.from(Enumerable.from(alternateValueOrSequence).toArray()); // freeze
                }
                else {
                    alternateSequence = Enumerable.make(alternateValueOrSequence);
                }
                enumerator = source.getEnumerator();
                if (enumerator.moveNext()) buffer = enumerator.current();
            },
            function () {
                while (true) {
                    // Currently emitting a separator run.
                    if (alternateEnumerator != null) {
                        if (alternateEnumerator.moveNext()) {
                            return this.yieldReturn(alternateEnumerator.current());
                        }
                        else {
                            alternateEnumerator = null;
                        }
                    }

                    if (buffer == null && enumerator.moveNext()) {
                        buffer = enumerator.current(); // hasNext
                        alternateEnumerator = alternateSequence.getEnumerator();
                        continue; // GOTO
                    }
                    else if (buffer != null) {
                        var retVal = buffer;
                        buffer = null;
                        return this.yieldReturn(retVal);
                    }

                    return this.yieldBreak();
                }
            },
            function () {
                try {
                    Utils.dispose(enumerator);
                }
                finally {
                    Utils.dispose(alternateEnumerator);
                }
            });
    });
};

// Overload:function(value)
// Overload:function(value, compareSelector)
// contains: true iff some element's compare key is strictly equal (===) to
// `value`. Note the selector is applied to elements only, never to `value`.
Enumerable.prototype.contains = function (value, compareSelector) {
    compareSelector = Utils.createLambda(compareSelector);
    var enumerator = this.getEnumerator();
    try {
        while (enumerator.moveNext()) {
            if (compareSelector(enumerator.current()) === value) return true;
        }
        return false;
    }
    finally { Utils.dispose(enumerator); }
};

// defaultIfEmpty: passes the source through unchanged, except that an empty
// source yields the single defaultValue (null when omitted).
Enumerable.prototype.defaultIfEmpty = function (defaultValue) {
    var source = this;
    if (defaultValue === undefined) defaultValue = null;

    return new Enumerable(function () {
        var enumerator;
        var isFirst = true;

        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                if (enumerator.moveNext()) {
                    isFirst = false;
                    return this.yieldReturn(enumerator.current());
                }
                else if (isFirst) {
                    isFirst = false;
                    return this.yieldReturn(defaultValue);
                }
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// Overload:function()
// Overload:function(compareSelector)
// Overload:function()
// Overload:function(compareSelector)
// distinct: removes duplicate elements. Implemented as set difference
// against the empty sequence, reusing except()'s Dictionary bookkeeping.
Enumerable.prototype.distinct = function (compareSelector) {
    return this.except(Enumerable.empty(), compareSelector);
};

// distinctUntilChanged: collapses runs of consecutive elements whose compare
// key is identical, always yielding the first element of each run.
// FIX: `initial` was declared but never initialized, which made the
// first-element branch below dead code; the first element was yielded only
// because `compareKey` happened to start out undefined. Consequently a
// leading element whose compare key is `undefined` was silently dropped
// (undefined === undefined matched the uninitialized compareKey).
// Initializing `initial` to true restores "always yield the first element".
Enumerable.prototype.distinctUntilChanged = function (compareSelector) {
    compareSelector = Utils.createLambda(compareSelector);
    var source = this;

    return new Enumerable(function () {
        var enumerator;
        var compareKey;
        var initial = true; // was: `var initial;` (always falsy)

        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                while (enumerator.moveNext()) {
                    var key = compareSelector(enumerator.current());

                    if (initial) {
                        initial = false;
                        compareKey = key;
                        return this.yieldReturn(enumerator.current());
                    }

                    if (compareKey === key) {
                        continue; // same run: skip
                    }

                    compareKey = key;
                    return this.yieldReturn(enumerator.current());
                }
                return this.yieldBreak();
            },
            function () { Utils.dispose(enumerator); });
    });
};

// Overload:function(second)
// Overload:function(second, compareSelector)
// except: yields source elements not present in `second`. Yielded elements
// are also added to the key set, so the result is itself duplicate-free.
Enumerable.prototype.except = function (second, compareSelector) {
    compareSelector = Utils.createLambda(compareSelector);
    var source = this;

    return new Enumerable(function () {
        var enumerator;
        var keys;

        return new IEnumerator(
            function () {
                enumerator = source.getEnumerator();
                keys = new Dictionary(compareSelector);
                Enumerable.from(second).forEach(function (key) { keys.add(key); });
            },
            function () {
                while (enumerator.moveNext()) {
                    var current = enumerator.current();
                    if (!keys.contains(current)) {
                        keys.add(current);
                        return this.yieldReturn(current);
                    }
                }
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// Overload:function(second)
// Overload:function(second, compareSelector)
// intersect: yields source elements that also appear in `second`; the
// `outs` dictionary ensures each common element is yielded only once.
Enumerable.prototype.intersect = function (second, compareSelector) {
    compareSelector = Utils.createLambda(compareSelector);
    var source = this;

    return new Enumerable(function () {
        var enumerator;
        var keys;
        var outs;

        return new IEnumerator(
            function () {
                enumerator = source.getEnumerator();
                keys = new Dictionary(compareSelector);
                Enumerable.from(second).forEach(function (key) { keys.add(key); });
                outs = new Dictionary(compareSelector);
            },
            function () {
                while (enumerator.moveNext()) {
                    var current = enumerator.current();
                    if (!outs.contains(current) && keys.contains(current)) {
                        outs.add(current);
                        return this.yieldReturn(current);
                    }
                }
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// Overload:function(second)
// Overload:function(second, compareSelector)
// sequenceEqual: element-wise strict comparison of compare keys; false on
// any mismatch or length difference. Both enumerators are disposed.
Enumerable.prototype.sequenceEqual = function (second, compareSelector) {
    compareSelector = Utils.createLambda(compareSelector);

    var firstEnumerator = this.getEnumerator();
    try {
        var secondEnumerator = Enumerable.from(second).getEnumerator();
        try {
            while (firstEnumerator.moveNext()) {
                if (!secondEnumerator.moveNext()
                    || compareSelector(firstEnumerator.current()) !== compareSelector(secondEnumerator.current())) {
                    return false;
                }
            }
            // `second` must be exhausted too, or the lengths differ.
            if (secondEnumerator.moveNext()) return false;
            return true;
        }
        finally { Utils.dispose(secondEnumerator); }
    }
    finally { Utils.dispose(firstEnumerator); }
};

// union: yields the distinct elements of this sequence followed by the
// distinct, not-yet-seen elements of `second` (also de-dupes within each).
Enumerable.prototype.union = function (second, compareSelector) {
    compareSelector = Utils.createLambda(compareSelector);
    var source = this;

    return new Enumerable(function () {
        var firstEnumerator;
        var secondEnumerator;
        var keys;

        return new IEnumerator(
            function () {
                firstEnumerator = source.getEnumerator();
                keys = new Dictionary(compareSelector);
            },
            function () {
                var current;
                if (secondEnumerator === undefined) {
                    while (firstEnumerator.moveNext()) {
                        current = firstEnumerator.current();
                        if (!keys.contains(current)) {
                            keys.add(current);
                            return this.yieldReturn(current);
                        }
                    }
                    secondEnumerator = Enumerable.from(second).getEnumerator();
                }
                while (secondEnumerator.moveNext()) {
                    current = secondEnumerator.current();
                    if (!keys.contains(current)) {
                        keys.add(current);
                        return this.yieldReturn(current);
                    }
                }
                return false;
            },
            function () {
                try {
                    Utils.dispose(firstEnumerator);
                }
                finally {
                    Utils.dispose(secondEnumerator);
                }
            });
    });
};

/* Ordering Methods */

// orderBy: lazy ascending sort by key; actual sorting happens on first
// enumeration inside OrderedEnumerable (supports thenBy chaining).
Enumerable.prototype.orderBy = function (keySelector) {
    return new OrderedEnumerable(this, keySelector, false);
};

// orderByDescending: lazy descending sort by key.
Enumerable.prototype.orderByDescending = function (keySelector) {
    return new OrderedEnumerable(this, keySelector, true);
};

// reverse: buffers the whole source into an array, then walks it backwards.
Enumerable.prototype.reverse = function () {
    var source = this;

    return new Enumerable(function () {
        var buffer;
        var index;

        return new IEnumerator(
            function () {
                buffer = source.toArray();
                index = buffer.length;
            },
            function () {
                return (index > 0) ? this.yieldReturn(buffer[--index]) : false;
            },
            Functions.Blank);
    });
};

// shuffle: yields the buffered elements in uniformly random order, removing
// each picked element (sampling without replacement; finite).
Enumerable.prototype.shuffle = function () {
    var source = this;

    return new Enumerable(function () {
        var buffer;

        return new IEnumerator(
            function () { buffer = source.toArray(); },
            function () {
                if (buffer.length > 0) {
                    var i = Math.floor(Math.random() * buffer.length);
                    return this.yieldReturn(buffer.splice(i, 1)[0]);
                }
                return false;
            },
            Functions.Blank);
    });
};

// weightedSample: endless stream of random picks (with replacement), where
// each element's probability is proportional to weightSelector(element).
// Elements with non-positive weight are excluded. Each moveNext draws a
// value in (0, totalWeight] and binary-searches the cumulative bounds.
Enumerable.prototype.weightedSample = function (weightSelector) {
    weightSelector = Utils.createLambda(weightSelector);
    var source = this;

    return new Enumerable(function () {
        var sortedByBound;
        var totalWeight = 0;

        return new IEnumerator(
            function () {
                sortedByBound = source
                    .choose(function (x) {
                        var weight = weightSelector(x);
                        if (weight <= 0) return null; // ignore 0

                        totalWeight += weight;
                        return { value: x, bound: totalWeight };
                    })
                    .toArray();
            },
            function () {
                if (sortedByBound.length > 0) {
                    var draw = Math.floor(Math.random() * totalWeight) + 1;

                    // Binary search: smallest bound >= draw.
                    var lower = -1;
                    var upper = sortedByBound.length;
                    while (upper - lower > 1) {
                        var index = Math.floor((lower + upper) / 2);
                        if (sortedByBound[index].bound >= draw) {
                            upper = index;
                        }
                        else {
                            lower = index;
                        }
                    }

                    return this.yieldReturn(sortedByBound[upper].value);
                }

                return this.yieldBreak();
            },
            Functions.Blank);
    });
};

/* Grouping Methods */

// Overload:function(keySelector)
// Overload:function(keySelector,elementSelector)
// Overload:function(keySelector,elementSelector,resultSelector)
// Overload:function(keySelector,elementSelector,resultSelector,compareSelector)
// groupBy: groups the entire sequence via toLookup (eager on first
// moveNext), then yields each Grouping, optionally projected through
// resultSelector(key, group).
Enumerable.prototype.groupBy = function (keySelector, elementSelector, resultSelector, compareSelector) {
    var source = this;
    keySelector = Utils.createLambda(keySelector);
    elementSelector = Utils.createLambda(elementSelector);
    if (resultSelector != null) resultSelector = Utils.createLambda(resultSelector);
    compareSelector = Utils.createLambda(compareSelector);

    return new Enumerable(function () {
        var enumerator;

        return new IEnumerator(
            function () {
                enumerator = source.toLookup(keySelector, elementSelector, compareSelector)
                    .toEnumerable()
                    .getEnumerator();
            },
            function () {
                while (enumerator.moveNext()) {
                    return (resultSelector == null) ? this.yieldReturn(enumerator.current()) : this.yieldReturn(resultSelector(enumerator.current().key(), enumerator.current()));
                }
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// Overload:function(keySelector)
// Overload:function(keySelector,elementSelector)
// Overload:function(keySelector,elementSelector,resultSelector)
// Overload:function(keySelector,elementSelector,resultSelector,compareSelector)
// partitionBy: like groupBy, but only groups *consecutive* elements sharing
// a compare key, streaming one group at a time (no full buffering).
Enumerable.prototype.partitionBy = function (keySelector, elementSelector, resultSelector, compareSelector) {
    var source = this;
    keySelector = Utils.createLambda(keySelector);
    elementSelector = Utils.createLambda(elementSelector);
    compareSelector = Utils.createLambda(compareSelector);
    var hasResultSelector;
    if (resultSelector == null) {
        hasResultSelector = false;
        resultSelector = function (key, group) { return new Grouping(key, group); };
    }
    else {
        hasResultSelector = true;
        resultSelector = Utils.createLambda(resultSelector);
    }

    return new Enumerable(function () {
        var enumerator;
        var key;
        var compareKey;
        var group = [];

        return new IEnumerator(
            function () {
                enumerator = source.getEnumerator();
                if (enumerator.moveNext()) {
                    key = keySelector(enumerator.current());
                    compareKey = compareSelector(key);
                    group.push(elementSelector(enumerator.current()));
                }
            },
            function () {
                var hasNext;
                // Accumulate elements while the run's compare key matches.
                while ((hasNext = enumerator.moveNext()) == true) {
                    if (compareKey === compareSelector(keySelector(enumerator.current()))) {
                        group.push(elementSelector(enumerator.current()));
                    }
                    else break;
                }

                if (group.length > 0) {
                    var result = (hasResultSelector) ? resultSelector(key, Enumerable.from(group)) : resultSelector(key, group);
                    if (hasNext) {
                        // Start the next run with the element that broke this one.
                        key = keySelector(enumerator.current());
                        compareKey = compareSelector(key);
                        group = [elementSelector(enumerator.current())];
                    }
                    else group = [];

                    return this.yieldReturn(result);
                }

                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// buffer: yields arrays of up to `count` consecutive elements; the final
// array may be shorter.
Enumerable.prototype.buffer = function (count) {
    var source = this;

    return new Enumerable(function () {
        var enumerator;

        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                var array = [];
                var index = 0;
                while (enumerator.moveNext()) {
                    array.push(enumerator.current());
                    if (++index >= count) return this.yieldReturn(array);
                }
                if (array.length > 0) return this.yieldReturn(array);
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

/* Aggregate Methods */

// Overload:function(func)
// Overload:function(seed,func)
// Overload:function(seed,func,resultSelector)
// aggregate: folds the sequence via scan() and applies resultSelector to the
// final accumulator.
// NOTE(review): resultSelector is also passed as scan's third argument —
// presumably ignored by scan's overload resolution; confirm.
Enumerable.prototype.aggregate = function (seed, func, resultSelector) {
    resultSelector = Utils.createLambda(resultSelector);
    return resultSelector(this.scan(seed, func, resultSelector).last());
};

// Overload:function()
// Overload:function(selector)
// average: arithmetic mean of selector(x); NaN for an empty sequence (0/0).
Enumerable.prototype.average = function (selector) {
    selector = Utils.createLambda(selector);

    var sum = 0;
    var count = 0;
    this.forEach(function (x) {
        sum += selector(x);
        ++count;
    });

    return sum / count;
};

// Overload:function()
// Overload:function(predicate)
// count: number of elements (optionally only those matching predicate).
Enumerable.prototype.count = function (predicate) {
    predicate = (predicate == null) ? Functions.True : Utils.createLambda(predicate);

    var count = 0;
    this.forEach(function (x, i) {
        if (predicate(x, i))++count;
    });
    return count;
};

// Overload:function()
// Overload:function(selector)
// max: largest selected value (by >); throws on empty via aggregate.
Enumerable.prototype.max = function (selector) {
    if (selector == null) selector = Functions.Identity;
    return this.select(selector).aggregate(function (a, b) { return (a > b) ? a : b; });
};

// Overload:function()
// Overload:function(selector)
// min: smallest selected value (by <).
Enumerable.prototype.min = function (selector) {
    if (selector == null) selector = Functions.Identity;
    return this.select(selector).aggregate(function (a, b) { return (a < b) ? a : b; });
};

// maxBy: the element (not the key) whose key is largest.
Enumerable.prototype.maxBy = function (keySelector) {
    keySelector = Utils.createLambda(keySelector);
    return this.aggregate(function (a, b) { return (keySelector(a) > keySelector(b)) ? a : b; });
};

// minBy: the element whose key is smallest.
Enumerable.prototype.minBy = function (keySelector) {
    keySelector = Utils.createLambda(keySelector);
    return this.aggregate(function (a, b) { return (keySelector(a) < keySelector(b)) ? a : b; });
};

// Overload:function()
// Overload:function(selector)
// sum: numeric sum of selected values, seeded with 0 (empty sequence -> 0).
Enumerable.prototype.sum = function (selector) {
    if (selector == null) selector = Functions.Identity;
    return this.select(selector).aggregate(0, function (a, b) { return a + b; });
};

/* Paging Methods */

// elementAt: element at zero-based `index`; throws when out of range.
Enumerable.prototype.elementAt = function (index) {
    var value;
    var found = false;
    this.forEach(function (x, i) {
        if (i == index) {
            value = x;
            found = true;
            return false;
        }
    });

    if (!found) throw new Error("index is less than 0 or greater than or equal to the number of elements in source.");
    return value;
};

// elementAtOrDefault: like elementAt, but returns defaultValue (null when
// omitted) instead of throwing.
Enumerable.prototype.elementAtOrDefault = function (index, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    var value;
    var found = false;
    this.forEach(function (x, i) {
        if (i == index) {
            value = x;
            found = true;
            return false;
        }
    });

    return (!found) ? defaultValue : value;
};

// Overload:function()
// Overload:function(predicate)
// first: first element (optionally first matching predicate); throws if none.
Enumerable.prototype.first = function (predicate) {
    if (predicate != null) return this.where(predicate).first();

    var value;
    var found = false;
    this.forEach(function (x) {
        value = x;
        found = true;
        return false;
    });

    if (!found) throw new Error("first:No element satisfies the condition.");
    return value;
};

// firstOrDefault: like first, but returns defaultValue instead of throwing.
Enumerable.prototype.firstOrDefault = function (predicate, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    if (predicate != null) return this.where(predicate).firstOrDefault(null, defaultValue);

    var value;
    var found = false;
    this.forEach(function (x) {
        value = x;
        found = true;
        return false;
    });
    return (!found) ? defaultValue : value;
};

// Overload:function()
// Overload:function(predicate)
// last: last element (full enumeration); throws if the sequence is empty.
Enumerable.prototype.last = function (predicate) {
    if (predicate != null) return this.where(predicate).last();

    var value;
    var found = false;
    this.forEach(function (x) {
        found = true;
        value = x;
    });

    if (!found) throw new Error("last:No element satisfies the condition.");
    return value;
};

// Overload:function(defaultValue)
// Overload:function(defaultValue,predicate)
// lastOrDefault: like last, but returns defaultValue instead of throwing.
Enumerable.prototype.lastOrDefault = function (predicate, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    if (predicate != null) return this.where(predicate).lastOrDefault(null, defaultValue);

    var value;
    var found = false;
    this.forEach(function (x) {
        found = true;
        value = x;
    });
    return (!found) ? defaultValue : value;
};

// Overload:function()
// Overload:function(predicate)
// single: the only element; throws on empty and on more than one element.
Enumerable.prototype.single = function (predicate) {
    if (predicate != null) return this.where(predicate).single();

    var value;
    var found = false;
    this.forEach(function (x) {
        if (!found) {
            found = true;
            value = x;
        }
        else throw new Error("single:sequence contains more than one element.");
    });

    if (!found) throw new Error("single:No element satisfies the condition.");
    return value;
};

// Overload:function(defaultValue)
// Overload:function(defaultValue,predicate)
// singleOrDefault: like single, but an empty sequence yields defaultValue;
// more than one element still throws.
Enumerable.prototype.singleOrDefault = function (predicate, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    if (predicate != null) return this.where(predicate).singleOrDefault(null, defaultValue);

    var value;
    var found = false;
    this.forEach(function (x) {
        if (!found) {
            found = true;
            value = x;
        }
        else throw new Error("single:sequence contains more than one element.");
    });

    return (!found) ? defaultValue : value;
};

// skip: bypasses the first `count` elements (skipped eagerly on first
// moveNext), then yields the rest.
Enumerable.prototype.skip = function (count) {
    var source = this;

    return new Enumerable(function () {
        var enumerator;
        var index = 0;

        return new IEnumerator(
            function () {
                enumerator = source.getEnumerator();
                while (index++ < count && enumerator.moveNext()) {
                }
                ;
            },
            function () {
                return (enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// Overload:function(predicate<element>)
// Overload:function(predicate<element,index>)
// skipWhile: skips elements while predicate holds, then yields everything
// from the first non-matching element on.
Enumerable.prototype.skipWhile = function (predicate) {
    predicate = Utils.createLambda(predicate);
    var source = this;

    return new Enumerable(function () {
        var enumerator;
        var index = 0;
        var isSkipEnd = false;

        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                while (!isSkipEnd) {
                    if (enumerator.moveNext()) {
                        if (!predicate(enumerator.current(), index++)) {
                            isSkipEnd = true;
                            return this.yieldReturn(enumerator.current());
                        }
                        continue;
                    }
                    else return false;
                }

                return (enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// take: yields at most the first `count` elements.
Enumerable.prototype.take = function (count) {
    var source = this;

    return new Enumerable(function () {
        var enumerator;
        var index = 0;

        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                return (index++ < count && enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : false;
            },
            function () { Utils.dispose(enumerator); }
        );
    });
};

// Overload:function(predicate<element>)
// Overload:function(predicate<element,index>)
// takeWhile: yields elements while predicate holds, then stops.
Enumerable.prototype.takeWhile = function (predicate) {
    predicate = Utils.createLambda(predicate);
    var source = this;

    return new Enumerable(function () {
        var enumerator;
        var index = 0;

        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                return (enumerator.moveNext() && predicate(enumerator.current(), index++)) ? this.yieldReturn(enumerator.current()) : false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// Overload:function()
// Overload:function(count)
// takeExceptLast: yields all but the last `count` (default 1) elements,
// using a sliding queue of size `count` as lookahead.
Enumerable.prototype.takeExceptLast = function (count) {
    if (count == null) count = 1;
    var source = this;

    return new Enumerable(function () {
        if (count <= 0) return source.getEnumerator(); // do nothing

        var enumerator;
        var q = [];

        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                while (enumerator.moveNext()) {
                    if (q.length == count) {
                        q.push(enumerator.current());
                        return this.yieldReturn(q.shift());
                    }
                    q.push(enumerator.current());
                }
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// takeFromLast: yields only the last `count` elements; drains the source
// into a bounded queue on the first moveNext.
Enumerable.prototype.takeFromLast = function (count) {
    if (count <= 0 || count == null) return Enumerable.empty();
    var source = this;

    return new Enumerable(function () {
        var sourceEnumerator;
        var enumerator;
        var q = [];

        return new IEnumerator(
            function () { sourceEnumerator = source.getEnumerator(); },
            function () {
                while (sourceEnumerator.moveNext()) {
                    if (q.length == count) q.shift();
                    q.push(sourceEnumerator.current());
                }
                if (enumerator == null) {
                    enumerator = Enumerable.from(q).getEnumerator();
                }
                return (enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// Overload:function(item)
// Overload:function(predicate)
// indexOf: zero-based index of the first match (strict equality, or a
// predicate when `item` is a function); -1 when not found.
Enumerable.prototype.indexOf = function (item) {
    var found = null;

    // item as predicate
    if (typeof (item) === Types.Function) {
        this.forEach(function (x, i) {
            if (item(x, i)) {
                found = i;
                return false;
            }
        });
    }
    else {
        this.forEach(function (x, i) {
            if (x === item) {
                found = i;
                return false;
            }
        });
    }

    return (found !== null) ? found : -1;
};

// Overload:function(item)
// Overload:function(predicate)
// lastIndexOf: zero-based index of the last match; -1 when not found.
Enumerable.prototype.lastIndexOf = function (item) {
    var result = -1;

    // item as predicate
    if (typeof (item) === Types.Function) {
        this.forEach(function (x, i) {
            if (item(x, i)) result = i;
        });
    }
    else {
        this.forEach(function (x, i) {
            if (x === item) result = i;
        });
    }

    return result;
};

/* Convert Methods */

// asEnumerable: wraps this object in a plain Enumerable.
Enumerable.prototype.asEnumerable = function () {
    return Enumerable.from(this);
};

// toArray: materializes the sequence into a new array.
Enumerable.prototype.toArray = function () {
    var array = [];
    this.forEach(function (x) { array.push(x); });
    return array;
};

// Overload:function(keySelector)
// Overload:function(keySelector, elementSelector)
// Overload:function(keySelector, elementSelector, compareSelector)
// toLookup: builds a key -> [elements] Lookup (duplicate keys accumulate).
Enumerable.prototype.toLookup = function (keySelector, elementSelector, compareSelector) {
    keySelector = Utils.createLambda(keySelector);
    elementSelector = Utils.createLambda(elementSelector);
    compareSelector = Utils.createLambda(compareSelector);

    var dict = new Dictionary(compareSelector);
    this.forEach(function (x) {
        var key = keySelector(x);
        var element = elementSelector(x);

        var array = dict.get(key);
        if (array !== undefined) array.push(element);
        else dict.add(key, [element]);
    });
    return new Lookup(dict);
};

// toObject: plain object of key -> element; later duplicate keys overwrite.
Enumerable.prototype.toObject = function (keySelector, elementSelector) {
    keySelector = Utils.createLambda(keySelector);
    elementSelector = Utils.createLambda(elementSelector);

    var obj = {};
    this.forEach(function (x) {
        obj[keySelector(x)] = elementSelector(x);
    });
    return obj;
};

// Overload:function(keySelector, elementSelector)
// Overload:function(keySelector, elementSelector, compareSelector)
// toDictionary: Dictionary of key -> element.
// NOTE(review): uses Dictionary.add for every key — behavior on duplicate
// keys depends on Dictionary.add (defined elsewhere); confirm.
Enumerable.prototype.toDictionary = function (keySelector, elementSelector, compareSelector) {
    keySelector = Utils.createLambda(keySelector);
    elementSelector = Utils.createLambda(elementSelector);
    compareSelector = Utils.createLambda(compareSelector);

    var dict = new Dictionary(compareSelector);
    this.forEach(function (x) {
        dict.add(keySelector(x), elementSelector(x));
    });
    return dict;
};

// Overload:function()
// Overload:function(replacer)
// Overload:function(replacer, space)
// toJSONString: JSON.stringify of toArray(); requires native JSON or json2.js.
Enumerable.prototype.toJSONString = function (replacer, space) {
    if (typeof JSON === Types.Undefined || JSON.stringify == null) {
        throw new Error("toJSONString can't find JSON.stringify. This works native JSON support Browser or include json2.js");
    }
    return JSON.stringify(this.toArray(), replacer, space);
};

// Overload:function()
// Overload:function(separator)
// Overload:function(separator,selector)
// toJoinedString: joins selected values with `separator` ("" by default).
Enumerable.prototype.toJoinedString = function (separator, selector) {
    if (separator == null) separator = "";
    if (selector == null) selector = Functions.Identity;

    return this.select(selector).toArray().join(separator);
};

/* Action Methods */

// Overload:function(action<element>)
// Overload:function(action<element,index>)
// doAction: invokes `action` on each element as a lazy side effect, passing
// the element through unchanged.
Enumerable.prototype.doAction = function (action) {
    var source = this;
    action = Utils.createLambda(action);

    return new Enumerable(function () {
        var enumerator;
        var index = 0;

        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                if (enumerator.moveNext()) {
                    action(enumerator.current(), index++);
                    return this.yieldReturn(enumerator.current());
                }
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// Overload:function(action<element>)
// Overload:function(action<element,index>)
// Overload:function(func<element,bool>)
// Overload:function(func<element,index,bool>)
// forEach: eager iteration; returning exactly false from `action` breaks
// the loop early. The enumerator is always disposed.
Enumerable.prototype.forEach = function (action) {
    action = Utils.createLambda(action);

    var index = 0;
    var enumerator = this.getEnumerator();
    try {
        while (enumerator.moveNext()) {
            if (action(enumerator.current(), index++) === false) break;
        }
    } finally {
        Utils.dispose(enumerator);
    }
};

// Overload:function()
// Overload:function(separator)
// Overload:function(separator,selector)
// write: document.write of each selected value, separator-delimited
// (browser-only).
Enumerable.prototype.write = function (separator, selector) {
    if (separator == null) separator = "";
    selector = Utils.createLambda(selector);

    var isFirst = true;
    this.forEach(function (item) {
        if (isFirst) isFirst = false;
        else document.write(separator);
        document.write(selector(item));
    });
};

// Overload:function()
// Overload:function(selector)
// writeLine: document.writeln of each selected value plus "<br />"
// (browser-only).
Enumerable.prototype.writeLine = function (selector) {
    selector = Utils.createLambda(selector);

    this.forEach(function (item) {
        document.writeln(selector(item) + "<br />");
    });
};

// force: fully enumerates the sequence, discarding values (runs deferred
// side effects such as doAction).
Enumerable.prototype.force = function () {
    var enumerator = this.getEnumerator();

    try {
        while (enumerator.moveNext()) {
        }
    }
    finally {
        Utils.dispose(enumerator);
    }
};

/* Functional Methods */

// letBind: passes the whole sequence to `func` and enumerates its result
// (lets a pipeline reference the source more than once).
Enumerable.prototype.letBind = function (func) {
    func = Utils.createLambda(func);
    var source = this;

    return new Enumerable(function () {
        var enumerator;

        return new IEnumerator(
            function () {
                enumerator = Enumerable.from(func(source)).getEnumerator();
            },
            function () {
                return (enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// share: all enumerators returned by the result share one underlying source
// enumerator; disposing the shared wrapper invalidates them all.
Enumerable.prototype.share = function () {
    var source = this;
    var sharedEnumerator;
    var disposed = false;

    return new DisposableEnumerable(function () {
        return new IEnumerator(
            function () {
                if (sharedEnumerator == null) {
                    sharedEnumerator = source.getEnumerator();
                }
            },
            function () {
                if (disposed) throw new Error("enumerator is disposed");

                return (sharedEnumerator.moveNext()) ? this.yieldReturn(sharedEnumerator.current()) : false;
            },
            Functions.Blank
        );
    }, function () {
        disposed = true;
        Utils.dispose(sharedEnumerator);
    });
};

// memoize: enumerates the source at most once, caching results so repeated
// enumerations replay from the cache; dispose clears the cache.
Enumerable.prototype.memoize = function () {
    var source = this;
    var cache;
    var enumerator;
    var disposed = false;

    return new DisposableEnumerable(function () {
        var index = -1;

        return new IEnumerator(
            function () {
                if (enumerator == null) {
                    enumerator = source.getEnumerator();
                    cache = [];
                }
            },
            function () {
                if (disposed) throw new Error("enumerator is disposed");

                index++;
                if (cache.length <= index) {
                    return (enumerator.moveNext()) ? this.yieldReturn(cache[index] = enumerator.current()) : false;
                }

                return this.yieldReturn(cache[index]);
            },
            Functions.Blank
        );
    }, function () {
        disposed = true;
        Utils.dispose(enumerator);
        cache = null;
    });
};

/* Error Handling Methods */

// catchError: enumerates the source; on a thrown error, invokes `handler`
// with the error and ends the sequence (the error is swallowed).
Enumerable.prototype.catchError = function (handler) {
    handler = Utils.createLambda(handler);
    var source = this;

    return new Enumerable(function () {
        var enumerator;

        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                try {
                    return (enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : false;
                } catch (e) {
                    handler(e);
                    return false;
                }
            },
            function () { Utils.dispose(enumerator); });
    });
};

// finallyAction: runs `finallyAction` when the enumerator is disposed,
// after disposing the source enumerator.
Enumerable.prototype.finallyAction = function (finallyAction) {
    finallyAction = Utils.createLambda(finallyAction);
    var source = this;

    return new Enumerable(function () {
        var enumerator;

        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                return (enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : false;
            },
            function () {
                try {
                    Utils.dispose(enumerator);
                } finally {
                    finallyAction();
                }
            });
    });
};

/* For Debug Methods */

// Overload:function()
// Overload:function(selector)
// log: console.log of each selected value as a lazy side effect.
Enumerable.prototype.log = function (selector) {
    selector = Utils.createLambda(selector);

    return this.doAction(function (item) {
        if (typeof console !== Types.Undefined) {
            console.log(selector(item));
        }
    });
};

// Overload:function()
// Overload:function(message)
// Overload:function(message,selector)
// trace: console.log of "message, value" for each element ("Trace" default).
Enumerable.prototype.trace = function (message, selector) {
    if (message == null) message = "Trace";
    selector = Utils.createLambda(selector);

    return this.doAction(function (item) {
        if (typeof console !== Types.Undefined) {
            console.log(message, selector(item));
        }
    });
};

// private
// OrderedEnumerable: deferred sort specification; `parent` links to the
// previous ordering level so thenBy/thenByDescending can chain keys.
var OrderedEnumerable = function (source, keySelector, descending, parent) {
    this.source = source;
    this.keySelector = Utils.createLambda(keySelector);
    this.descending = descending;
    this.parent = parent;
};
OrderedEnumerable.prototype = new Enumerable();

// Adds a subordinate sort key; `this` becomes the new level's parent.
OrderedEnumerable.prototype.createOrderedEnumerable = function (keySelector, descending) {
    return new OrderedEnumerable(this.source, keySelector, descending, this);
};

// thenBy: secondary ascending key.
OrderedEnumerable.prototype.thenBy = function (keySelector) {
    return this.createOrderedEnumerable(keySelector, false);
};

// thenByDescending: secondary descending key.
OrderedEnumerable.prototype.thenByDescending = function (keySelector) {
    return this.createOrderedEnumerable(keySelector, true);
};

// Sorts lazily on first moveNext: buffers the source, builds the chained
// SortContext, pre-computes all keys, then sorts an index array so elements
// are yielded in order without moving them.
OrderedEnumerable.prototype.getEnumerator = function () {
    var self = this;
    var buffer;
    var indexes;
    var index = 0;

    return new IEnumerator(
        function () {
            buffer = [];
            indexes = [];
            self.source.forEach(function (item, index) {
                buffer.push(item);
                indexes.push(index);
            });
            var sortContext = SortContext.create(self, null);
            sortContext.GenerateKeys(buffer);

            indexes.sort(function (a, b) { return sortContext.compare(a, b); });
        },
        function () {
            return (index < indexes.length) ? this.yieldReturn(buffer[indexes[index++]]) : false;
        },
        Functions.Blank
    );
};

// SortContext: one node per sort key in the thenBy chain; `child` is the
// next (lower-priority) key, `keys` the pre-computed key per element.
var SortContext = function (keySelector, descending, child) {
    this.keySelector = keySelector;
    this.descending = descending;
    this.child = child;
    this.keys = null;
};

// Builds the context chain from the outermost OrderedEnumerable down, so the
// primary key ends up at the head of the chain.
SortContext.create = function (orderedEnumerable, currentContext) {
    var context = new SortContext(orderedEnumerable.keySelector, orderedEnumerable.descending, currentContext);
    if (orderedEnumerable.parent != null) return SortContext.create(orderedEnumerable.parent, context);
    return context;
};

// Pre-computes this level's key for every element, then recurses.
SortContext.prototype.GenerateKeys = function (source) {
    var len = source.length;
    var keySelector = this.keySelector;
    var keys = new Array(len);
    for (var i = 0; i < len; i++) keys[i] = keySelector(source[i]);
    this.keys = keys;

    if (this.child != null) this.child.GenerateKeys(source);
};

// Compares by this level's keys; ties fall through to the child level, and
// finally to the original positions — making the overall sort stable.
SortContext.prototype.compare = function (index1, index2) {
    var comparison = Utils.compare(this.keys[index1], this.keys[index2]);

    if (comparison == 0) {
        if (this.child != null) return this.child.compare(index1, index2);
        return Utils.compare(index1, index2);
    }

    return (this.descending) ? -comparison : comparison;
};

// DisposableEnumerable: Enumerable with an attached dispose (used by
// share/memoize).
var DisposableEnumerable = function (getEnumerator, dispose) {
    this.dispose = dispose;
    Enumerable.call(this, getEnumerator);
};
DisposableEnumerable.prototype = new Enumerable();

// optimize array or arraylike object
// ArrayEnumerable: wraps an array(-like) and overrides methods that can use
// length/index access directly instead of full enumeration.
var ArrayEnumerable = function (source) {
    this.getSource = function () { return source; };
};
ArrayEnumerable.prototype = new Enumerable();

// any: O(1) length check when no predicate is given.
ArrayEnumerable.prototype.any = function (predicate) {
    return (predicate == null) ? (this.getSource().length > 0) : Enumerable.prototype.any.apply(this, arguments);
};

// count: O(1) length when no predicate is given.
ArrayEnumerable.prototype.count = function (predicate) {
    return (predicate == null) ? this.getSource().length : Enumerable.prototype.count.apply(this, arguments);
};

// elementAt: direct index access when in range; base impl throws otherwise.
ArrayEnumerable.prototype.elementAt = function (index) {
    var source = this.getSource();
    return (0 <= index && index < source.length) ? source[index] : Enumerable.prototype.elementAt.apply(this, arguments);
};

// elementAtOrDefault: direct index access or defaultValue.
ArrayEnumerable.prototype.elementAtOrDefault = function (index, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    var source = this.getSource();
    return (0 <= index && index < source.length) ? source[index] : defaultValue;
};

// first: source[0] fast path when no predicate is given.
ArrayEnumerable.prototype.first = function (predicate) {
    var source = this.getSource();
    return (predicate == null && source.length > 0) ? source[0] : Enumerable.prototype.first.apply(this, arguments);
};

ArrayEnumerable.prototype.firstOrDefault = function (predicate, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    if (predicate != null) {
        return Enumerable.prototype.firstOrDefault.apply(this, arguments);
    }

    var source = this.getSource();
    return source.length > 0 ? source[0] : defaultValue;
};

// last: source[length - 1] fast path when no predicate is given.
ArrayEnumerable.prototype.last = function (predicate) {
    var source = this.getSource();
    return (predicate == null && source.length > 0) ? source[source.length - 1] : Enumerable.prototype.last.apply(this, arguments);
};

ArrayEnumerable.prototype.lastOrDefault = function (predicate, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    if (predicate != null) {
        return Enumerable.prototype.lastOrDefault.apply(this, arguments);
    }

    var source = this.getSource();
    return source.length > 0 ? source[source.length - 1] : defaultValue;
};

// skip: starts iteration at `count` instead of pre-draining an enumerator.
ArrayEnumerable.prototype.skip = function (count) {
    var source = this.getSource();

    return new Enumerable(function () {
        var index;

        return new IEnumerator(
            function () { index = (count < 0) ? 0 : count; },
            function () {
                return (index < source.length) ? this.yieldReturn(source[index++]) : false;
            },
            Functions.Blank);
    });
};

// takeExceptLast: length is known, so this reduces to take().
ArrayEnumerable.prototype.takeExceptLast = function (count) {
    if (count == null) count = 1;
    return this.take(this.getSource().length - count);
};

// takeFromLast: length is known, so this reduces to skip().
ArrayEnumerable.prototype.takeFromLast = function (count) {
    return this.skip(this.getSource().length - count);
};

// reverse: indexes the array backwards without copying it.
ArrayEnumerable.prototype.reverse = function () {
    var source = this.getSource();

    return new Enumerable(function () {
        var index;

        return new IEnumerator(
            function () {
                index = source.length;
            },
            function () {
                return (index > 0) ? this.yieldReturn(source[--index]) : false;
            },
            Functions.Blank);
    });
};

// sequenceEqual: cheap length pre-check when both sides are arrays and no
// compareSelector is involved; otherwise defers to the base implementation.
ArrayEnumerable.prototype.sequenceEqual = function (second, compareSelector) {
    if ((second instanceof ArrayEnumerable || second instanceof Array)
        && compareSelector == null
        && Enumerable.from(second).count() != this.count()) {
        return false;
    }

    return Enumerable.prototype.sequenceEqual.apply(this, arguments);
};

// toJoinedString: native Array.join fast path when no selector is given.
ArrayEnumerable.prototype.toJoinedString = function (separator, selector) {
    var source = this.getSource();
    if (selector != null || !(source instanceof Array)) {
        return Enumerable.prototype.toJoinedString.apply(this, arguments);
    }

    if (separator == null) separator = "";
    return source.join(separator);
};

ArrayEnumerable.prototype.getEnumerator = function () {
    var source = this.getSource();
    var index = -1;

    // fast and simple enumerator
    return {
        current: function () { return source[index]; },
        moveNext: function () {
            return ++index < source.length;
        },
        dispose: Functions.Blank
    };
};

// optimization for multiple where and multiple select and whereselect
// WhereEnumerable: fuses chained where() calls into one composed predicate
// so only a single enumerator pass is needed.
var WhereEnumerable = function (source, predicate) {
    this.prevSource = source;
    this.prevPredicate = predicate; // predicate.length always <= 1
};
WhereEnumerable.prototype = new Enumerable();

WhereEnumerable.prototype.where = function (predicate) {
    predicate = Utils.createLambda(predicate);

    if (predicate.length <= 1) {
        var prevPredicate = this.prevPredicate;
        var composedPredicate = function (x) { return prevPredicate(x) && predicate(x); };
        return new WhereEnumerable(this.prevSource, composedPredicate);
    }
    else {
        // if predicate use index, can't compose
        return Enumerable.prototype.where.call(this, predicate);
    }
};

WhereEnumerable.prototype.select = function (selector) {
    selector = Utils.createLambda(selector);

    return (selector.length <= 1) ? new WhereSelectEnumerable(this.prevSource, this.prevPredicate, selector)
        : Enumerable.prototype.select.call(this, selector);
};

WhereEnumerable.prototype.getEnumerator = function () {
    var predicate = this.prevPredicate;
    var source = this.prevSource;
    var enumerator;

    return new IEnumerator(
        function () { enumerator = source.getEnumerator(); },
        function () {
            while (enumerator.moveNext()) {
                if (predicate(enumerator.current())) {
                    return this.yieldReturn(enumerator.current());
                }
            }
            return false;
        },
        function () { Utils.dispose(enumerator); });
};

// WhereSelectEnumerable: fused where().select() pipeline; further select()
// calls compose the selector, further where() calls wrap it.
var WhereSelectEnumerable = function (source, predicate, selector) {
    this.prevSource = source;
    this.prevPredicate = predicate; // predicate.length always <= 1 or null
    this.prevSelector = selector; // selector.length always <= 1
};
WhereSelectEnumerable.prototype = new Enumerable();

WhereSelectEnumerable.prototype.where = function (predicate) {
    predicate = Utils.createLambda(predicate);

    return (predicate.length <= 1) ? new WhereEnumerable(this, predicate)
        : Enumerable.prototype.where.call(this, predicate);
};

WhereSelectEnumerable.prototype.select = function (selector) {
    selector = Utils.createLambda(selector);

    if (selector.length <= 1) {
        var prevSelector = this.prevSelector;
        var composedSelector = function (x) { return selector(prevSelector(x)); };
        return new WhereSelectEnumerable(this.prevSource, this.prevPredicate, composedSelector);
    }
    else {
        // if selector use index, can't compose
        return Enumerable.prototype.select.call(this, selector);
    }
};

WhereSelectEnumerable.prototype.getEnumerator = function () {
    var predicate = this.prevPredicate;
    var selector = this.prevSelector;
    var source = this.prevSource;
    var enumerator;

    return new IEnumerator(
        function () { enumerator = source.getEnumerator(); },
        function () {
            while (enumerator.moveNext()) {
                if (predicate == null || predicate(enumerator.current())) {
                    return this.yieldReturn(selector(enumerator.current()));
                }
            }
            return false;
        },
        function () { Utils.dispose(enumerator); });
};

// 
Collections var Dictionary = (function () { // static utility methods var callHasOwnProperty = function (target, key) { return Object.prototype.hasOwnProperty.call(target, key); }; var computeHashCode = function (obj) { if (obj === null) return "null"; if (obj === undefined) return "undefined"; return (typeof obj.toString === Types.Function) ? obj.toString() : Object.prototype.toString.call(obj); }; // LinkedList for Dictionary var HashEntry = function (key, value) { this.key = key; this.value = value; this.prev = null; this.next = null; }; var EntryList = function () { this.first = null; this.last = null; }; EntryList.prototype = { addLast: function (entry) { if (this.last != null) { this.last.next = entry; entry.prev = this.last; this.last = entry; } else this.first = this.last = entry; }, replace: function (entry, newEntry) { if (entry.prev != null) { entry.prev.next = newEntry; newEntry.prev = entry.prev; } else this.first = newEntry; if (entry.next != null) { entry.next.prev = newEntry; newEntry.next = entry.next; } else this.last = newEntry; }, remove: function (entry) { if (entry.prev != null) entry.prev.next = entry.next; else this.first = entry.next; if (entry.next != null) entry.next.prev = entry.prev; else this.last = entry.prev; } }; // Overload:function() // Overload:function(compareSelector) var Dictionary = function (compareSelector) { this.countField = 0; this.entryList = new EntryList(); this.buckets = {}; // as Dictionary<string,List<object>> this.compareSelector = (compareSelector == null) ? 
Functions.Identity : compareSelector; }; Dictionary.prototype = { add: function (key, value) { var compareKey = this.compareSelector(key); var hash = computeHashCode(compareKey); var entry = new HashEntry(key, value); if (callHasOwnProperty(this.buckets, hash)) { var array = this.buckets[hash]; for (var i = 0; i < array.length; i++) { if (this.compareSelector(array[i].key) === compareKey) { this.entryList.replace(array[i], entry); array[i] = entry; return; } } array.push(entry); } else { this.buckets[hash] = [entry]; } this.countField++; this.entryList.addLast(entry); }, get: function (key) { var compareKey = this.compareSelector(key); var hash = computeHashCode(compareKey); if (!callHasOwnProperty(this.buckets, hash)) return undefined; var array = this.buckets[hash]; for (var i = 0; i < array.length; i++) { var entry = array[i]; if (this.compareSelector(entry.key) === compareKey) return entry.value; } return undefined; }, set: function (key, value) { var compareKey = this.compareSelector(key); var hash = computeHashCode(compareKey); if (callHasOwnProperty(this.buckets, hash)) { var array = this.buckets[hash]; for (var i = 0; i < array.length; i++) { if (this.compareSelector(array[i].key) === compareKey) { var newEntry = new HashEntry(key, value); this.entryList.replace(array[i], newEntry); array[i] = newEntry; return true; } } } return false; }, contains: function (key) { var compareKey = this.compareSelector(key); var hash = computeHashCode(compareKey); if (!callHasOwnProperty(this.buckets, hash)) return false; var array = this.buckets[hash]; for (var i = 0; i < array.length; i++) { if (this.compareSelector(array[i].key) === compareKey) return true; } return false; }, clear: function () { this.countField = 0; this.buckets = {}; this.entryList = new EntryList(); }, remove: function (key) { var compareKey = this.compareSelector(key); var hash = computeHashCode(compareKey); if (!callHasOwnProperty(this.buckets, hash)) return; var array = this.buckets[hash]; for (var 
i = 0; i < array.length; i++) { if (this.compareSelector(array[i].key) === compareKey) { this.entryList.remove(array[i]); array.splice(i, 1); if (array.length == 0) delete this.buckets[hash]; this.countField--; return; } } }, count: function () { return this.countField; }, toEnumerable: function () { var self = this; return new Enumerable(function () { var currentEntry; return new IEnumerator( function () { currentEntry = self.entryList.first; }, function () { if (currentEntry != null) { var result = { key: currentEntry.key, value: currentEntry.value }; currentEntry = currentEntry.next; return this.yieldReturn(result); } return false; }, Functions.Blank); }); } }; return Dictionary; })(); // dictionary = Dictionary<TKey, TValue[]> var Lookup = function (dictionary) { this.count = function () { return dictionary.count(); }; this.get = function (key) { return Enumerable.from(dictionary.get(key)); }; this.contains = function (key) { return dictionary.contains(key); }; this.toEnumerable = function () { return dictionary.toEnumerable().select(function (kvp) { return new Grouping(kvp.key, kvp.value); }); }; }; var Grouping = function (groupKey, elements) { this.key = function () { return groupKey; }; ArrayEnumerable.call(this, elements); }; Grouping.prototype = new ArrayEnumerable(); // module export if (typeof define === Types.Function && define.amd) { // AMD define("linqjs", [], function () { return Enumerable; }); } else if (typeof module !== Types.Undefined && module.exports) { // Node module.exports = Enumerable; } else { root.Enumerable = Enumerable; } })(this);
Java
////////////////////////////////////////////////////////////////////////////
// Module      : movement_manager.h
// Created     : 02.10.2001
// Modified    : 12.11.2003
// Author      : Dmitriy Iassenev
// Description : Movement manager
////////////////////////////////////////////////////////////////////////////

#pragma once

#include "ai_monster_space.h"
#include "graph_engine_space.h"
#include "game_graph_space.h"

namespace MovementManager { enum EPathType; };
namespace DetailPathManager { enum EDetailPathType; };

// Forward declarations of the path-finding templates this manager composes.
template < typename _Graph, typename _VertexEvaluator, typename _vertex_id_type > class CBaseLocationSelector;
template < typename _Graph, typename _VertexEvaluator, typename _vertex_id_type, typename _index_type > class CBasePathManager;
template < typename _dist_type, typename _index_type, typename _iteration_type > struct SVertexType;
template < typename _dist_type, typename _index_type, typename _iteration_type > struct SBaseParameters;
template < typename _dist_type, typename _index_type, typename _iteration_type > struct SGameVertex;

class CEnemyLocationPredictor;
class CPatrolPathManager;
class CDetailPathManager;
class CPHMovementControl;
class CGameGraph;
class CLevelGraph;
class CRestrictedObject;
class CLocationManager;
class CCustomMonster;

namespace DetailPathManager { struct STravelPathPoint; };

class CLevelPathBuilder;
class CDetailPathBuilder;

// Drives monster movement: builds a game-graph path, refines it into a level
// path and then a detail (travel-point) path, and feeds the result to the
// physics movement control each frame.
class CMovementManager {
private:
    friend class CLevelPathBuilder;
    friend class CDetailPathBuilder;

protected:
    // Shorthand typedefs for the concrete selector/path-manager instantiations.
    typedef MonsterSpace::SBoneRotation CBoneRotation;
    typedef MovementManager::EPathType EPathType;
    typedef DetailPathManager::STravelPathPoint CTravelPathPoint;
    typedef GraphEngineSpace::CBaseParameters CBaseParameters;
    typedef GraphEngineSpace::CGameVertexParams CGameVertexParams;
    typedef CBaseLocationSelector< CGameGraph, SGameVertex< float, u32, u32 >, u32 > CGameLocationSelector;
    typedef CBasePathManager< CGameGraph, SGameVertex< float, u32, u32 >, u32, u32 > CGamePathManager;
    typedef CBasePathManager< CLevelGraph, SBaseParameters< float, u32, u32 >, u32, u32 > CLevelPathManager;

private:
    // Stages of path construction, from game-graph vertex selection down to
    // the completed detail path.
    enum EPathState {
        ePathStateSelectGameVertex = u32(0),
        ePathStateBuildGamePath,
        ePathStateContinueGamePath,
        ePathStateSelectPatrolPoint,
        ePathStateBuildLevelPath,
        ePathStateContinueLevelPath,
        ePathStateBuildDetailPath,
        ePathStatePathVerification,
        ePathStatePathCompleted,
        ePathStateTeleport,
        ePathStateDummy = u32(-1),
    };

protected:
    typedef xr_vector<CObject*> NEAREST_OBJECTS;
protected:
    NEAREST_OBJECTS m_nearest_objects;
protected:
    float m_speed;
public:
    CBoneRotation m_body;
protected:
    // False when the current path no longer matches the destination and must
    // be rebuilt.
    bool m_path_actuality;
private:
    EPathState m_path_state;
    EPathType m_path_type;
    bool m_enabled;
    Fvector m_on_disable_object_position;
    float m_old_desirable_speed;
    bool m_extrapolate_path;
    bool m_build_at_once;
    bool m_wait_for_distributed_computation;
public:
    // Sub-managers and builders composed by this class (raw pointers; the
    // owning/initialisation code is outside this header — see the .cpp).
    CGameVertexParams *m_base_game_selector;
    CBaseParameters *m_base_level_selector;
    CGameLocationSelector *m_game_location_selector;
    CGamePathManager *m_game_path_manager;
    CLevelPathManager *m_level_path_manager;
    CDetailPathManager *m_detail_path_manager;
    CPatrolPathManager *m_patrol_path_manager;
    CRestrictedObject *m_restricted_object;
    CLocationManager *m_location_manager;
    CLevelPathBuilder *m_level_path_builder;
    CDetailPathBuilder *m_detail_path_builder;
    CCustomMonster *m_object;
private:
    void process_game_path ();
    void process_level_path ();
    void process_patrol_path ();
#ifdef USE_FREE_IN_RESTRICTIONS
    void verify_detail_path ();
#endif // USE_FREE_IN_RESTRICTIONS
    void apply_collision_hit (CPHMovementControl *movement_control);
protected:
    virtual void teleport (u32 game_vertex_id);
public:
    CMovementManager (CCustomMonster *object);
    virtual ~CMovementManager ();
    virtual void Load (LPCSTR caSection);
    virtual void reinit ();
    virtual void reload (LPCSTR caSection);
    virtual BOOL net_Spawn (CSE_Abstract* data);
    virtual void net_Destroy ();
    virtual void on_frame (CPHMovementControl *movement_control, Fvector &dest_position);
    IC bool actual () const;
    bool actual_all () const;
    IC void set_path_type (EPathType path_type);
    void set_game_dest_vertex (const GameGraph::_GRAPH_ID &game_vertex_id);
    void set_level_dest_vertex (const u32 level_vertex_id);
    IC void set_build_path_at_once ();
    void enable_movement (bool enabled);
    EPathType path_type () const;
    GameGraph::_GRAPH_ID game_dest_vertex_id () const;
    u32 level_dest_vertex_id () const;
    IC bool enabled () const;
    IC bool path_completed () const;
    IC float old_desirable_speed () const;
    IC void set_desirable_speed (float speed);
    const xr_vector<CTravelPathPoint> &path () const;
    IC void set_body_orientation (const MonsterSpace::SBoneRotation &orientation);
    IC const CBoneRotation &body_orientation() const;
    void update_path ();
    virtual void move_along_path (CPHMovementControl *movement_control, Fvector &dest_position, float time_delta);
    IC float speed () const;
    float speed (CPHMovementControl *movement_control) const;
    virtual void on_travel_point_change (const u32 &previous_travel_point_index);
    virtual void on_build_path () {}
    template <typename T>
    IC bool accessible (T position_or_vertex_id, float radius = EPS_L) const;
    IC void extrapolate_path (bool value);
    IC bool extrapolate_path () const;
    bool distance_to_destination_greater (const float &distance_to_check) const;
    IC bool wait_for_distributed_computation () const;
    // NOTE(review): "compuations" [sic] — misspelled name kept as-is for
    // compatibility with existing callers/overriders.
    virtual bool can_use_distributed_compuations (u32 option) const;
    void clear_path ();
public:
    // Accessors for the composed selectors, path managers and builders.
    IC CGameVertexParams *base_game_params () const;
    IC CBaseParameters *base_level_params () const;
    IC CGameLocationSelector &game_selector () const;
    IC CGamePathManager &game_path () const;
    IC CLevelPathManager &level_path () const;
    IC CDetailPathManager &detail () const;
    IC CPatrolPathManager &patrol () const;
    IC CRestrictedObject &restrictions () const;
    IC CLocationManager &locations () const;
    IC CCustomMonster &object () const;
    IC CLevelPathBuilder &level_path_builder () const;
    IC CDetailPathBuilder &detail_path_builder () const;
public:
    virtual void on_restrictions_change ();
};

#include "movement_manager_inline.h"
Java
import sbt._

/** Centralised version numbers for the build. */
object Version {
  val logbackVer   = "1.2.3"
  val mUnitVer     = "0.7.25"
  val scalaVersion = "3.0.0-RC3"
}

/** Shared library dependency definitions for sbt projects. */
object Dependencies {
  // Logging backend (compile scope).
  private val logbackDeps: Seq[ModuleID] =
    Seq("ch.qos.logback" % "logback-classic" % Version.logbackVer)

  // Test framework (test scope only).
  private val munitDeps: Seq[ModuleID] =
    Seq("org.scalameta" %% "munit" % Version.mUnitVer % Test)

  val dependencies: Seq[ModuleID]      = logbackDeps ++ munitDeps
  val crossDependencies: Seq[ModuleID] = Seq.empty
}
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Frameset//EN" "http://www.w3.org/TR/REC-html40/frameset.dtd">
<HTML>
<HEAD>
<meta name="generator" content="JDiff v1.0.9">
<!-- Generated by the JDiff Javadoc doclet -->
<!-- (http://www.jdiff.org) -->
<meta name="description" content="JDiff is a Javadoc doclet which generates an HTML report of all the packages, classes, constructors, methods, and fields which have been removed, added or changed in any way, including their documentation, when two APIs are compared.">
<meta name="keywords" content="diff, jdiff, javadiff, java diff, java difference, API difference, difference between two APIs, API diff, Javadoc, doclet">
<TITLE> Constructor Additions Index </TITLE>
<LINK REL="stylesheet" TYPE="text/css" HREF="../stylesheet-jdiff.css" TITLE="Style">
</HEAD>
<BODY>
<a NAME="topheader"></a>
<table summary="Index for Constructors" width="100%" border="0" cellspacing="0" cellpadding="0">
<tr> <td bgcolor="#FFFFCC"> <font size="+1"><a href="constructors_index_all.html" class="staysblack">All Constructors</a></font> </td> </tr>
<tr> <td bgcolor="#FFFFFF"> <FONT SIZE="-1"> <A HREF="constructors_index_removals.html" class="hiddenlink">Removals</A> </FONT> </td> </tr>
<tr> <td bgcolor="#FFFFFF"> <FONT SIZE="-1"> <b>Additions</b> </FONT> </td> </tr>
<tr> <td bgcolor="#FFFFFF"> <FONT SIZE="-1"> <A HREF="constructors_index_changes.html" class="hiddenlink">Changes</A> </FONT> </td> </tr>
<tr> <td> <font size="-2"><b>Bold</b>&nbsp;is&nbsp;New,&nbsp;<strike>strike</strike>&nbsp;is&nbsp;deleted</font> </td> </tr>
</table><br>
<A NAME="C"></A>
<br><font size="+2">C</font>&nbsp;
<a href="#F"><font size="-2">F</font></a>
<a href="#H"><font size="-2">H</font></a>
<a href="#I"><font size="-2">I</font></a>
<a href="#P"><font size="-2">P</font></a>
<a href="#R"><font size="-2">R</font></a>
<a href="#S"><font size="-2">S</font></a>
<a href="#Z"><font size="-2">Z</font></a>
<a href="#topheader"><font size="-2">TOP</font></a>
<br>
<nobr><A
HREF="org.apache.hadoop.io.compress.bzip2.CBZip2InputStream.html#org.apache.hadoop.io.compress.bzip2.CBZip2InputStream.ctor_added(java.io.InputStream, org.apache.hadoop.io.compress.SplittableCompressionCodec.READ_MODE)" class="hiddenlink" target="rightframe"><b>CBZip2InputStream</b> (<code>InputStream, READ_MODE</code>)</A></nobr>&nbsp;constructor<br> <A NAME="F"></A> <br><font size="+2">F</font>&nbsp; <a href="#C"><font size="-2">C</font></a> <a href="#H"><font size="-2">H</font></a> <a href="#I"><font size="-2">I</font></a> <a href="#P"><font size="-2">P</font></a> <a href="#R"><font size="-2">R</font></a> <a href="#S"><font size="-2">S</font></a> <a href="#Z"><font size="-2">Z</font></a> <a href="#topheader"><font size="-2">TOP</font></a> <br> <i>FsPermission</i><br> &nbsp;&nbsp;<nobr><A HREF="org.apache.hadoop.fs.permission.FsPermission.html#org.apache.hadoop.fs.permission.FsPermission.ctor_added(java.lang.String)" class="hiddenlink" target="rightframe"><b>FsPermission</b> (<code>String</code>)</A></nobr>&nbsp;constructor<br> &nbsp;&nbsp;<nobr><A HREF="org.apache.hadoop.fs.permission.FsPermission.html#org.apache.hadoop.fs.permission.FsPermission.ctor_added(org.apache.hadoop.fs.permission.FsAction, org.apache.hadoop.fs.permission.FsAction, org.apache.hadoop.fs.permission.FsAction, boolean)" class="hiddenlink" target="rightframe"><b>FsPermission</b> (<code>FsAction, FsAction, FsAction, boolean</code>)</A></nobr>&nbsp;constructor<br> <A NAME="H"></A> <br><font size="+2">H</font>&nbsp; <a href="#C"><font size="-2">C</font></a> <a href="#F"><font size="-2">F</font></a> <a href="#I"><font size="-2">I</font></a> <a href="#P"><font size="-2">P</font></a> <a href="#R"><font size="-2">R</font></a> <a href="#S"><font size="-2">S</font></a> <a href="#Z"><font size="-2">Z</font></a> <a href="#topheader"><font size="-2">TOP</font></a> <br> <i>HttpServer</i><br> &nbsp;&nbsp;<nobr><A 
HREF="org.apache.hadoop.http.HttpServer.html#org.apache.hadoop.http.HttpServer.ctor_added(java.lang.String, java.lang.String, int, boolean, org.apache.hadoop.conf.Configuration, org.apache.hadoop.security.authorize.AccessControlList)" class="hiddenlink" target="rightframe"><b>HttpServer</b> (<code>String, String, int, boolean, Configuration, AccessControlList</code>)</A></nobr>&nbsp;constructor<br> &nbsp;&nbsp;<nobr><A HREF="org.apache.hadoop.http.HttpServer.html#org.apache.hadoop.http.HttpServer.ctor_added(java.lang.String, java.lang.String, int, boolean, org.apache.hadoop.conf.Configuration, org.apache.hadoop.security.authorize.AccessControlList, org.mortbay.jetty.Connector)" class="hiddenlink" target="rightframe"><b>HttpServer</b> (<code>String, String, int, boolean, Configuration, AccessControlList, Connector</code>)</A></nobr>&nbsp;constructor<br> &nbsp;&nbsp;<nobr><A HREF="org.apache.hadoop.http.HttpServer.html#org.apache.hadoop.http.HttpServer.ctor_added(java.lang.String, java.lang.String, int, boolean, org.apache.hadoop.conf.Configuration, org.mortbay.jetty.Connector)" class="hiddenlink" target="rightframe"><b>HttpServer</b> (<code>String, String, int, boolean, Configuration, Connector</code>)</A></nobr>&nbsp;constructor<br> <A NAME="I"></A> <br><font size="+2">I</font>&nbsp; <a href="#C"><font size="-2">C</font></a> <a href="#F"><font size="-2">F</font></a> <a href="#H"><font size="-2">H</font></a> <a href="#P"><font size="-2">P</font></a> <a href="#R"><font size="-2">R</font></a> <a href="#S"><font size="-2">S</font></a> <a href="#Z"><font size="-2">Z</font></a> <a href="#topheader"><font size="-2">TOP</font></a> <br> <i>InvalidJobConfException</i><br> &nbsp;&nbsp;<nobr><A HREF="org.apache.hadoop.mapred.InvalidJobConfException.html#org.apache.hadoop.mapred.InvalidJobConfException.ctor_added(java.lang.String, java.lang.Throwable)" class="hiddenlink" target="rightframe"><b>InvalidJobConfException</b> (<code>String, 
Throwable</code>)</A></nobr>&nbsp;constructor<br> &nbsp;&nbsp;<nobr><A HREF="org.apache.hadoop.mapred.InvalidJobConfException.html#org.apache.hadoop.mapred.InvalidJobConfException.ctor_added(java.lang.Throwable)" class="hiddenlink" target="rightframe"><b>InvalidJobConfException</b> (<code>Throwable</code>)</A></nobr>&nbsp;constructor<br> <A NAME="P"></A> <br><font size="+2">P</font>&nbsp; <a href="#C"><font size="-2">C</font></a> <a href="#F"><font size="-2">F</font></a> <a href="#H"><font size="-2">H</font></a> <a href="#I"><font size="-2">I</font></a> <a href="#R"><font size="-2">R</font></a> <a href="#S"><font size="-2">S</font></a> <a href="#Z"><font size="-2">Z</font></a> <a href="#topheader"><font size="-2">TOP</font></a> <br> <nobr><A HREF="org.apache.hadoop.fs.Path.html#org.apache.hadoop.fs.Path.ctor_added(java.net.URI)" class="hiddenlink" target="rightframe"><b>Path</b> (<code>URI</code>)</A></nobr>&nbsp;constructor<br> <nobr><A HREF="org.apache.hadoop.util.ProcfsBasedProcessTree.html#org.apache.hadoop.util.ProcfsBasedProcessTree.ctor_added(java.lang.String, boolean)" class="hiddenlink" target="rightframe"><b>ProcfsBasedProcessTree</b> (<code>String, boolean</code>)</A></nobr>&nbsp;constructor<br> <A NAME="R"></A> <br><font size="+2">R</font>&nbsp; <a href="#C"><font size="-2">C</font></a> <a href="#F"><font size="-2">F</font></a> <a href="#H"><font size="-2">H</font></a> <a href="#I"><font size="-2">I</font></a> <a href="#P"><font size="-2">P</font></a> <a href="#S"><font size="-2">S</font></a> <a href="#Z"><font size="-2">Z</font></a> <a href="#topheader"><font size="-2">TOP</font></a> <br> <nobr><A HREF="org.apache.hadoop.ipc.RPC.Server.html#org.apache.hadoop.ipc.RPC.Server.ctor_added(java.lang.Object, org.apache.hadoop.conf.Configuration, java.lang.String, int, int, boolean, org.apache.hadoop.security.token.SecretManager)" class="hiddenlink" target="rightframe"><b>RPC.Server</b> (<code>Object, Configuration, String, int, int, boolean, 
SecretManager</code>)</A></nobr>&nbsp;constructor<br> <A NAME="S"></A> <br><font size="+2">S</font>&nbsp; <a href="#C"><font size="-2">C</font></a> <a href="#F"><font size="-2">F</font></a> <a href="#H"><font size="-2">H</font></a> <a href="#I"><font size="-2">I</font></a> <a href="#P"><font size="-2">P</font></a> <a href="#R"><font size="-2">R</font></a> <a href="#Z"><font size="-2">Z</font></a> <a href="#topheader"><font size="-2">TOP</font></a> <br> <nobr><A HREF="org.apache.hadoop.ipc.Server.html#org.apache.hadoop.ipc.Server.ctor_added(java.lang.String, int, java.lang.Class, int, org.apache.hadoop.conf.Configuration, java.lang.String, org.apache.hadoop.security.token.SecretManager)" class="hiddenlink" target="rightframe"><b>Server</b> (<code>String, int, Class, int, Configuration, String, SecretManager</code>)</A></nobr>&nbsp;constructor<br> <nobr><A HREF="org.apache.hadoop.util.Shell.ShellCommandExecutor.html#org.apache.hadoop.util.Shell.ShellCommandExecutor.ctor_added(java.lang.String[], java.io.File, java.util.Map, long)" class="hiddenlink" target="rightframe"><b>Shell.ShellCommandExecutor</b> (<code>String[], File, Map, long</code>)</A></nobr>&nbsp;constructor<br> <A NAME="Z"></A> <br><font size="+2">Z</font>&nbsp; <a href="#C"><font size="-2">C</font></a> <a href="#F"><font size="-2">F</font></a> <a href="#H"><font size="-2">H</font></a> <a href="#I"><font size="-2">I</font></a> <a href="#P"><font size="-2">P</font></a> <a href="#R"><font size="-2">R</font></a> <a href="#S"><font size="-2">S</font></a> <a href="#topheader"><font size="-2">TOP</font></a> <br> <nobr><A HREF="org.apache.hadoop.io.compress.zlib.ZlibCompressor.html#org.apache.hadoop.io.compress.zlib.ZlibCompressor.ctor_added(org.apache.hadoop.conf.Configuration)" class="hiddenlink" target="rightframe"><b>ZlibCompressor</b> (<code>Configuration</code>)</A></nobr>&nbsp;constructor<br> </BODY> </HTML>
Java
package org.apereo.cas.ticket.code; import org.apereo.cas.authentication.Authentication; import org.apereo.cas.authentication.principal.Service; import org.apereo.cas.ticket.ExpirationPolicy; import org.apereo.cas.ticket.Ticket; import org.apereo.cas.ticket.TicketFactory; import org.apereo.cas.ticket.UniqueTicketIdGenerator; import org.apereo.cas.util.DefaultUniqueTicketIdGenerator; /** * Default OAuth code factory. * * @author Jerome Leleu * @since 5.0.0 */ public class DefaultOAuthCodeFactory implements OAuthCodeFactory { /** Default instance for the ticket id generator. */ protected final UniqueTicketIdGenerator oAuthCodeIdGenerator; /** ExpirationPolicy for refresh tokens. */ protected final ExpirationPolicy expirationPolicy; public DefaultOAuthCodeFactory(final ExpirationPolicy expirationPolicy) { this(new DefaultUniqueTicketIdGenerator(), expirationPolicy); } public DefaultOAuthCodeFactory(final UniqueTicketIdGenerator refreshTokenIdGenerator, final ExpirationPolicy expirationPolicy) { this.oAuthCodeIdGenerator = refreshTokenIdGenerator; this.expirationPolicy = expirationPolicy; } @Override public OAuthCode create(final Service service, final Authentication authentication) { final String codeId = this.oAuthCodeIdGenerator.getNewTicketId(OAuthCode.PREFIX); return new OAuthCodeImpl(codeId, service, authentication, this.expirationPolicy); } @Override public <T extends TicketFactory> T get(final Class<? extends Ticket> clazz) { return (T) this; } }
Java
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.daemon.impl;

import com.intellij.codeHighlighting.EditorBoundHighlightingPass;
import com.intellij.codeHighlighting.HighlightingPass;
import com.intellij.codeHighlighting.TextEditorHighlightingPass;
import com.intellij.codeHighlighting.TextEditorHighlightingPassRegistrar;
import com.intellij.concurrency.Job;
import com.intellij.concurrency.JobLauncher;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.application.ex.ApplicationUtil;
import com.intellij.openapi.application.impl.ApplicationImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.util.Functions;
import com.intellij.util.containers.CollectionFactory;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.HashingStrategy;
import com.intellij.util.ui.UIUtil;
import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;

import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Schedules highlighting passes on background threads, respecting the predecessor
 * dependencies each pass declares (completion/starting predecessor ids), and applies
 * the collected results to the editors on the EDT afterwards.
 */
final class PassExecutorService implements Disposable {
  static final Logger LOG = Logger.getInstance(PassExecutorService.class);
  private static final boolean CHECK_CONSISTENCY = ApplicationManager.getApplication().isUnitTestMode();

  // All passes currently submitted to the job launcher (dependent passes are stored with a null job).
  private final Map<ScheduledPass, Job<Void>> mySubmittedPasses = new ConcurrentHashMap<>();
  private final Project myProject;
  private volatile boolean isDisposed;
  private final AtomicInteger nextAvailablePassId; // used to assign random id to a pass if not set

  PassExecutorService(@NotNull Project project) {
    myProject = project;
    nextAvailablePassId = ((TextEditorHighlightingPassRegistrarImpl)TextEditorHighlightingPassRegistrar.getInstance(myProject)).getNextAvailableId();
  }

  @Override
  public void dispose() {
    cancelAll(true);
    // some workers could, although idle, still retain some thread references for some time causing leak hunter to frown
    ForkJoinPool.commonPool().awaitQuiescence(1, TimeUnit.SECONDS);
    isDisposed = true;
  }

  /**
   * Cancels every submitted pass (progress indicator + job); optionally waits for
   * the jobs to terminate. Always clears the submitted-passes map afterwards.
   */
  void cancelAll(boolean waitForTermination) {
    for (Map.Entry<ScheduledPass, Job<Void>> entry : mySubmittedPasses.entrySet()) {
      Job<Void> job = entry.getValue();
      ScheduledPass pass = entry.getKey();
      pass.myUpdateProgress.cancel();
      job.cancel();
    }
    try {
      if (waitForTermination) {
        // poll in short intervals until all jobs report completion
        while (!waitFor(50)) {
          int i = 0;
        }
      }
    }
    catch (ProcessCanceledException ignored) {
    }
    catch (Error | RuntimeException e) {
      throw e;
    }
    catch (Throwable throwable) {
      LOG.error(throwable);
    }
    finally {
      mySubmittedPasses.clear();
    }
  }

  /**
   * Builds the dependency graph of ScheduledPasses for the given editors and submits
   * the passes with no pending predecessors ("free" passes); the rest are submitted
   * later as their predecessors finish.
   */
  void submitPasses(@NotNull Map<FileEditor, HighlightingPass[]> passesMap,
                    // a list of opened FileEditors for each Document. The first FileEditor in the list is the preferred one
                    @NotNull Map<Document, List<FileEditor>> documentToEditors,
                    @NotNull DaemonProgressIndicator updateProgress) {
    if (isDisposed()) return;

    Map<FileEditor, List<TextEditorHighlightingPass>> documentBoundPasses = new HashMap<>();
    Map<FileEditor, List<EditorBoundHighlightingPass>> editorBoundPasses = new HashMap<>();
    Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass = new HashMap<>();
    List<ScheduledPass> freePasses = new ArrayList<>(documentToEditors.size() * 5);
    AtomicInteger threadsToStartCountdown = new AtomicInteger(0);

    // Bucket the incoming passes: editor-bound, document-bound, or generic (free).
    for (Map.Entry<FileEditor, HighlightingPass[]> entry : passesMap.entrySet()) {
      FileEditor fileEditor = entry.getKey();
      HighlightingPass[] passes = entry.getValue();

      for (HighlightingPass pass : passes) {
        Int2ObjectMap<TextEditorHighlightingPass> thisEditorId2Pass = id2Pass.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(30));
        if (pass instanceof EditorBoundHighlightingPass) {
          EditorBoundHighlightingPass editorPass = (EditorBoundHighlightingPass)pass;
          // have to make ids unique for this document
          assignUniqueId(editorPass, thisEditorId2Pass);
          editorBoundPasses.computeIfAbsent(fileEditor, __->new ArrayList<>()).add(editorPass);
        }
        else if (pass instanceof TextEditorHighlightingPass) {
          TextEditorHighlightingPass tePass = (TextEditorHighlightingPass)pass;
          assignUniqueId(tePass, thisEditorId2Pass);
          documentBoundPasses.computeIfAbsent(fileEditor, __->new ArrayList<>()).add(tePass);
        }
        else {
          // generic HighlightingPass, run all of them concurrently
          freePasses.add(new ScheduledPass(fileEditor, pass, updateProgress, threadsToStartCountdown));
        }
      }
    }

    List<ScheduledPass> dependentPasses = new ArrayList<>(documentToEditors.size() * 10);
    // fileEditor-> (passId -> created pass)
    Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted = new HashMap<>(passesMap.size());
    // Document-bound passes run only for the preferred (first) editor of each document.
    for (Map.Entry<Document, List<FileEditor>> entry : documentToEditors.entrySet()) {
      List<FileEditor> fileEditors = entry.getValue();
      FileEditor preferredFileEditor = fileEditors.get(0); // assumption: the preferred fileEditor is stored first
      List<TextEditorHighlightingPass> passes = documentBoundPasses.get(preferredFileEditor);
      if (passes == null || passes.isEmpty()) {
        continue;
      }
      sortById(passes);
      for (TextEditorHighlightingPass pass : passes) {
        createScheduledPass(preferredFileEditor, pass, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown);
      }
    }

    for (Map.Entry<FileEditor, List<EditorBoundHighlightingPass>> entry : editorBoundPasses.entrySet()) {
      FileEditor fileEditor = entry.getKey();
      Collection<EditorBoundHighlightingPass> createdEditorBoundPasses = entry.getValue();
      for (EditorBoundHighlightingPass pass : createdEditorBoundPasses) {
        createScheduledPass(fileEditor, pass, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown);
      }
    }

    if (CHECK_CONSISTENCY && !ApplicationManagerEx.isInStressTest()) {
      assertConsistency(freePasses, toBeSubmitted, threadsToStartCountdown);
    }

    if (LOG.isDebugEnabled()) {
      Set<VirtualFile> vFiles = ContainerUtil.map2Set(passesMap.keySet(), FileEditor::getFile);
      log(updateProgress, null, vFiles + " ----- starting " + threadsToStartCountdown.get(), freePasses);
    }

    // Dependent passes are tracked (null job) but submitted only when predecessors complete.
    for (ScheduledPass dependentPass : dependentPasses) {
      mySubmittedPasses.put(dependentPass, Job.nullJob());
    }
    for (ScheduledPass freePass : freePasses) {
      submit(freePass);
    }
  }

  /**
   * Ensures the pass has a unique positive id within this editor; generates one when
   * the pass id is unset (-1 or 0). Logs an error on id collisions.
   */
  private void assignUniqueId(@NotNull TextEditorHighlightingPass pass, @NotNull Int2ObjectMap<TextEditorHighlightingPass> id2Pass) {
    int id = pass.getId();
    if (id == -1 || id == 0) {
      id = nextAvailablePassId.incrementAndGet();
      pass.setId(id);
    }
    TextEditorHighlightingPass prevPass = id2Pass.put(id, pass);
    if (prevPass != null) {
      LOG.error("Duplicate pass id found: "+id+". Both passes returned the same getId(): "+prevPass+" ("+prevPass.getClass() +") and "+pass+" ("+pass.getClass()+")");
    }
  }

  // Unit-test-only sanity check: verifies that every scheduled pass is reachable from
  // the free passes and that predecessor counts are consistent with the dependency graph.
  private void assertConsistency(@NotNull List<ScheduledPass> freePasses,
                                 @NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted,
                                 @NotNull AtomicInteger threadsToStartCountdown) {
    assert threadsToStartCountdown.get() == toBeSubmitted.values().stream().mapToInt(m->m.size()).sum();
    Map<ScheduledPass, Pair<ScheduledPass, Integer>> id2Visits = CollectionFactory.createCustomHashingStrategyMap(new HashingStrategy<>() {
      @Override
      public int hashCode(@Nullable PassExecutorService.ScheduledPass sp) {
        if (sp == null) return 0;
        return ((TextEditorHighlightingPass)sp.myPass).getId() * 31 + sp.myFileEditor.hashCode();
      }

      @Override
      public boolean equals(@Nullable PassExecutorService.ScheduledPass sp1, @Nullable PassExecutorService.ScheduledPass sp2) {
        if (sp1 == null || sp2 == null) return sp1 == sp2;
        int id1 = ((TextEditorHighlightingPass)sp1.myPass).getId();
        int id2 = ((TextEditorHighlightingPass)sp2.myPass).getId();
        return id1 == id2 && sp1.myFileEditor == sp2.myFileEditor;
      }
    });
    for (ScheduledPass freePass : freePasses) {
      HighlightingPass pass = freePass.myPass;
      if (pass instanceof TextEditorHighlightingPass) {
        id2Visits.put(freePass, Pair.create(freePass, 0));
        checkConsistency(freePass, id2Visits);
      }
    }
    for (Map.Entry<ScheduledPass, Pair<ScheduledPass, Integer>> entry : id2Visits.entrySet()) {
      int count = entry.getValue().second;
      assert count == 0 : entry.getKey();
    }
    assert id2Visits.size() == threadsToStartCountdown.get() : "Expected "+threadsToStartCountdown+" but got "+id2Visits.size()+": "+id2Visits;
  }

  // Recursive helper for assertConsistency: decrements the remaining-predecessor count of
  // each successor and recurses once a successor would become runnable.
  private void checkConsistency(@NotNull ScheduledPass pass, Map<ScheduledPass, Pair<ScheduledPass, Integer>> id2Visits) {
    for (ScheduledPass succ : ContainerUtil.concat(pass.mySuccessorsOnCompletion, pass.mySuccessorsOnSubmit)) {
      Pair<ScheduledPass, Integer> succPair = id2Visits.get(succ);
      if (succPair == null) {
        succPair = Pair.create(succ, succ.myRunningPredecessorsCount.get());
        id2Visits.put(succ, succPair);
      }
      int newPred = succPair.second - 1;
      id2Visits.put(succ, Pair.create(succ, newPred));
      assert newPred >= 0;
      if (newPred == 0) {
        checkConsistency(succ, id2Visits);
      }
    }
  }

  /**
   * Creates (or returns the previously-created) ScheduledPass for the given pass,
   * recursively wiring up its predecessor edges. Passes with no pending predecessors
   * are put into {@code freePasses}, the rest into {@code dependentPasses}.
   * Also chains a ShowIntentionsPass after the pass when requested.
   */
  @NotNull
  private ScheduledPass createScheduledPass(@NotNull FileEditor fileEditor,
                                            @NotNull TextEditorHighlightingPass pass,
                                            @NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted,
                                            @NotNull Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass,
                                            @NotNull List<ScheduledPass> freePasses,
                                            @NotNull List<ScheduledPass> dependentPasses,
                                            @NotNull DaemonProgressIndicator updateProgress,
                                            @NotNull AtomicInteger threadsToStartCountdown) {
    Int2ObjectMap<ScheduledPass> thisEditorId2ScheduledPass = toBeSubmitted.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(20));
    Int2ObjectMap<TextEditorHighlightingPass> thisEditorId2Pass = id2Pass.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(20));
    int passId = pass.getId();
    ScheduledPass scheduledPass = thisEditorId2ScheduledPass.get(passId);
    if (scheduledPass != null) return scheduledPass;
    scheduledPass = new ScheduledPass(fileEditor, pass, updateProgress, threadsToStartCountdown);
    threadsToStartCountdown.incrementAndGet();
    thisEditorId2ScheduledPass.put(passId, scheduledPass);
    for (int predecessorId : pass.getCompletionPredecessorIds()) {
      ScheduledPass predecessor = findOrCreatePredecessorPass(fileEditor, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress,
                                                              threadsToStartCountdown, predecessorId, thisEditorId2ScheduledPass, thisEditorId2Pass);
      if (predecessor != null) {
        predecessor.addSuccessorOnCompletion(scheduledPass);
      }
    }
    for (int predecessorId : pass.getStartingPredecessorIds()) {
      ScheduledPass predecessor = findOrCreatePredecessorPass(fileEditor, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress,
                                                              threadsToStartCountdown, predecessorId, thisEditorId2ScheduledPass, thisEditorId2Pass);
      if (predecessor != null) {
        predecessor.addSuccessorOnSubmit(scheduledPass);
      }
    }
    if (scheduledPass.myRunningPredecessorsCount.get() == 0 && !freePasses.contains(scheduledPass)) {
      freePasses.add(scheduledPass);
    }
    else if (!dependentPasses.contains(scheduledPass)) {
      dependentPasses.add(scheduledPass);
    }

    if (pass.isRunIntentionPassAfter() && fileEditor instanceof TextEditor) {
      Editor editor = ((TextEditor)fileEditor).getEditor();
      VirtualFile virtualFile = fileEditor.getFile();
      PsiFile psiFile = virtualFile == null ? null : ReadAction.compute(() -> PsiManager.getInstance(myProject).findFile(virtualFile));
      if (psiFile != null) {
        ShowIntentionsPass ip = new ShowIntentionsPass(psiFile, editor, false);
        assignUniqueId(ip, thisEditorId2Pass);
        ip.setCompletionPredecessorIds(new int[]{passId});
        createScheduledPass(fileEditor, ip, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown);
      }
    }
    return scheduledPass;
  }

  // Resolves a predecessor pass by id, creating its ScheduledPass on demand.
  // Returns null when the predecessor pass was not registered for this editor.
  private ScheduledPass findOrCreatePredecessorPass(@NotNull FileEditor fileEditor,
                                                    @NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted,
                                                    @NotNull Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass,
                                                    @NotNull List<ScheduledPass> freePasses,
                                                    @NotNull List<ScheduledPass> dependentPasses,
                                                    @NotNull DaemonProgressIndicator updateProgress,
                                                    @NotNull AtomicInteger myThreadsToStartCountdown,
                                                    int predecessorId,
                                                    @NotNull Int2ObjectMap<ScheduledPass> thisEditorId2ScheduledPass,
                                                    @NotNull Int2ObjectMap<? extends TextEditorHighlightingPass> thisEditorId2Pass) {
    ScheduledPass predecessor = thisEditorId2ScheduledPass.get(predecessorId);
    if (predecessor == null) {
      TextEditorHighlightingPass textEditorPass = thisEditorId2Pass.get(predecessorId);
      predecessor = textEditorPass == null ? null : createScheduledPass(fileEditor, textEditorPass, toBeSubmitted, id2Pass, freePasses,
                                                                        dependentPasses, updateProgress, myThreadsToStartCountdown);
    }
    return predecessor;
  }

  // Hands a runnable pass to the JobLauncher, unless its progress is already canceled.
  private void submit(@NotNull ScheduledPass pass) {
    if (!pass.myUpdateProgress.isCanceled()) {
      Job<Void> job = JobLauncher.getInstance().submitToJobThread(pass, future -> {
        try {
          if (!future.isCancelled()) { // for canceled task .get() generates CancellationException which is expensive
            future.get();
          }
        }
        catch (CancellationException | InterruptedException ignored) {
        }
        catch (ExecutionException e) {
          LOG.error(e.getCause());
        }
      });
      mySubmittedPasses.put(pass, job);
    }
  }

  /**
   * A node in the pass dependency graph: runs its HighlightingPass under a read action
   * and submits successors as their predecessor counts drop to zero.
   */
  private final class ScheduledPass implements Runnable {
    private final FileEditor myFileEditor;
    private final HighlightingPass myPass;
    private final AtomicInteger myThreadsToStartCountdown;
    // number of predecessors that still have to run before this pass may be submitted
    private final AtomicInteger myRunningPredecessorsCount = new AtomicInteger(0);
    private final List<ScheduledPass> mySuccessorsOnCompletion = new ArrayList<>();
    private final List<ScheduledPass> mySuccessorsOnSubmit = new ArrayList<>();
    @NotNull private final DaemonProgressIndicator myUpdateProgress;

    private ScheduledPass(@NotNull FileEditor fileEditor,
                          @NotNull HighlightingPass pass,
                          @NotNull DaemonProgressIndicator progressIndicator,
                          @NotNull AtomicInteger threadsToStartCountdown) {
      myFileEditor = fileEditor;
      myPass = pass;
      myThreadsToStartCountdown = threadsToStartCountdown;
      myUpdateProgress = progressIndicator;
    }

    @Override
    public void run() {
      ((ApplicationImpl)ApplicationManager.getApplication()).executeByImpatientReader(() -> {
        try {
          doRun();
        }
        catch (ApplicationUtil.CannotRunReadActionException e) {
          // a write action is pending; cancel so the daemon restarts this pass later
          myUpdateProgress.cancel();
        }
        catch (RuntimeException | Error e) {
          saveException(e, myUpdateProgress);
          throw e;
        }
      });
    }

    private void doRun() {
      if (myUpdateProgress.isCanceled()) return;

      log(myUpdateProgress, myPass, "Started. ");

      // "on submit" successors may start as soon as this pass starts running
      for (ScheduledPass successor : mySuccessorsOnSubmit) {
        int predecessorsToRun = successor.myRunningPredecessorsCount.decrementAndGet();
        if (predecessorsToRun == 0) {
          submit(successor);
        }
      }

      ProgressManager.getInstance().executeProcessUnderProgress(() -> {
        boolean success = ApplicationManagerEx.getApplicationEx().tryRunReadAction(() -> {
          try {
            if (DumbService.getInstance(myProject).isDumb() && !DumbService.isDumbAware(myPass)) {
              return;
            }
            if (!myUpdateProgress.isCanceled() && !myProject.isDisposed()) {
              myPass.collectInformation(myUpdateProgress);
            }
          }
          catch (ProcessCanceledException e) {
            log(myUpdateProgress, myPass, "Canceled ");
            if (!myUpdateProgress.isCanceled()) {
              myUpdateProgress.cancel(e); //in case when some smart asses throw PCE just for fun
            }
          }
          catch (RuntimeException | Error e) {
            myUpdateProgress.cancel(e);
            LOG.error(e);
            throw e;
          }
        });

        if (!success) {
          myUpdateProgress.cancel();
        }
      }, myUpdateProgress);

      log(myUpdateProgress, myPass, "Finished. ");

      if (!myUpdateProgress.isCanceled()) {
        applyInformationToEditorsLater(myFileEditor, myPass, myUpdateProgress, myThreadsToStartCountdown, ()->{
          // "on completion" successors may start only after results are applied
          for (ScheduledPass successor : mySuccessorsOnCompletion) {
            int predecessorsToRun = successor.myRunningPredecessorsCount.decrementAndGet();
            if (predecessorsToRun == 0) {
              submit(successor);
            }
          }
        });
      }
    }

    @NonNls
    @Override
    public String toString() {
      return "SP: " + myPass;
    }

    private void addSuccessorOnCompletion(@NotNull ScheduledPass successor) {
      mySuccessorsOnCompletion.add(successor);
      successor.myRunningPredecessorsCount.incrementAndGet();
    }

    private void addSuccessorOnSubmit(@NotNull ScheduledPass successor) {
      mySuccessorsOnSubmit.add(successor);
      successor.myRunningPredecessorsCount.incrementAndGet();
    }
  }

  /**
   * Applies the pass results to the editor on the EDT (only while the editor component
   * is showing), marks the file up-to-date, and stops the progress once the last pass
   * of this batch has been applied.
   */
  private void applyInformationToEditorsLater(@NotNull FileEditor fileEditor,
                                              @NotNull HighlightingPass pass,
                                              @NotNull DaemonProgressIndicator updateProgress,
                                              @NotNull AtomicInteger threadsToStartCountdown,
                                              @NotNull Runnable callbackOnApplied) {
    ApplicationManager.getApplication().invokeLater(() -> {
      if (isDisposed() || !fileEditor.isValid()) {
        updateProgress.cancel();
      }
      if (updateProgress.isCanceled()) {
        log(updateProgress, pass, " is canceled during apply, sorry");
        return;
      }
      try {
        if (UIUtil.isShowing(fileEditor.getComponent())) {
          pass.applyInformationToEditor();
          repaintErrorStripeAndIcon(fileEditor);
          if (pass instanceof TextEditorHighlightingPass) {
            FileStatusMap fileStatusMap = DaemonCodeAnalyzerEx.getInstanceEx(myProject).getFileStatusMap();
            Document document = ((TextEditorHighlightingPass)pass).getDocument();
            int passId = ((TextEditorHighlightingPass)pass).getId();
            fileStatusMap.markFileUpToDate(document, passId);
          }
          log(updateProgress, pass, " Applied");
        }
      }
      catch (ProcessCanceledException e) {
        log(updateProgress, pass, "Error " + e);
        throw e;
      }
      catch (RuntimeException e) {
        VirtualFile file = fileEditor.getFile();
        FileType fileType = file == null ? null : file.getFileType();
        String message = "Exception while applying information to " + fileEditor + "("+fileType+")";
        log(updateProgress, pass, message + e);
        throw new RuntimeException(message, e);
      }
      if (threadsToStartCountdown.decrementAndGet() == 0) {
        // this was the last pass of the batch
        HighlightingSessionImpl.waitForAllSessionsHighlightInfosApplied(updateProgress);
        log(updateProgress, pass, "Stopping ");
        updateProgress.stopIfRunning();
        clearStaleEntries();
      }
      else {
        log(updateProgress, pass, "Finished but there are passes in the queue: " + threadsToStartCountdown.get());
      }
      callbackOnApplied.run();
    }, updateProgress.getModalityState(), pass.getExpiredCondition());
  }

  // Drops map entries whose progress indicator has been canceled.
  private void clearStaleEntries() {
    mySubmittedPasses.keySet().removeIf(pass -> pass.myUpdateProgress.isCanceled());
  }

  private void repaintErrorStripeAndIcon(@NotNull FileEditor fileEditor) {
    if (fileEditor instanceof TextEditor) {
      DefaultHighlightInfoProcessor.repaintErrorStripeAndIcon(((TextEditor)fileEditor).getEditor(), myProject);
    }
  }

  private boolean isDisposed() {
    return isDisposed || myProject.isDisposed();
  }

  /** Returns all currently-submitted passes whose progress is not canceled. */
  @NotNull
  List<HighlightingPass> getAllSubmittedPasses() {
    List<HighlightingPass> result = new ArrayList<>(mySubmittedPasses.size());
    for (ScheduledPass scheduledPass : mySubmittedPasses.keySet()) {
      if (!scheduledPass.myUpdateProgress.isCanceled()) {
        result.add(scheduledPass.myPass);
      }
    }
    return result;
  }

  private static void sortById(@NotNull List<? extends TextEditorHighlightingPass> result) {
    ContainerUtil.quickSort(result, Comparator.comparingInt(TextEditorHighlightingPass::getId));
  }

  // Extracts the worker index from the JobScheduler thread name (0 when it doesn't match),
  // used only to indent debug log lines per thread.
  private static int getThreadNum() {
    Matcher matcher = Pattern.compile("JobScheduler FJ pool (\\d*)/(\\d*)").matcher(Thread.currentThread().getName());
    String num = matcher.matches() ? matcher.group(1) : null;
    return StringUtil.parseInt(num, 0);
  }

  static void log(ProgressIndicator progressIndicator, HighlightingPass pass, @NonNls Object @NotNull ... info) {
    if (LOG.isDebugEnabled()) {
      Document document = pass instanceof TextEditorHighlightingPass ? ((TextEditorHighlightingPass)pass).getDocument() : null;
      CharSequence docText = document == null ? "" : ": '" + StringUtil.first(document.getCharsSequence(), 10, true)+ "'";
      // synchronized so interleaved debug lines from worker threads stay readable
      synchronized (PassExecutorService.class) {
        String infos = StringUtil.join(info, Functions.TO_STRING(), " ");
        String message = StringUtil.repeatSymbol(' ', getThreadNum() * 4)
                         + " " + pass + " " + infos
                         + "; progress=" + (progressIndicator == null ? null : progressIndicator.hashCode())
                         + " " + (progressIndicator == null ? "?" : progressIndicator.isCanceled() ? "X" : "V")
                         + docText;
        LOG.debug(message);
        //System.out.println(message);
      }
    }
  }

  private static final Key<Throwable> THROWABLE_KEY = Key.create("THROWABLE_KEY");

  // Records the first exception thrown by a pass on the progress indicator (for tests).
  static void saveException(@NotNull Throwable e, @NotNull DaemonProgressIndicator indicator) {
    indicator.putUserDataIfAbsent(THROWABLE_KEY, e);
  }

  @TestOnly
  static Throwable getSavedException(@NotNull DaemonProgressIndicator indicator) {
    return indicator.getUserData(THROWABLE_KEY);
  }

  // return true if terminated
  boolean waitFor(int millis) throws Throwable {
    try {
      for (Job<Void> job : mySubmittedPasses.values()) {
        job.waitForCompletion(millis);
      }
      return true;
    }
    catch (TimeoutException ignored) {
      return false;
    }
    catch (InterruptedException e) {
      return true;
    }
    catch (ExecutionException e) {
      throw e.getCause();
    }
  }
}
Java
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

namespace Google.Cloud.Channel.V1.Snippets
{
    // [START cloudchannel_v1_generated_CloudChannelService_DeleteCustomer_sync]
    using Google.Cloud.Channel.V1;

    public sealed partial class GeneratedCloudChannelServiceClientSnippets
    {
        /// <summary>Snippet for DeleteCustomer</summary>
        /// <remarks>
        /// This snippet has been automatically generated for illustrative purposes only.
        /// It may require modifications to work in your environment.
        /// </remarks>
        public void DeleteCustomerRequestObject()
        {
            // Create client
            CloudChannelServiceClient cloudChannelServiceClient = CloudChannelServiceClient.Create();
            // Initialize request argument(s)
            // NOTE: "[ACCOUNT]" and "[CUSTOMER]" are placeholder resource ids;
            // replace them with real values before running this snippet.
            DeleteCustomerRequest request = new DeleteCustomerRequest
            {
                CustomerName = CustomerName.FromAccountCustomer("[ACCOUNT]", "[CUSTOMER]"),
            };
            // Make the request
            cloudChannelServiceClient.DeleteCustomer(request);
        }
    }
    // [END cloudchannel_v1_generated_CloudChannelService_DeleteCustomer_sync]
}
Java
###Build a starter map

This lab covers the basics for creating a basic starter mapping application. The starter map simply loads a default base map, and centers and zooms it in a [MapView](https://developers.arcgis.com/javascript/latest/api-reference/esri-views-MapView.html). If you are new to ArcGIS and need a full set of instructions on building a basic mapping application visit the [Getting Started with MapView](https://developers.arcgis.com/javascript/latest/sample-code/get-started-mapview/index.html) tutorial.

1. Copy and paste the code below into a new [jsbin.com](http://jsbin.com).

  ```html
  <!DOCTYPE html>
  <html>
  <head>
    <meta charset="utf-8">
    <meta name="viewport" content="initial-scale=1,maximum-scale=1,user-scalable=no">
    <title>JS API Starter App</title>
    <link rel="stylesheet" href="https://js.arcgis.com/4.0/esri/css/main.css">
    <style>
      html, body, #viewDiv {
        padding: 0;
        margin: 0;
        height: 100%;
      }
    </style>
    <script src="https://js.arcgis.com/4.0/"></script>
    <script>
      require([
        "esri/Map",
        "esri/views/MapView",
        "dojo/domReady!"
      ], function(Map, MapView) {
        var map = new Map({
          basemap: "dark-gray"
        });
        var view = new MapView({
          container: "viewDiv",
          map: map,
          center: [-122.68, 45.52],
          zoom: 10
        });
      });
    </script>
  </head>
  <body>
    <div id="viewDiv"></div>
  </body>
  </html>
  ```

2. The JSBin `Output` panel should show a dark-gray map centered on Portland, Oregon.

Your app should look something like this:

* [Code](index.html)
* [Live App](https://esri.github.io/geodev-hackerlabs/develop/jsapi/create_starter_map/index.html)

###Bonus

* Experiment with different basemaps such as `topo` or `gray`.
* Declare the `view` variable globally instead and open your browser's javascript console ([see some instructions here](https://www.wickedlysmart.com/hfjsconsole/)). You can then interactively control the view from your browser console by referring to the `view` global variable. Many browsers will autocomplete once you've typed `view.`.
For example, change the view extent, center point, zoom level or scale. See [here](https://developers.arcgis.com/javascript/latest/api-reference/esri-views-MapView.html) for some examples. **Hint:** If you're in a JS Bin, pop the Output into a separate window/tab to get direct access from the console. ``` javascript var view; // DECLARE the 'view' variable globally. require([ "esri/Map", "esri/views/MapView", "dojo/domReady!" ], function( Map, MapView) { ... view = new MapView({ // REMOVE the 'var' so we're setting the new global 'view' variable. container: "viewDiv", map: map, center: [-122.68, 45.52], zoom: 10 }); ``` Try changing the map's basemap by drilling down through the `view.map` property. E.g. `view.map.basemap = "streets"`. **Note:** You typically don't want to declare global variables like we do here, but it's good to know how to do it for debugging and exploring the API. Plus you're learning about JavaScript variable scope! * Run the code locally on your machine. Eventually if your app gets larger you'll want to migrate it from JSBin to your desktop.
Java
/* Remove Bootstrap's default horizontal padding so content spans the full width. */
.container-fluid,
.container {
    padding-right: 0px;
    padding-left: 0px;
    margin-right: auto;
    margin-left: auto;
}

/* Strip the gutter padding from every Bootstrap grid column. */
.col-lg-1, .col-lg-10, .col-lg-11, .col-lg-12, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5,
.col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9,
.col-md-1, .col-md-10, .col-md-11, .col-md-12, .col-md-2, .col-md-3, .col-md-4, .col-md-5,
.col-md-6, .col-md-7, .col-md-8, .col-md-9,
.col-sm-1, .col-sm-10, .col-sm-11, .col-sm-12, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5,
.col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9,
.col-xs-1, .col-xs-10, .col-xs-11, .col-xs-12, .col-xs-2, .col-xs-3, .col-xs-4, .col-xs-5,
.col-xs-6, .col-xs-7, .col-xs-8, .col-xs-9 {
    position: relative;
    min-height: 1px;
    padding-right: 0px;
    padding-left: 0px;
}

.wrap__content {
    position: relative;
}

/* Big Bootstrap override: cancel the negative margins Bootstrap puts on rows. */
.row {
    margin-right: 0px;
    margin-left: 0px;
}

/* Responsive container widths (mirrors Bootstrap's breakpoints). */
@media only screen and (min-width: 768px) {
    .container {
        width: inherit;
    }
}

@media only screen and (min-width: 992px) {
    .container {
        width: 970px;
    }
}

@media only screen and (min-width: 1200px) {
    .container {
        width: 1170px;
    }
}

/* Sticky footer styles
-------------------------------------------------- */
html,
body {
    height: 100%;
    background-color: white;
    font-family: 'Nunito', sans-serif;
}

/* Wrapper for page content to push down footer */
#wrap {
    min-height: 100%;
    height: auto;
    /* Negative indent footer by its height */
    margin: 0 auto -100px;
    /* Pad bottom by footer height */
    padding: 0 0 100px;
}

/* Set the fixed height of the footer here */
#footer {
    height: 100px;
    background-color: #DC73FF;
}
Java
/* * Copyright 2015-2016 Red Hat, Inc, and individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.hal.client.runtime.subsystem.elytron.wizardpassword; public enum PasswordState { CHOOSE_PASSWORD_TYPE, CONFIGURATION, REVIEW }
Java
# V1SecurityContextConstraintsList ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **kind** | **str** | Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/release-1.2/docs/devel/api-conventions.md#types-kinds | [optional] **api_version** | **str** | APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/release-1.2/docs/devel/api-conventions.md#resources | [optional] **metadata** | [**UnversionedListMeta**](UnversionedListMeta.md) | | [optional] **items** | [**list[V1SecurityContextConstraints]**](V1SecurityContextConstraints.md) | | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
Java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.rave.portal.repository.impl; import org.apache.rave.exception.NotSupportedException; import org.apache.commons.lang3.StringUtils; import org.apache.rave.exception.DataSerializationException; import org.apache.rave.model.ApplicationData; import org.apache.rave.portal.model.JpaApplicationData; import org.apache.rave.portal.model.conversion.JpaApplicationDataConverter; import org.apache.rave.portal.repository.ApplicationDataRepository; import org.apache.rave.util.CollectionUtils; import org.apache.rave.util.JsonUtils; import org.json.JSONException; import org.json.JSONObject; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Repository; import org.springframework.transaction.annotation.Transactional; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EntityManager; import javax.persistence.Lob; import javax.persistence.PersistenceContext; import javax.persistence.TypedQuery; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import static org.apache.rave.persistence.jpa.util.JpaUtil.getSingleResult; import static 
org.apache.rave.persistence.jpa.util.JpaUtil.saveOrUpdate; @Repository public class JpaApplicationDataRepository implements ApplicationDataRepository { @PersistenceContext private EntityManager manager; @Autowired private JpaApplicationDataConverter converter; @Override public Class<? extends ApplicationData> getType() { return JpaApplicationData.class; } @Override public ApplicationData get(String id) { JpaSerializableApplicationData applicationData = (JpaSerializableApplicationData) manager.find(JpaApplicationData.class, Long.parseLong(id)); if (applicationData != null) { applicationData.deserializeData(); } return applicationData; } @Override @Transactional public JpaApplicationData save(ApplicationData item) { JpaApplicationData jpaAppData = converter.convert(item); JpaSerializableApplicationData jpaSerializableApplicationData = getJpaSerializableApplicationData(jpaAppData); jpaSerializableApplicationData.serializeData(); return saveOrUpdate(jpaSerializableApplicationData.getEntityId(), manager, jpaSerializableApplicationData); } @Override public void delete(ApplicationData item) { manager.remove(item instanceof JpaApplicationData ? 
item : get(item.getId())); } @Override public List<ApplicationData> getAll() { throw new NotSupportedException("This function is not yet implemented for this class."); } @Override public List<ApplicationData> getLimitedList(int offset, int limit) { throw new NotSupportedException("This function is not yet implemented for this class."); } @Override public int getCountAll() { throw new NotSupportedException("This function is not yet implemented for this class."); } @Override public List<ApplicationData> getApplicationData(List<String> userIds, String appId) { //if the call is only looking for data for a single user use the more efficient single user variant transparently if (userIds.size() == 1) { List<ApplicationData> data = new ArrayList<ApplicationData>(); ApplicationData applicationData = getApplicationData(userIds.get(0), appId); if (applicationData != null) { data.add(applicationData); } return data; } TypedQuery<JpaSerializableApplicationData> query = manager.createNamedQuery(JpaApplicationData.FIND_BY_USER_IDS_AND_APP_ID, JpaSerializableApplicationData.class); query.setParameter(JpaApplicationData.USER_IDS_PARAM, userIds); query.setParameter(JpaApplicationData.APP_URL_PARAM, appId); List<JpaSerializableApplicationData> results = query.getResultList(); for (JpaSerializableApplicationData applicationData : results) { applicationData.deserializeData(); } return CollectionUtils.<ApplicationData>toBaseTypedList(results); } @Override public JpaApplicationData getApplicationData(String personId, String appId) { TypedQuery<JpaSerializableApplicationData> query = manager.createNamedQuery(JpaApplicationData.FIND_BY_USER_ID_AND_APP_ID, JpaSerializableApplicationData.class); query.setParameter(JpaApplicationData.USER_ID_PARAM, personId); query.setParameter(JpaApplicationData.APP_URL_PARAM, appId); JpaSerializableApplicationData applicationData = getSingleResult(query.getResultList()); if (applicationData != null) { applicationData.deserializeData(); } return 
applicationData; } private JpaSerializableApplicationData getJpaSerializableApplicationData(JpaApplicationData applicationData) { if (applicationData instanceof JpaSerializableApplicationData) { return (JpaSerializableApplicationData) applicationData; } return new JpaSerializableApplicationData(applicationData.getEntityId(), applicationData.getUserId(), applicationData.getAppUrl(), applicationData.getData()); } /** * This class is here so that the details of the persistence strategy in use for serializing the appdata map to a * JSON string doesnt end up being reflected in any public API of the ApplicationData object itself. * <p/> * This allows the public API of this repository to deal in clean ApplicationData models, but under the covers it * uses this model for the actual persistence to the database. */ @Entity public static class JpaSerializableApplicationData extends JpaApplicationData { @Lob @Column(name = "serialized_data") private String serializedData; public JpaSerializableApplicationData() { super(); } public JpaSerializableApplicationData(Long entityId, String userId, String appUrl, Map<String, Object> data) { super(entityId, userId, appUrl, data); } public void serializeData() { Map<String, Object> data = this.getData(); if (data != null) { serializedData = JsonUtils.stringify(data); } } @SuppressWarnings("unchecked") public void deserializeData() { if (serializedData != null && StringUtils.isNotBlank(serializedData)) { this.setData(JsonUtils.parse(serializedData, Map.class)); } } } }
Java
include ../../../mk/pitchfork.mk

# Local variables
_NAME = pbsvtools
$(_NAME)_REPO ?= git://github.com/PacificBiosciences/$(_NAME)
_WRKSRC = $(WORKDIR)/$(_NAME)
$(_NAME)_VERSION ?= HEAD
_REVISION = $(shell cd $(_WRKSRC) && $(GIT) rev-parse --short $($(_NAME)_VERSION) || true)

# Local works
do-fetch: $(_WRKSRC)

# $(wildcard ...) is empty when the repo variable is a URL (not a local path):
# in that case clone and check out; otherwise just symlink the local checkout
# and install it editable. DEVOPT is set at parse time by the same conditional.
$(_WRKSRC):
ifeq ($(wildcard $($(_NAME)_REPO)),)
	$(GIT) clone $($(_NAME)_REPO) $@
	cd $(_WRKSRC) && $(GIT) checkout $($(_NAME)_VERSION)
DEVOPT =
else
	ln -sfn $($(_NAME)_REPO) $(_WRKSRC)
DEVOPT = -e
endif

do-install: $(PREFIX)/var/pkg/$(_NAME)

# The receipt file doubles as the uninstall command plus the installed revision.
$(PREFIX)/var/pkg/$(_NAME): | do-fetch
	$(PIP) install --no-deps $(DEVOPT) $(_WRKSRC)/
	@echo pip uninstall $(_NAME) > $@
	@echo "# $(_REVISION)" >> $@

do-clean:

do-distclean:
	cd $(_WRKSRC) && $(GIT) clean -xdf || true

do-flush:
	rm -rf $(_WRKSRC)
Java
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
  Created on Jan 21, 2020

  @author: alfoa, wangc
  Lasso model fit with Lars using BIC or AIC for model selection.
"""
#Internal Modules (Lazy Importer)--------------------------------------------------------------------
#Internal Modules (Lazy Importer) End----------------------------------------------------------------

#External Modules------------------------------------------------------------------------------------
from numpy import finfo
#External Modules End--------------------------------------------------------------------------------

#Internal Modules------------------------------------------------------------------------------------
from SupervisedLearning.ScikitLearn import ScikitLearnBase
from utils import InputData, InputTypes
#Internal Modules End--------------------------------------------------------------------------------

class LassoLarsIC(ScikitLearnBase):
  """
    Lasso model fit with Lars using BIC or AIC for model selection
  """
  info = {'problemtype':'regression', 'normalize':False}

  def __init__(self):
    """
      Constructor that will appropriately initialize a supervised learning object
      @ In, None
      @ Out, None
    """
    super().__init__()
    # sklearn is imported lazily so the framework loads even when this model is unused
    import sklearn
    import sklearn.linear_model
    self.model = sklearn.linear_model.LassoLarsIC

  @classmethod
  def getInputSpecification(cls):
    """
      Method to get a reference to a class that specifies the input data for
      class cls.
      @ In, cls, the class for which we are retrieving the specification
      @ Out, inputSpecification, InputData.ParameterInput, class to use for
        specifying input of cls.
    """
    # Zero-argument super() works in classmethods (Python 3) and is consistent
    # with the super().__init__() call above.
    specs = super().getInputSpecification()
    specs.description = r"""The \xmlNode{LassoLarsIC} (\textit{Lasso model fit with Lars using BIC or AIC for model selection})
                        is a Lasso model fit with Lars using BIC or AIC for model selection.
                        The optimization objective for Lasso is:
                        $(1 / (2 * n\_samples)) * ||y - Xw||^2_2 + alpha * ||w||_1$
                        AIC is the Akaike information criterion and BIC is the Bayes Information criterion.
                        Such criteria are useful to select the value of the regularization parameter by making a
                        trade-off between the goodness of fit and the complexity of the model. A good model should
                        explain well the data while being simple.
                        \zNormalizationNotPerformed{LassoLarsIC}
                        """
    specs.addSub(InputData.parameterInputFactory("criterion", contentType=InputTypes.makeEnumType("criterion", "criterionType",['bic', 'aic']),
                                                 descr=r"""The type of criterion to use.""", default='aic'))
    specs.addSub(InputData.parameterInputFactory("fit_intercept", contentType=InputTypes.BoolType,
                                                 descr=r"""Whether the intercept should be estimated or not. If False,
                                                 the data is assumed to be already centered.""", default=True))
    specs.addSub(InputData.parameterInputFactory("normalize", contentType=InputTypes.BoolType,
                                                 descr=r"""This parameter is ignored when fit_intercept is set to False. If True,
                                                 the regressors X will be normalized before regression by subtracting the mean and
                                                 dividing by the l2-norm.""", default=True))
    specs.addSub(InputData.parameterInputFactory("max_iter", contentType=InputTypes.IntegerType,
                                                 descr=r"""The maximum number of iterations.""", default=500))
    specs.addSub(InputData.parameterInputFactory("precompute", contentType=InputTypes.StringType,
                                                 descr=r"""Whether to use a precomputed Gram matrix to speed up calculations.
                                                 For sparse input this option is always True to preserve sparsity.""", default='auto'))
    specs.addSub(InputData.parameterInputFactory("eps", contentType=InputTypes.FloatType,
                                                 descr=r"""The machine-precision regularization in the computation of the Cholesky
                                                 diagonal factors. Increase this for very ill-conditioned systems. Unlike the tol
                                                 parameter in some iterative optimization-based algorithms, this parameter does not
                                                 control the tolerance of the optimization.""", default=finfo(float).eps))
    specs.addSub(InputData.parameterInputFactory("positive", contentType=InputTypes.BoolType,
                                                 descr=r"""When set to True, forces the coefficients to be positive.""", default=False))
    specs.addSub(InputData.parameterInputFactory("verbose", contentType=InputTypes.BoolType,
                                                 descr=r"""Amount of verbosity.""", default=False))
    return specs

  def _handleInput(self, paramInput):
    """
      Function to handle the common parts of the distribution parameter input.
      @ In, paramInput, ParameterInput, the already parsed input.
      @ Out, None
    """
    super()._handleInput(paramInput)
    settings, notFound = paramInput.findNodesAndExtractValues(['fit_intercept','max_iter', 'normalize', 'precompute',
                                                               'eps','positive','criterion', 'verbose'])
    # notFound must be empty: every setting above has a default in the spec
    assert(not notFound)
    self.initializeModel(settings)
Java
/* * Copyright 2015 John Ahlroos * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.tabsheet; import com.vaadin.shared.ui.tabsheet.TabsheetState; import com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.interfaces.DDLayoutState; import com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.interfaces.DragAndDropAwareState; public class DDTabSheetState extends TabsheetState implements DragAndDropAwareState { public static final float DEFAULT_HORIZONTAL_DROP_RATIO = 0.2f; public float tabLeftRightDropRatio = DEFAULT_HORIZONTAL_DROP_RATIO; public DDLayoutState ddState = new DDLayoutState(); @Override public DDLayoutState getDragAndDropState() { return ddState; } }
Java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.fontbox.ttf;

import java.io.IOException;

/**
 * The 'hmtx' table in a TrueType font: per-glyph advance widths and left side
 * bearings. Glyphs beyond numHMetrics share the last advance width and store
 * only a left side bearing.
 *
 * @author Ben Litchfield
 */
public class HorizontalMetricsTable extends TTFTable
{
    /**
     * A tag that identifies this table type.
     */
    public static final String TAG = "hmtx";

    private int[] advanceWidth;
    private short[] leftSideBearing;
    private short[] nonHorizontalLeftSideBearing;
    private int numHMetrics;

    HorizontalMetricsTable(TrueTypeFont font)
    {
        super(font);
    }

    /**
     * This will read the required data from the stream.
     *
     * @param ttf The font that is being read.
     * @param data The stream to read the data from.
     * @throws IOException If there is an error reading the data.
     */
    @Override
    void read(TrueTypeFont ttf, TTFDataStream data) throws IOException
    {
        HorizontalHeaderTable hHeader = ttf.getHorizontalHeader();
        if (hHeader == null)
        {
            throw new IOException("Could not get hmtx table");
        }
        numHMetrics = hHeader.getNumberOfHMetrics();
        int numGlyphs = ttf.getNumberOfGlyphs();

        // Each full metric record is 4 bytes: uint16 advance + int16 bearing.
        advanceWidth = new int[numHMetrics];
        leftSideBearing = new short[numHMetrics];
        int bytesRead = 0;
        for (int i = 0; i < numHMetrics; i++)
        {
            advanceWidth[i] = data.readUnsignedShort();
            leftSideBearing[i] = data.readSignedShort();
            bytesRead += 4;
        }

        int numberNonHorizontal = numGlyphs - numHMetrics;
        // handle bad fonts with too many hmetrics
        if (numberNonHorizontal < 0)
        {
            numberNonHorizontal = numGlyphs;
        }

        // make sure that table is never null and correct size, even with bad fonts that have no
        // "leftSideBearing" table although they should
        nonHorizontalLeftSideBearing = new short[numberNonHorizontal];

        // stop reading as soon as the declared table length is exhausted
        for (int i = 0; i < numberNonHorizontal && bytesRead < getLength(); i++)
        {
            nonHorizontalLeftSideBearing[i] = data.readSignedShort();
            bytesRead += 2;
        }

        initialized = true;
    }

    /**
     * Returns the advance width for the given GID.
     *
     * @param gid GID
     */
    public int getAdvanceWidth(int gid)
    {
        if (advanceWidth.length == 0)
        {
            return 250;
        }
        if (gid < numHMetrics)
        {
            return advanceWidth[gid];
        }
        // monospaced fonts may not have a width for every glyph;
        // the last entry applies to all subsequent glyphs
        return advanceWidth[advanceWidth.length - 1];
    }

    /**
     * Returns the left side bearing for the given GID.
     *
     * @param gid GID
     */
    public int getLeftSideBearing(int gid)
    {
        if (leftSideBearing.length == 0)
        {
            return 0;
        }
        if (gid < numHMetrics)
        {
            return leftSideBearing[gid];
        }
        return nonHorizontalLeftSideBearing[gid - numHMetrics];
    }
}
Java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.asterix.external.library.java.base;

import org.apache.asterix.external.api.IJObject;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.util.container.IObjectPool;

/**
 * Base class for complex (composite) {@link IJObject} implementations.
 * Holds the object pool that subclasses use to allocate nested values.
 */
public abstract class JComplexObject<T> implements IJObject<T> {

    // Pool for allocating/recycling nested IJObject instances.
    protected IObjectPool<IJObject, IAType> pool;

    public void setPool(IObjectPool<IJObject, IAType> pool) {
        this.pool = pool;
    }
}
Java
/* Copyright AppsCode Inc. and Contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1alpha1 import ( "fmt" "strings" ) const ( KindDeployment = "Deployment" KindReplicaSet = "ReplicaSet" KindReplicationController = "ReplicationController" KindStatefulSet = "StatefulSet" KindDaemonSet = "DaemonSet" KindPod = "Pod" KindPersistentVolumeClaim = "PersistentVolumeClaim" KindAppBinding = "AppBinding" KindDeploymentConfig = "DeploymentConfig" KindSecret = "Secret" ) // LocalTypedReference contains enough information to let you inspect or modify the referred object. type LocalTypedReference struct { // Kind of the referent. // More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds // +optional Kind string `json:"kind,omitempty" protobuf:"bytes,1,opt,name=kind"` // Name of the referent. // More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names // +optional Name string `json:"name,omitempty" protobuf:"bytes,2,opt,name=name"` // API version of the referent. 
// +optional APIVersion string `json:"apiVersion,omitempty" protobuf:"bytes,3,opt,name=apiVersion"` } func (workload *LocalTypedReference) Canonicalize() error { if workload.Name == "" || workload.Kind == "" { return fmt.Errorf("missing workload name or kind") } switch strings.ToLower(workload.Kind) { case "deployments", "deployment", "deploy": workload.Kind = KindDeployment case "replicasets", "replicaset", "rs": workload.Kind = KindReplicaSet case "replicationcontrollers", "replicationcontroller", "rc": workload.Kind = KindReplicationController case "statefulsets", "statefulset": workload.Kind = KindStatefulSet case "daemonsets", "daemonset", "ds": workload.Kind = KindDaemonSet default: return fmt.Errorf(`unrecognized workload "Kind" %v`, workload.Kind) } return nil } func (workload LocalTypedReference) GetRepositoryCRDName(podName, nodeName string) string { name := "" switch workload.Kind { case KindDeployment, KindReplicaSet, KindReplicationController: name = strings.ToLower(workload.Kind) + "." + workload.Name case KindStatefulSet: name = strings.ToLower(workload.Kind) + "." + podName case KindDaemonSet: name = strings.ToLower(workload.Kind) + "." + workload.Name + "." 
+ nodeName } return name } func (workload LocalTypedReference) HostnamePrefix(podName, nodeName string) (hostname, prefix string, err error) { if err := workload.Canonicalize(); err != nil { return "", "", err } if workload.Name == "" || workload.Kind == "" { return "", "", fmt.Errorf("missing workload name or kind") } switch workload.Kind { case KindDeployment, KindReplicaSet, KindReplicationController: return workload.Name, strings.ToLower(workload.Kind) + "/" + workload.Name, nil case KindStatefulSet: if podName == "" { return "", "", fmt.Errorf("missing podName for %s", KindStatefulSet) } return podName, strings.ToLower(workload.Kind) + "/" + podName, nil case KindDaemonSet: if nodeName == "" { return "", "", fmt.Errorf("missing nodeName for %s", KindDaemonSet) } return nodeName, strings.ToLower(workload.Kind) + "/" + workload.Name + "/" + nodeName, nil default: return "", "", fmt.Errorf(`unrecognized workload "Kind" %v`, workload.Kind) } } func StatefulSetPodName(appName, podOrdinal string) (string, error) { if appName == "" || podOrdinal == "" { return "", fmt.Errorf("missing appName or podOrdinal") } return appName + "-" + podOrdinal, nil }
Java
package com.cloudhopper.commons.charset.demo;

/*
 * #%L
 * ch-commons-charset
 * %%
 * Copyright (C) 2012 Cloudhopper by Twitter
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import com.cloudhopper.commons.charset.CharsetUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Demo: normalizes a string containing non-UTF-8-safe characters against the
 * UTF-8 charset and logs the before/after values.
 *
 * @author joelauer
 */
public class Charset5Main {
    private static final Logger logger = LoggerFactory.getLogger(Charset5Main.class);

    static public void main(String[] args) throws Exception {
        String sourceString = "h\u6025\u20ACllo";

        String targetString = CharsetUtil.normalize(sourceString, CharsetUtil.CHARSET_UTF_8);

        // FIX: use SLF4J parameterized logging instead of string concatenation,
        // so the message is only built when DEBUG is enabled.
        logger.debug("source string: {}", sourceString);
        logger.debug("target string: {}", targetString);
    }
}
Java
/* COPYRIGHT (c) 2014 Umut Acar, Arthur Chargueraud, and Michael * Rainey * All rights reserved. * * \file adjlist.hpp * \brief Adjacency-list graph format * */ #ifndef _PASL_GRAPH_ADJLIST_H_ #define _PASL_GRAPH_ADJLIST_H_ #include "../../graph/include/graph.hpp" /***********************************************************************/ namespace pasl { namespace graph { /*---------------------------------------------------------------------*/ /* Symmetric vertex */ template <class Vertex_id_bag> class symmetric_vertex { public: typedef Vertex_id_bag vtxid_bag_type; typedef typename vtxid_bag_type::value_type vtxid_type; symmetric_vertex() { } symmetric_vertex(vtxid_bag_type neighbors) : neighbors(neighbors) { } vtxid_bag_type neighbors; vtxid_type get_in_neighbor(vtxid_type j) const { return neighbors[j]; } vtxid_type get_out_neighbor(vtxid_type j) const { return neighbors[j]; } vtxid_type* get_in_neighbors() const { return neighbors.data(); } vtxid_type* get_out_neighbors() const { return neighbors.data(); } void set_in_neighbor(vtxid_type j, vtxid_type nbr) { neighbors[j] = nbr; } void set_out_neighbor(vtxid_type j, vtxid_type nbr) { neighbors[j] = nbr; } vtxid_type get_in_degree() const { return vtxid_type(neighbors.size()); } vtxid_type get_out_degree() const { return vtxid_type(neighbors.size()); } void set_in_degree(vtxid_type j) { neighbors.alloc(j); } // todo: use neighbors.resize() void set_out_degree(vtxid_type j) { neighbors.alloc(j); } void swap_in_neighbors(vtxid_bag_type& other) { neighbors.swap(other); } void swap_out_neighbors(vtxid_bag_type& other) { neighbors.swap(other); } void check(vtxid_type nb_vertices) const { #ifndef NDEBUG for (vtxid_type i = 0; i < neighbors.size(); i++) check_vertex(neighbors[i], nb_vertices); #endif } }; /*---------------------------------------------------------------------*/ /* Asymmetric vertex */ template <class Vertex_id_bag> class asymmetric_vertex { public: typedef Vertex_id_bag vtxid_bag_type; typedef typename 
vtxid_bag_type::value_type vtxid_type; vtxid_bag_type in_neighbors; vtxid_bag_type out_neighbors; vtxid_type get_in_neighbor(vtxid_type j) const { return in_neighbors[j]; } vtxid_type get_out_neighbor(vtxid_type j) const { return out_neighbors[j]; } vtxid_type* get_in_neighbors() const { return in_neighbors.data(); } vtxid_type* get_out_neighbors() const { return out_neighbors.data(); } void set_in_neighbor(vtxid_type j, vtxid_type nbr) { in_neighbors[j] = nbr; } void set_out_neighbor(vtxid_type j, vtxid_type nbr) { out_neighbors[j] = nbr; } vtxid_type get_in_degree() const { return vtxid_type(in_neighbors.size()); } vtxid_type get_out_degree() const { return vtxid_type(out_neighbors.size()); } void set_in_degree(vtxid_type j) { in_neighbors.alloc(j); } void set_out_degree(vtxid_type j) { out_neighbors.alloc(j); } void swap_in_neighbors(vtxid_bag_type& other) { in_neighbors.swap(other); } void swap_out_neighbors(vtxid_bag_type& other) { out_neighbors.swap(other); } void check(vtxid_type nb_vertices) const { for (vtxid_type i = 0; i < in_neighbors.size(); i++) check_vertex(in_neighbors[i], nb_vertices); for (vtxid_type i = 0; i < out_neighbors.size(); i++) check_vertex(out_neighbors[i], nb_vertices); } }; /*---------------------------------------------------------------------*/ /* Adjacency-list format */ template <class Adjlist_seq> class adjlist { public: typedef Adjlist_seq adjlist_seq_type; typedef typename adjlist_seq_type::value_type vertex_type; typedef typename vertex_type::vtxid_bag_type::value_type vtxid_type; typedef typename adjlist_seq_type::alias_type adjlist_seq_alias_type; typedef adjlist<adjlist_seq_alias_type> alias_type; edgeid_type nb_edges; adjlist_seq_type adjlists; adjlist() : nb_edges(0) { } adjlist(edgeid_type nb_edges) : nb_edges(nb_edges) { } vtxid_type get_nb_vertices() const { return vtxid_type(adjlists.size()); } void check() const { #ifndef NDEBUG for (vtxid_type i = 0; i < adjlists.size(); i++) adjlists[i].check(get_nb_vertices()); 
size_t m = 0; for (vtxid_type i = 0; i < adjlists.size(); i++) m += adjlists[i].get_in_degree(); assert(m == nb_edges); m = 0; for (vtxid_type i = 0; i < adjlists.size(); i++) m += adjlists[i].get_out_degree(); assert(m == nb_edges); #endif } }; /*---------------------------------------------------------------------*/ /* Equality operators */ template <class Vertex_id_bag> bool operator==(const symmetric_vertex<Vertex_id_bag>& v1, const symmetric_vertex<Vertex_id_bag>& v2) { using vtxid_type = typename symmetric_vertex<Vertex_id_bag>::vtxid_type; if (v1.get_out_degree() != v2.get_out_degree()) return false; for (vtxid_type i = 0; i < v1.get_out_degree(); i++) if (v1.get_out_neighbor(i) != v2.get_out_neighbor(i)) return false; return true; } template <class Vertex_id_bag> bool operator!=(const symmetric_vertex<Vertex_id_bag>& v1, const symmetric_vertex<Vertex_id_bag>& v2) { return ! (v1 == v2); } template <class Adjlist_seq> bool operator==(const adjlist<Adjlist_seq>& g1, const adjlist<Adjlist_seq>& g2) { using vtxid_type = typename adjlist<Adjlist_seq>::vtxid_type; if (g1.get_nb_vertices() != g2.get_nb_vertices()) return false; if (g1.nb_edges != g2.nb_edges) return false; for (vtxid_type i = 0; i < g1.get_nb_vertices(); i++) if (g1.adjlists[i] != g2.adjlists[i]) return false; return true; } template <class Adjlist_seq> bool operator!=(const adjlist<Adjlist_seq>& g1, const adjlist<Adjlist_seq>& g2) { return ! 
(g1 == g2); } /*---------------------------------------------------------------------*/ /* Flat adjacency-list format */ template <class Vertex_id, bool Is_alias = false> class flat_adjlist_seq { public: typedef flat_adjlist_seq<Vertex_id> self_type; typedef Vertex_id vtxid_type; typedef size_t size_type; typedef data::pointer_seq<vtxid_type> vertex_seq_type; typedef symmetric_vertex<vertex_seq_type> value_type; typedef flat_adjlist_seq<vtxid_type, true> alias_type; char* underlying_array; vtxid_type* offsets; vtxid_type nb_offsets; vtxid_type* edges; flat_adjlist_seq() : underlying_array(NULL), offsets(NULL), nb_offsets(0), edges(NULL) { } flat_adjlist_seq(const flat_adjlist_seq& other) { if (Is_alias) { underlying_array = other.underlying_array; offsets = other.offsets; nb_offsets = other.nb_offsets; edges = other.edges; } else { util::atomic::die("todo"); } } //! \todo instead of using Is_alias, pass either ptr_seq or array_seq as underlying_array ~flat_adjlist_seq() { if (! Is_alias) clear(); } void get_alias(alias_type& alias) const { alias.underlying_array = NULL; alias.offsets = offsets; alias.nb_offsets = nb_offsets; alias.edges = edges; } alias_type get_alias() const { alias_type alias; alias.underlying_array = NULL; alias.offsets = offsets; alias.nb_offsets = nb_offsets; alias.edges = edges; return alias; } void clear() { if (underlying_array != NULL) data::myfree(underlying_array); offsets = NULL; edges = NULL; } vtxid_type degree(vtxid_type v) const { assert(v >= 0); assert(v < size()); return offsets[v + 1] - offsets[v]; } value_type operator[](vtxid_type ix) const { assert(ix >= 0); assert(ix < size()); return value_type(vertex_seq_type(&edges[offsets[ix]], degree(ix))); } vtxid_type size() const { return nb_offsets - 1; } void swap(self_type& other) { std::swap(underlying_array, other.underlying_array); std::swap(offsets, other.offsets); std::swap(nb_offsets, other.nb_offsets); std::swap(edges, other.edges); } void alloc(size_type) { 
util::atomic::die("unsupported"); } void init(char* bytes, vtxid_type nb_vertices, edgeid_type nb_edges) { nb_offsets = nb_vertices + 1; underlying_array = bytes; offsets = (vtxid_type*)bytes; edges = &offsets[nb_offsets]; } value_type* data() { util::atomic::die("unsupported"); return NULL; } }; template <class Vertex_id, bool Is_alias = false> using flat_adjlist = adjlist<flat_adjlist_seq<Vertex_id, Is_alias>>; template <class Vertex_id> using flat_adjlist_alias = flat_adjlist<Vertex_id, true>; } // end namespace } // end namespace /***********************************************************************/ #endif /*! _PASL_GRAPH_ADJLIST_H_ */
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="pt"> <head> <!-- Generated by javadoc (version 1.7.0_71) on Tue Jun 16 10:36:54 BRT 2015 --> <meta http-equiv="Content-Type" content="text/html" charset="UTF-8"> <title>Uses of Class opennlp.tools.ml.model.SequenceStreamEventStream (Apache OpenNLP Tools 1.6.0 API)</title> <meta name="date" content="2015-06-16"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class opennlp.tools.ml.model.SequenceStreamEventStream (Apache OpenNLP Tools 1.6.0 API)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../opennlp/tools/ml/model/SequenceStreamEventStream.html" title="class in opennlp.tools.ml.model">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?opennlp/tools/ml/model/class-use/SequenceStreamEventStream.html" target="_top">Frames</a></li> <li><a href="SequenceStreamEventStream.html" target="_top">No Frames</a></li> </ul> <ul class="navList" 
id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class opennlp.tools.ml.model.SequenceStreamEventStream" class="title">Uses of Class<br>opennlp.tools.ml.model.SequenceStreamEventStream</h2> </div> <div class="classUseContainer">No usage of opennlp.tools.ml.model.SequenceStreamEventStream</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../opennlp/tools/ml/model/SequenceStreamEventStream.html" title="class in opennlp.tools.ml.model">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?opennlp/tools/ml/model/class-use/SequenceStreamEventStream.html" target="_top">Frames</a></li> <li><a href="SequenceStreamEventStream.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script 
type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2015 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p> </body> </html>
Java
package org.anddev.andengine.opengl.texture; import java.util.*; import org.anddev.andengine.opengl.texture.source.*; import org.anddev.andengine.util.*; import org.anddev.andengine.opengl.texture.builder.*; import android.graphics.*; public class BuildableTexture extends Texture { private final ArrayList<TextureSourceWithWithLocationCallback> mTextureSourcesToPlace; public BuildableTexture(final int n, final int n2) { super(n, n2, TextureOptions.DEFAULT, null); this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>(); } public BuildableTexture(final int n, final int n2, final ITextureStateListener textureStateListener) { super(n, n2, TextureOptions.DEFAULT, textureStateListener); this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>(); } public BuildableTexture(final int n, final int n2, final TextureOptions textureOptions) throws IllegalArgumentException { super(n, n2, textureOptions, null); this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>(); } public BuildableTexture(final int n, final int n2, final TextureOptions textureOptions, final ITextureStateListener textureStateListener) throws IllegalArgumentException { super(n, n2, textureOptions, textureStateListener); this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>(); } @Deprecated @Override public TextureSourceWithLocation addTextureSource(final ITextureSource textureSource, final int n, final int n2) { return super.addTextureSource(textureSource, n, n2); } public void addTextureSource(final ITextureSource textureSource, final Callback<TextureSourceWithLocation> callback) { this.mTextureSourcesToPlace.add(new TextureSourceWithWithLocationCallback(textureSource, callback)); } public void build(final ITextureBuilder textureBuilder) throws ITextureBuilder.TextureSourcePackingException { textureBuilder.pack(this, this.mTextureSourcesToPlace); this.mTextureSourcesToPlace.clear(); 
this.mUpdateOnHardwareNeeded = true; } @Override public void clearTextureSources() { super.clearTextureSources(); this.mTextureSourcesToPlace.clear(); } public void removeTextureSource(final ITextureSource textureSource) { final ArrayList<TextureSourceWithWithLocationCallback> mTextureSourcesToPlace = this.mTextureSourcesToPlace; for (int i = -1 + mTextureSourcesToPlace.size(); i >= 0; --i) { if (mTextureSourcesToPlace.get(i).mTextureSource == textureSource) { mTextureSourcesToPlace.remove(i); this.mUpdateOnHardwareNeeded = true; return; } } } public static class TextureSourceWithWithLocationCallback implements ITextureSource { private final Callback<TextureSourceWithLocation> mCallback; private final ITextureSource mTextureSource; public TextureSourceWithWithLocationCallback(final ITextureSource mTextureSource, final Callback<TextureSourceWithLocation> mCallback) { super(); this.mTextureSource = mTextureSource; this.mCallback = mCallback; } @Override public TextureSourceWithWithLocationCallback clone() { return null; } public Callback<TextureSourceWithLocation> getCallback() { return this.mCallback; } @Override public int getHeight() { return this.mTextureSource.getHeight(); } public ITextureSource getTextureSource() { return this.mTextureSource; } @Override public int getWidth() { return this.mTextureSource.getWidth(); } @Override public Bitmap onLoadBitmap() { return this.mTextureSource.onLoadBitmap(); } @Override public String toString() { return this.mTextureSource.toString(); } } }
Java
/** vim: et:ts=4:sw=4:sts=4 * @license RequireJS 2.1.15 Copyright (c) 2010-2014, The Dojo Foundation All Rights Reserved. * Available via the MIT or new BSD license. * see: http://github.com/jrburke/requirejs for details */ //Not using strict: uneven strict support in browsers, #392, and causes //problems with requirejs.exec()/transpiler plugins that may not be strict. /*jslint regexp: true, nomen: true, sloppy: true */ /*global window, navigator, document, importScripts, setTimeout, opera */ var requirejs, require, define; (function (global) { var req, s, head, baseElement, dataMain, src, interactiveScript, currentlyAddingScript, mainScript, subPath, version = '2.1.15', commentRegExp = /(\/\*([\s\S]*?)\*\/|([^:]|^)\/\/(.*)$)/mg, cjsRequireRegExp = /[^.]\s*require\s*\(\s*["']([^'"\s]+)["']\s*\)/g, jsSuffixRegExp = /\.js$/, currDirRegExp = /^\.\//, op = Object.prototype, ostring = op.toString, hasOwn = op.hasOwnProperty, ap = Array.prototype, apsp = ap.splice, isBrowser = !!(typeof window !== 'undefined' && typeof navigator !== 'undefined' && window.document), isWebWorker = !isBrowser && typeof importScripts !== 'undefined', //PS3 indicates loaded and complete, but need to wait for complete //specifically. Sequence is 'loading', 'loaded', execution, // then 'complete'. The UA check is unfortunate, but not sure how //to feature test w/o causing perf issues. readyRegExp = isBrowser && navigator.platform === 'PLAYSTATION 3' ? /^complete$/ : /^(complete|loaded)$/, defContextName = '_', //Oh the tragedy, detecting opera. See the usage of isOpera for reason. isOpera = typeof opera !== 'undefined' && opera.toString() === '[object Opera]', contexts = {}, cfg = {}, globalDefQueue = [], useInteractive = false; function isFunction(it) { return ostring.call(it) === '[object Function]'; } function isArray(it) { return ostring.call(it) === '[object Array]'; } /** * Helper function for iterating over an array. If the func returns * a true value, it will break out of the loop. 
*/ function each(ary, func) { if (ary) { var i; for (i = 0; i < ary.length; i += 1) { if (ary[i] && func(ary[i], i, ary)) { break; } } } } /** * Helper function for iterating over an array backwards. If the func * returns a true value, it will break out of the loop. */ function eachReverse(ary, func) { if (ary) { var i; for (i = ary.length - 1; i > -1; i -= 1) { if (ary[i] && func(ary[i], i, ary)) { break; } } } } function hasProp(obj, prop) { return hasOwn.call(obj, prop); } function getOwn(obj, prop) { return hasProp(obj, prop) && obj[prop]; } /** * Cycles over properties in an object and calls a function for each * property value. If the function returns a truthy value, then the * iteration is stopped. */ function eachProp(obj, func) { var prop; for (prop in obj) { if (hasProp(obj, prop)) { if (func(obj[prop], prop)) { break; } } } } /** * Simple function to mix in properties from source into target, * but only if target does not already have a property of the same name. */ function mixin(target, source, force, deepStringMixin) { if (source) { eachProp(source, function (value, prop) { if (force || !hasProp(target, prop)) { if (deepStringMixin && typeof value === 'object' && value && !isArray(value) && !isFunction(value) && !(value instanceof RegExp)) { if (!target[prop]) { target[prop] = {}; } mixin(target[prop], value, force, deepStringMixin); } else { target[prop] = value; } } }); } return target; } //Similar to Function.prototype.bind, but the 'this' object is specified //first, since it is easier to read/figure out what 'this' will be. function bind(obj, fn) { return function () { return fn.apply(obj, arguments); }; } function scripts() { return document.getElementsByTagName('script'); } function defaultOnError(err) { throw err; } //Allow getting a global that is expressed in //dot notation, like 'a.b.c'. 
function getGlobal(value) { if (!value) { return value; } var g = global; each(value.split('.'), function (part) { g = g[part]; }); return g; } /** * Constructs an error with a pointer to an URL with more information. * @param {String} id the error ID that maps to an ID on a web page. * @param {String} message human readable error. * @param {Error} [err] the original error, if there is one. * * @returns {Error} */ function makeError(id, msg, err, requireModules) { var e = new Error(msg + '\nhttp://requirejs.org/docs/errors.html#' + id); e.requireType = id; e.requireModules = requireModules; if (err) { e.originalError = err; } return e; } if (typeof define !== 'undefined') { //If a define is already in play via another AMD loader, //do not overwrite. return; } if (typeof requirejs !== 'undefined') { if (isFunction(requirejs)) { //Do not overwrite an existing requirejs instance. return; } cfg = requirejs; requirejs = undefined; } //Allow for a require config object if (typeof require !== 'undefined' && !isFunction(require)) { //assume it is a config object. cfg = require; require = undefined; } function newContext(contextName) { var inCheckLoaded, Module, context, handlers, checkLoadedTimeoutId, config = { //Defaults. Do not set a default for map //config to speed up normalize(), which //will run faster if there is no default. waitSeconds: 7, baseUrl: './', paths: {}, bundles: {}, pkgs: {}, shim: {}, config: {} }, registry = {}, //registry of just enabled modules, to speed //cycle breaking code when lots of modules //are registered, but not activated. enabledRegistry = {}, undefEvents = {}, defQueue = [], defined = {}, urlFetched = {}, bundlesMap = {}, requireCounter = 1, unnormalizedCounter = 1; /** * Trims the . and .. from an array of path segments. * It will keep a leading path segment if a .. will become * the first path segment, to help with module name lookups, * which act like paths, but can be remapped. 
But the end result, * all paths that use this function should look normalized. * NOTE: this method MODIFIES the input array. * @param {Array} ary the array of path segments. */ function trimDots(ary) { var i, part; for (i = 0; i < ary.length; i++) { part = ary[i]; if (part === '.') { ary.splice(i, 1); i -= 1; } else if (part === '..') { // If at the start, or previous value is still .., // keep them so that when converted to a path it may // still work when converted to a path, even though // as an ID it is less than ideal. In larger point // releases, may be better to just kick out an error. if (i === 0 || (i == 1 && ary[2] === '..') || ary[i - 1] === '..') { continue; } else if (i > 0) { ary.splice(i - 1, 2); i -= 2; } } } } /** * Given a relative module name, like ./something, normalize it to * a real name that can be mapped to a path. * @param {String} name the relative name * @param {String} baseName a real name that the name arg is relative * to. * @param {Boolean} applyMap apply the map config to the value. Should * only be done if this normalization is for a dependency ID. * @returns {String} normalized name */ function normalize(name, baseName, applyMap) { var pkgMain, mapValue, nameParts, i, j, nameSegment, lastIndex, foundMap, foundI, foundStarMap, starI, normalizedBaseParts, baseParts = (baseName && baseName.split('/')), map = config.map, starMap = map && map['*']; //Adjust any relative paths. if (name) { name = name.split('/'); lastIndex = name.length - 1; // If wanting node ID compatibility, strip .js from end // of IDs. Have to do this here, and not in nameToUrl // because node allows either .js or non .js to map // to same file. if (config.nodeIdCompat && jsSuffixRegExp.test(name[lastIndex])) { name[lastIndex] = name[lastIndex].replace(jsSuffixRegExp, ''); } // Starts with a '.' so need the baseName if (name[0].charAt(0) === '.' && baseParts) { //Convert baseName to array, and lop off the last part, //so that . 
matches that 'directory' and not name of the baseName's //module. For instance, baseName of 'one/two/three', maps to //'one/two/three.js', but we want the directory, 'one/two' for //this normalization. normalizedBaseParts = baseParts.slice(0, baseParts.length - 1); name = normalizedBaseParts.concat(name); } trimDots(name); name = name.join('/'); } //Apply map config if available. if (applyMap && map && (baseParts || starMap)) { nameParts = name.split('/'); outerLoop: for (i = nameParts.length; i > 0; i -= 1) { nameSegment = nameParts.slice(0, i).join('/'); if (baseParts) { //Find the longest baseName segment match in the config. //So, do joins on the biggest to smallest lengths of baseParts. for (j = baseParts.length; j > 0; j -= 1) { mapValue = getOwn(map, baseParts.slice(0, j).join('/')); //baseName segment has config, find if it has one for //this name. if (mapValue) { mapValue = getOwn(mapValue, nameSegment); if (mapValue) { //Match, update name to the new value. foundMap = mapValue; foundI = i; break outerLoop; } } } } //Check for a star map match, but just hold on to it, //if there is a shorter segment match later in a matching //config, then favor over this star map. if (!foundStarMap && starMap && getOwn(starMap, nameSegment)) { foundStarMap = getOwn(starMap, nameSegment); starI = i; } } if (!foundMap && foundStarMap) { foundMap = foundStarMap; foundI = starI; } if (foundMap) { nameParts.splice(0, foundI, foundMap); name = nameParts.join('/'); } } // If the name points to a package's name, use // the package main instead. pkgMain = getOwn(config.pkgs, name); return pkgMain ? 
pkgMain : name; } function removeScript(name) { if (isBrowser) { each(scripts(), function (scriptNode) { if (scriptNode.getAttribute('data-requiremodule') === name && scriptNode.getAttribute('data-requirecontext') === context.contextName) { scriptNode.parentNode.removeChild(scriptNode); return true; } }); } } function hasPathFallback(id) { var pathConfig = getOwn(config.paths, id); if (pathConfig && isArray(pathConfig) && pathConfig.length > 1) { //Pop off the first array value, since it failed, and //retry pathConfig.shift(); context.require.undef(id); //Custom require that does not do map translation, since //ID is "absolute", already mapped/resolved. context.makeRequire(null, { skipMap: true })([id]); return true; } } //Turns a plugin!resource to [plugin, resource] //with the plugin being undefined if the name //did not have a plugin prefix. function splitPrefix(name) { var prefix, index = name ? name.indexOf('!') : -1; if (index > -1) { prefix = name.substring(0, index); name = name.substring(index + 1, name.length); } return [prefix, name]; } /** * Creates a module mapping that includes plugin prefix, module * name, and path. If parentModuleMap is provided it will * also normalize the name via require.normalize() * * @param {String} name the module name * @param {String} [parentModuleMap] parent module map * for the module name, used to resolve relative names. * @param {Boolean} isNormalized: is the ID already normalized. * This is true if this call is done for a define() module ID. * @param {Boolean} applyMap: apply the map config to the ID. * Should only be true if this map is for a dependency. * * @returns {Object} */ function makeModuleMap(name, parentModuleMap, isNormalized, applyMap) { var url, pluginModule, suffix, nameParts, prefix = null, parentName = parentModuleMap ? parentModuleMap.name : null, originalName = name, isDefine = true, normalizedName = ''; //If no name, then it means it is a require call, generate an //internal name. 
if (!name) { isDefine = false; name = '_@r' + (requireCounter += 1); } nameParts = splitPrefix(name); prefix = nameParts[0]; name = nameParts[1]; if (prefix) { prefix = normalize(prefix, parentName, applyMap); pluginModule = getOwn(defined, prefix); } //Account for relative paths if there is a base name. if (name) { if (prefix) { if (pluginModule && pluginModule.normalize) { //Plugin is loaded, use its normalize method. normalizedName = pluginModule.normalize(name, function (name) { return normalize(name, parentName, applyMap); }); } else { // If nested plugin references, then do not try to // normalize, as it will not normalize correctly. This // places a restriction on resourceIds, and the longer // term solution is not to normalize until plugins are // loaded and all normalizations to allow for async // loading of a loader plugin. But for now, fixes the // common uses. Details in #1131 normalizedName = name.indexOf('!') === -1 ? normalize(name, parentName, applyMap) : name; } } else { //A regular module. normalizedName = normalize(name, parentName, applyMap); //Normalized name may be a plugin ID due to map config //application in normalize. The map config values must //already be normalized, so do not need to redo that part. nameParts = splitPrefix(normalizedName); prefix = nameParts[0]; normalizedName = nameParts[1]; isNormalized = true; url = context.nameToUrl(normalizedName); } } //If the id is a plugin id that cannot be determined if it needs //normalization, stamp it with a unique ID so two matching relative //ids that may conflict can be separate. suffix = prefix && !pluginModule && !isNormalized ? '_unnormalized' + (unnormalizedCounter += 1) : ''; return { prefix: prefix, name: normalizedName, parentMap: parentModuleMap, unnormalized: !!suffix, url: url, originalName: originalName, isDefine: isDefine, id: (prefix ? prefix + '!' 
+ normalizedName : normalizedName) + suffix }; } function getModule(depMap) { var id = depMap.id, mod = getOwn(registry, id); if (!mod) { mod = registry[id] = new context.Module(depMap); } return mod; } function on(depMap, name, fn) { var id = depMap.id, mod = getOwn(registry, id); if (hasProp(defined, id) && (!mod || mod.defineEmitComplete)) { if (name === 'defined') { fn(defined[id]); } } else { mod = getModule(depMap); if (mod.error && name === 'error') { fn(mod.error); } else { mod.on(name, fn); } } } function onError(err, errback) { var ids = err.requireModules, notified = false; if (errback) { errback(err); } else { each(ids, function (id) { var mod = getOwn(registry, id); if (mod) { //Set error on module, so it skips timeout checks. mod.error = err; if (mod.events.error) { notified = true; mod.emit('error', err); } } }); if (!notified) { req.onError(err); } } } /** * Internal method to transfer globalQueue items to this context's * defQueue. */ function takeGlobalQueue() { //Push all the globalDefQueue items into the context's defQueue if (globalDefQueue.length) { //Array splice in the values since the context code has a //local var ref to defQueue, so cannot just reassign the one //on context. apsp.apply(defQueue, [defQueue.length, 0].concat(globalDefQueue)); globalDefQueue = []; } } handlers = { 'require': function (mod) { if (mod.require) { return mod.require; } else { return (mod.require = context.makeRequire(mod.map)); } }, 'exports': function (mod) { mod.usingExports = true; if (mod.map.isDefine) { if (mod.exports) { return (defined[mod.map.id] = mod.exports); } else { return (mod.exports = defined[mod.map.id] = {}); } } }, 'module': function (mod) { if (mod.module) { return mod.module; } else { return (mod.module = { id: mod.map.id, uri: mod.map.url, config: function () { return getOwn(config.config, mod.map.id) || {}; }, exports: mod.exports || (mod.exports = {}) }); } } }; function cleanRegistry(id) { //Clean up machinery used for waiting modules. 
delete registry[id]; delete enabledRegistry[id]; } function breakCycle(mod, traced, processed) { var id = mod.map.id; if (mod.error) { mod.emit('error', mod.error); } else { traced[id] = true; each(mod.depMaps, function (depMap, i) { var depId = depMap.id, dep = getOwn(registry, depId); //Only force things that have not completed //being defined, so still in the registry, //and only if it has not been matched up //in the module already. if (dep && !mod.depMatched[i] && !processed[depId]) { if (getOwn(traced, depId)) { mod.defineDep(i, defined[depId]); mod.check(); //pass false? } else { breakCycle(dep, traced, processed); } } }); processed[id] = true; } } function checkLoaded() { var err, usingPathFallback, waitInterval = config.waitSeconds * 1000, //It is possible to disable the wait interval by using waitSeconds of 0. expired = waitInterval && (context.startTime + waitInterval) < new Date().getTime(), noLoads = [], reqCalls = [], stillLoading = false, needCycleCheck = true; //Do not bother if this call was a result of a cycle break. if (inCheckLoaded) { return; } inCheckLoaded = true; //Figure out the state of all the modules. eachProp(enabledRegistry, function (mod) { var map = mod.map, modId = map.id; //Skip things that are not enabled or in error state. if (!mod.enabled) { return; } if (!map.isDefine) { reqCalls.push(mod); } if (!mod.error) { //If the module should be executed, and it has not //been inited and time is up, remember it. if (!mod.inited && expired) { if (hasPathFallback(modId)) { usingPathFallback = true; stillLoading = true; } else { noLoads.push(modId); removeScript(modId); } } else if (!mod.inited && mod.fetched && map.isDefine) { stillLoading = true; if (!map.prefix) { //No reason to keep looking for unfinished //loading. If the only stillLoading is a //plugin resource though, keep going, //because it may be that a plugin resource //is waiting on a non-plugin cycle. 
return (needCycleCheck = false); } } } }); if (expired && noLoads.length) { //If wait time expired, throw error of unloaded modules. err = makeError('timeout', 'Load timeout for modules: ' + noLoads, null, noLoads); err.contextName = context.contextName; return onError(err); } //Not expired, check for a cycle. if (needCycleCheck) { each(reqCalls, function (mod) { breakCycle(mod, {}, {}); }); } //If still waiting on loads, and the waiting load is something //other than a plugin resource, or there are still outstanding //scripts, then just try back later. if ((!expired || usingPathFallback) && stillLoading) { //Something is still waiting to load. Wait for it, but only //if a timeout is not already in effect. if ((isBrowser || isWebWorker) && !checkLoadedTimeoutId) { checkLoadedTimeoutId = setTimeout(function () { checkLoadedTimeoutId = 0; checkLoaded(); }, 50); } } inCheckLoaded = false; } Module = function (map) { this.events = getOwn(undefEvents, map.id) || {}; this.map = map; this.shim = getOwn(config.shim, map.id); this.depExports = []; this.depMaps = []; this.depMatched = []; this.pluginMaps = {}; this.depCount = 0; /* this.exports this.factory this.depMaps = [], this.enabled, this.fetched */ }; Module.prototype = { init: function (depMaps, factory, errback, options) { options = options || {}; //Do not do more inits if already done. Can happen if there //are multiple define calls for the same module. That is not //a normal, common case, but it is also not unexpected. if (this.inited) { return; } this.factory = factory; if (errback) { //Register for errors on this module. this.on('error', errback); } else if (this.events.error) { //If no errback already, but there are error listeners //on this module, set up an errback to pass to the deps. errback = bind(this, function (err) { this.emit('error', err); }); } //Do a copy of the dependency array, so that //source inputs are not modified. 
For example //"shim" deps are passed in here directly, and //doing a direct modification of the depMaps array //would affect that config. this.depMaps = depMaps && depMaps.slice(0); this.errback = errback; //Indicate this module has be initialized this.inited = true; this.ignore = options.ignore; //Could have option to init this module in enabled mode, //or could have been previously marked as enabled. However, //the dependencies are not known until init is called. So //if enabled previously, now trigger dependencies as enabled. if (options.enabled || this.enabled) { //Enable this module and dependencies. //Will call this.check() this.enable(); } else { this.check(); } }, defineDep: function (i, depExports) { //Because of cycles, defined callback for a given //export can be called more than once. if (!this.depMatched[i]) { this.depMatched[i] = true; this.depCount -= 1; this.depExports[i] = depExports; } }, fetch: function () { if (this.fetched) { return; } this.fetched = true; context.startTime = (new Date()).getTime(); var map = this.map; //If the manager is for a plugin managed resource, //ask the plugin to load it now. if (this.shim) { context.makeRequire(this.map, { enableBuildCallback: true })(this.shim.deps || [], bind(this, function () { return map.prefix ? this.callPlugin() : this.load(); })); } else { //Regular dependency. return map.prefix ? this.callPlugin() : this.load(); } }, load: function () { var url = this.map.url; //Regular dependency. if (!urlFetched[url]) { urlFetched[url] = true; context.load(this.map.id, url); } }, /** * Checks if the module is ready to define itself, and if so, * define it. 
*/ check: function () { if (!this.enabled || this.enabling) { return; } var err, cjsModule, id = this.map.id, depExports = this.depExports, exports = this.exports, factory = this.factory; if (!this.inited) { this.fetch(); } else if (this.error) { this.emit('error', this.error); } else if (!this.defining) { //The factory could trigger another require call //that would result in checking this module to //define itself again. If already in the process //of doing that, skip this work. this.defining = true; if (this.depCount < 1 && !this.defined) { if (isFunction(factory)) { //If there is an error listener, favor passing //to that instead of throwing an error. However, //only do it for define()'d modules. require //errbacks should not be called for failures in //their callbacks (#699). However if a global //onError is set, use that. if ((this.events.error && this.map.isDefine) || req.onError !== defaultOnError) { try { exports = context.execCb(id, factory, depExports, exports); } catch (e) { err = e; } } else { exports = context.execCb(id, factory, depExports, exports); } // Favor return value over exports. If node/cjs in play, // then will not have a return value anyway. Favor // module.exports assignment over exports object. if (this.map.isDefine && exports === undefined) { cjsModule = this.module; if (cjsModule) { exports = cjsModule.exports; } else if (this.usingExports) { //exports already set the defined value. exports = this.exports; } } if (err) { err.requireMap = this.map; err.requireModules = this.map.isDefine ? [this.map.id] : null; err.requireType = this.map.isDefine ? 'define' : 'require'; return onError((this.error = err)); } } else { //Just a literal value exports = factory; } this.exports = exports; if (this.map.isDefine && !this.ignore) { defined[id] = exports; if (req.onResourceLoad) { req.onResourceLoad(context, this.map, this.depMaps); } } //Clean up cleanRegistry(id); this.defined = true; } //Finished the define stage. 
Allow calling check again //to allow define notifications below in the case of a //cycle. this.defining = false; if (this.defined && !this.defineEmitted) { this.defineEmitted = true; this.emit('defined', this.exports); this.defineEmitComplete = true; } } }, callPlugin: function () { var map = this.map, id = map.id, //Map already normalized the prefix. pluginMap = makeModuleMap(map.prefix); //Mark this as a dependency for this plugin, so it //can be traced for cycles. this.depMaps.push(pluginMap); on(pluginMap, 'defined', bind(this, function (plugin) { var load, normalizedMap, normalizedMod, bundleId = getOwn(bundlesMap, this.map.id), name = this.map.name, parentName = this.map.parentMap ? this.map.parentMap.name : null, localRequire = context.makeRequire(map.parentMap, { enableBuildCallback: true }); //If current map is not normalized, wait for that //normalized name to load instead of continuing. if (this.map.unnormalized) { //Normalize the ID if the plugin allows it. if (plugin.normalize) { name = plugin.normalize(name, function (name) { return normalize(name, parentName, true); }) || ''; } //prefix and name should already be normalized, no need //for applying map config again either. normalizedMap = makeModuleMap(map.prefix + '!' + name, this.map.parentMap); on(normalizedMap, 'defined', bind(this, function (value) { this.init([], function () { return value; }, null, { enabled: true, ignore: true }); })); normalizedMod = getOwn(registry, normalizedMap.id); if (normalizedMod) { //Mark this as a dependency for this plugin, so it //can be traced for cycles. this.depMaps.push(normalizedMap); if (this.events.error) { normalizedMod.on('error', bind(this, function (err) { this.emit('error', err); })); } normalizedMod.enable(); } return; } //If a paths config, then just load that file instead to //resolve the plugin, as it is built into that paths layer. 
if (bundleId) { this.map.url = context.nameToUrl(bundleId); this.load(); return; } load = bind(this, function (value) { this.init([], function () { return value; }, null, { enabled: true }); }); load.error = bind(this, function (err) { this.inited = true; this.error = err; err.requireModules = [id]; //Remove temp unnormalized modules for this module, //since they will never be resolved otherwise now. eachProp(registry, function (mod) { if (mod.map.id.indexOf(id + '_unnormalized') === 0) { cleanRegistry(mod.map.id); } }); onError(err); }); //Allow plugins to load other code without having to know the //context or how to 'complete' the load. load.fromText = bind(this, function (text, textAlt) { /*jslint evil: true */ var moduleName = map.name, moduleMap = makeModuleMap(moduleName), hasInteractive = useInteractive; //As of 2.1.0, support just passing the text, to reinforce //fromText only being called once per resource. Still //support old style of passing moduleName but discard //that moduleName in favor of the internal ref. if (textAlt) { text = textAlt; } //Turn off interactive script matching for IE for any define //calls in the text, then turn it back on at the end. if (hasInteractive) { useInteractive = false; } //Prime the system by creating a module instance for //it. getModule(moduleMap); //Transfer any config to this other module. if (hasProp(config.config, id)) { config.config[moduleName] = config.config[id]; } try { req.exec(text); } catch (e) { return onError(makeError('fromtexteval', 'fromText eval for ' + id + ' failed: ' + e, e, [id])); } if (hasInteractive) { useInteractive = true; } //Mark this as a dependency for the plugin //resource this.depMaps.push(moduleMap); //Support anonymous modules. context.completeLoad(moduleName); //Bind the value of that module to the value for this //resource ID. 
localRequire([moduleName], load); }); //Use parentName here since the plugin's name is not reliable, //could be some weird string with no path that actually wants to //reference the parentName's path. plugin.load(map.name, localRequire, load, config); })); context.enable(pluginMap, this); this.pluginMaps[pluginMap.id] = pluginMap; }, enable: function () { enabledRegistry[this.map.id] = this; this.enabled = true; //Set flag mentioning that the module is enabling, //so that immediate calls to the defined callbacks //for dependencies do not trigger inadvertent load //with the depCount still being zero. this.enabling = true; //Enable each dependency each(this.depMaps, bind(this, function (depMap, i) { var id, mod, handler; if (typeof depMap === 'string') { //Dependency needs to be converted to a depMap //and wired up to this module. depMap = makeModuleMap(depMap, (this.map.isDefine ? this.map : this.map.parentMap), false, !this.skipMap); this.depMaps[i] = depMap; handler = getOwn(handlers, depMap.id); if (handler) { this.depExports[i] = handler(this); return; } this.depCount += 1; on(depMap, 'defined', bind(this, function (depExports) { this.defineDep(i, depExports); this.check(); })); if (this.errback) { on(depMap, 'error', bind(this, this.errback)); } } id = depMap.id; mod = registry[id]; //Skip special modules like 'require', 'exports', 'module' //Also, don't call enable if it is already enabled, //important in circular dependency cases. 
if (!hasProp(handlers, id) && mod && !mod.enabled) { context.enable(depMap, this); } })); //Enable each plugin that is used in //a dependency eachProp(this.pluginMaps, bind(this, function (pluginMap) { var mod = getOwn(registry, pluginMap.id); if (mod && !mod.enabled) { context.enable(pluginMap, this); } })); this.enabling = false; this.check(); }, on: function (name, cb) { var cbs = this.events[name]; if (!cbs) { cbs = this.events[name] = []; } cbs.push(cb); }, emit: function (name, evt) { each(this.events[name], function (cb) { cb(evt); }); if (name === 'error') { //Now that the error handler was triggered, remove //the listeners, since this broken Module instance //can stay around for a while in the registry. delete this.events[name]; } } }; function callGetModule(args) { //Skip modules already defined. if (!hasProp(defined, args[0])) { getModule(makeModuleMap(args[0], null, true)).init(args[1], args[2]); } } function removeListener(node, func, name, ieName) { //Favor detachEvent because of IE9 //issue, see attachEvent/addEventListener comment elsewhere //in this file. if (node.detachEvent && !isOpera) { //Probably IE. If not it will throw an error, which will be //useful to know. if (ieName) { node.detachEvent(ieName, func); } } else { node.removeEventListener(name, func, false); } } /** * Given an event from a script node, get the requirejs info from it, * and then removes the event listeners on the node. * @param {Event} evt * @returns {Object} */ function getScriptData(evt) { //Using currentTarget instead of target for Firefox 2.0's sake. Not //all old browsers will be supported, but this one was easy enough //to support and still makes sense. var node = evt.currentTarget || evt.srcElement; //Remove the listeners once here. 
removeListener(node, context.onScriptLoad, 'load', 'onreadystatechange'); removeListener(node, context.onScriptError, 'error'); return { node: node, id: node && node.getAttribute('data-requiremodule') }; } function intakeDefines() { var args; //Any defined modules in the global queue, intake them now. takeGlobalQueue(); //Make sure any remaining defQueue items get properly processed. while (defQueue.length) { args = defQueue.shift(); if (args[0] === null) { return onError(makeError('mismatch', 'Mismatched anonymous define() module: ' + args[args.length - 1])); } else { //args are id, deps, factory. Should be normalized by the //define() function. callGetModule(args); } } } context = { config: config, contextName: contextName, registry: registry, defined: defined, urlFetched: urlFetched, defQueue: defQueue, Module: Module, makeModuleMap: makeModuleMap, nextTick: req.nextTick, onError: onError, /** * Set a configuration for the context. * @param {Object} cfg config object to integrate. */ configure: function (cfg) { //Make sure the baseUrl ends in a slash. if (cfg.baseUrl) { if (cfg.baseUrl.charAt(cfg.baseUrl.length - 1) !== '/') { cfg.baseUrl += '/'; } } //Save off the paths since they require special processing, //they are additive. 
        var shim = config.shim,
            //Config sections that are merged object-by-object instead of
            //being replaced wholesale.
            objs = {
                paths: true,
                bundles: true,
                config: true,
                map: true
            };

        //Integrate each incoming config property: properties listed in objs
        //are deep-merged into existing config, everything else overwrites.
        eachProp(cfg, function (value, prop) {
            if (objs[prop]) {
                if (!config[prop]) {
                    config[prop] = {};
                }
                mixin(config[prop], value, true, true);
            } else {
                config[prop] = value;
            }
        });

        //Reverse map the bundles
        if (cfg.bundles) {
            eachProp(cfg.bundles, function (value, prop) {
                each(value, function (v) {
                    if (v !== prop) {
                        bundlesMap[v] = prop;
                    }
                });
            });
        }

        //Merge shim
        if (cfg.shim) {
            eachProp(cfg.shim, function (value, id) {
                //Normalize the structure
                if (isArray(value)) {
                    value = {
                        deps: value
                    };
                }
                if ((value.exports || value.init) && !value.exportsFn) {
                    value.exportsFn = context.makeShimExports(value);
                }
                shim[id] = value;
            });
            config.shim = shim;
        }

        //Adjust packages if necessary.
        if (cfg.packages) {
            each(cfg.packages, function (pkgObj) {
                var location, name;

                pkgObj = typeof pkgObj === 'string' ? { name: pkgObj } : pkgObj;

                name = pkgObj.name;
                location = pkgObj.location;
                if (location) {
                    config.paths[name] = pkgObj.location;
                }

                //Save pointer to main module ID for pkg name.
                //Remove leading dot in main, so main paths are normalized,
                //and remove any trailing .js, since different package
                //envs have different conventions: some use a module name,
                //some use a file name.
                config.pkgs[name] = pkgObj.name + '/' + (pkgObj.main || 'main')
                    .replace(currDirRegExp, '')
                    .replace(jsSuffixRegExp, '');
            });
        }

        //If there are any "waiting to execute" modules in the registry,
        //update the maps for them, since their info, like URLs to load,
        //may have changed.
        eachProp(registry, function (mod, id) {
            //If module already has init called, since it is too
            //late to modify them, and ignore unnormalized ones
            //since they are transient.
            if (!mod.inited && !mod.map.unnormalized) {
                mod.map = makeModuleMap(id);
            }
        });

        //If a deps array or a config callback is specified, then call
        //require with those args. This is useful when require is defined as a
        //config object before require.js is loaded.
        if (cfg.deps || cfg.callback) {
            context.require(cfg.deps || [], cfg.callback);
        }
    },

    //Builds the exports function for a shim config entry: runs init (if any)
    //and otherwise falls back to reading the named global export.
    makeShimExports: function (value) {
        function fn() {
            var ret;
            if (value.init) {
                ret = value.init.apply(global, arguments);
            }
            return ret || (value.exports && getGlobal(value.exports));
        }
        return fn;
    },

    //Creates a require function bound to relMap (the module asking for the
    //dependencies); options may enable build-tool specific behavior.
    makeRequire: function (relMap, options) {
        options = options || {};

        function localRequire(deps, callback, errback) {
            var id, map, requireMod;

            if (options.enableBuildCallback && callback && isFunction(callback)) {
                callback.__requireJsBuild = true;
            }

            if (typeof deps === 'string') {
                if (isFunction(callback)) {
                    //Invalid call
                    return onError(makeError('requireargs', 'Invalid require call'), errback);
                }

                //If require|exports|module are requested, get the
                //value for them from the special handlers. Caveat:
                //this only works while module is being defined.
                if (relMap && hasProp(handlers, deps)) {
                    return handlers[deps](registry[relMap.id]);
                }

                //Synchronous access to one module. If require.get is
                //available (as in the Node adapter), prefer that.
                if (req.get) {
                    return req.get(context, deps, relMap, localRequire);
                }

                //Normalize module name, if it contains . or ..
                map = makeModuleMap(deps, relMap, false, true);
                id = map.id;

                if (!hasProp(defined, id)) {
                    return onError(makeError('notloaded', 'Module name "' + id +
                        '" has not been loaded yet for context: ' + contextName +
                        (relMap ? '' : '. Use require([])')));
                }
                return defined[id];
            }

            //Grab defines waiting in the global queue.
            intakeDefines();

            //Mark all the dependencies as needing to be loaded.
            context.nextTick(function () {
                //Some defines could have been added since the
                //require call, collect them.
                intakeDefines();

                requireMod = getModule(makeModuleMap(null, relMap));

                //Store if map config should be applied to this require
                //call for dependencies.
                requireMod.skipMap = options.skipMap;

                requireMod.init(deps, callback, errback, {
                    enabled: true
                });

                checkLoaded();
            });

            return localRequire;
        }

        mixin(localRequire, {
            isBrowser: isBrowser,

            /**
             * Converts a module name + .extension into an URL path.
             * *Requires* the use of a module name. It does not support using
             * plain URLs like nameToUrl.
             */
            toUrl: function (moduleNamePlusExt) {
                var ext,
                    index = moduleNamePlusExt.lastIndexOf('.'),
                    segment = moduleNamePlusExt.split('/')[0],
                    isRelative = segment === '.' || segment === '..';

                //Have a file extension alias, and it is not the
                //dots from a relative path.
                if (index !== -1 && (!isRelative || index > 1)) {
                    ext = moduleNamePlusExt.substring(index, moduleNamePlusExt.length);
                    moduleNamePlusExt = moduleNamePlusExt.substring(0, index);
                }

                return context.nameToUrl(normalize(moduleNamePlusExt,
                    relMap && relMap.id, true), ext, true);
            },

            //True if the id has finished defining in this context.
            defined: function (id) {
                return hasProp(defined, makeModuleMap(id, relMap, false, true).id);
            },

            //True if the id is defined or at least registered (in flight).
            specified: function (id) {
                id = makeModuleMap(id, relMap, false, true).id;
                return hasProp(defined, id) || hasProp(registry, id);
            }
        });

        //Only allow undef on top level require calls
        if (!relMap) {
            localRequire.undef = function (id) {
                //Bind any waiting define() calls to this context,
                //fix for #408
                takeGlobalQueue();

                var map = makeModuleMap(id, relMap, true),
                    mod = getOwn(registry, id);

                removeScript(id);

                delete defined[id];
                delete urlFetched[map.url];
                delete undefEvents[id];

                //Clean queued defines too. Go backwards
                //in array so that the splices do not
                //mess up the iteration.
                eachReverse(defQueue, function (args, i) {
                    if (args[0] === id) {
                        defQueue.splice(i, 1);
                    }
                });

                if (mod) {
                    //Hold on to listeners in case the
                    //module will be attempted to be reloaded
                    //using a different config.
                    if (mod.events.defined) {
                        undefEvents[id] = mod.events;
                    }

                    cleanRegistry(id);
                }
            };
        }

        return localRequire;
    },

    /**
     * Called to enable a module if it is still in the registry
     * awaiting enablement. A second arg, parent, the parent module,
     * is passed in for context, when this method is overridden by
     * the optimizer. Not shown here to keep code compact.
     */
    enable: function (depMap) {
        var mod = getOwn(registry, depMap.id);
        if (mod) {
            getModule(depMap).enable();
        }
    },

    /**
     * Internal method used by environment adapters to complete a load event.
     * A load event could be a script load or just a load pass from a synchronous
     * load call.
     * @param {String} moduleName the name of the module to potentially complete.
     */
    completeLoad: function (moduleName) {
        var found, args, mod,
            shim = getOwn(config.shim, moduleName) || {},
            shExports = shim.exports;

        takeGlobalQueue();

        //Bind anonymous define() calls queued by this script to moduleName,
        //and process every queued define.
        while (defQueue.length) {
            args = defQueue.shift();
            if (args[0] === null) {
                args[0] = moduleName;
                //If already found an anonymous module and bound it
                //to this name, then this is some other anon module
                //waiting for its completeLoad to fire.
                if (found) {
                    break;
                }
                found = true;
            } else if (args[0] === moduleName) {
                //Found matching define call for this script!
                found = true;
            }

            callGetModule(args);
        }

        //Do this after the cycle of callGetModule in case the result
        //of those calls/init calls changes the registry.
        mod = getOwn(registry, moduleName);

        if (!found && !hasProp(defined, moduleName) && mod && !mod.inited) {
            if (config.enforceDefine && (!shExports || !getGlobal(shExports))) {
                if (hasPathFallback(moduleName)) {
                    return;
                } else {
                    return onError(makeError('nodefine',
                        'No define call for ' + moduleName,
                        null,
                        [moduleName]));
                }
            } else {
                //A script that does not call define(), so just simulate
                //the call for it.
                callGetModule([moduleName, (shim.deps || []), shim.exportsFn]);
            }
        }

        checkLoaded();
    },

    /**
     * Converts a module name to a file path. Supports cases where
     * moduleName may actually be just an URL.
     * Note that it **does not** call normalize on the moduleName,
     * it is assumed to have already been normalized. This is an
     * internal API, not a public one. Use toUrl for the public API.
     */
    nameToUrl: function (moduleName, ext, skipExt) {
        var paths, syms, i, parentModule, url,
            parentPath, bundleId,
            pkgMain = getOwn(config.pkgs, moduleName);

        //Resolve a package name to its configured main module first.
        if (pkgMain) {
            moduleName = pkgMain;
        }

        bundleId = getOwn(bundlesMap, moduleName);

        //A module served from a bundle resolves to the bundle's URL.
        if (bundleId) {
            return context.nameToUrl(bundleId, ext, skipExt);
        }

        //If a colon is in the URL, it indicates a protocol is used and it is just
        //an URL to a file, or if it starts with a slash, contains a query arg (i.e. ?)
        //or ends with .js, then assume the user meant to use an url and not a module id.
        //The slash is important for protocol-less URLs as well as full paths.
        if (req.jsExtRegExp.test(moduleName)) {
            //Just a plain path, not module name lookup, so just return it.
            //Add extension if it is included. This is a bit wonky, only non-.js things pass
            //an extension, this method probably needs to be reworked.
            url = moduleName + (ext || '');
        } else {
            //A module that needs to be converted to a path.
            paths = config.paths;

            syms = moduleName.split('/');
            //For each module name segment, see if there is a path
            //registered for it. Start with most specific name
            //and work up from it.
            for (i = syms.length; i > 0; i -= 1) {
                parentModule = syms.slice(0, i).join('/');

                parentPath = getOwn(paths, parentModule);
                if (parentPath) {
                    //If an array, it means there are a few choices,
                    //Choose the one that is desired
                    if (isArray(parentPath)) {
                        parentPath = parentPath[0];
                    }
                    syms.splice(0, i, parentPath);
                    break;
                }
            }

            //Join the path parts together, then figure out if baseUrl is needed.
            url = syms.join('/');
            url += (ext || (/^data\:|\?/.test(url) || skipExt ? '' : '.js'));
            url = (url.charAt(0) === '/' || url.match(/^[\w\+\.\-]+:/) ? '' : config.baseUrl) + url;
        }

        //Append any configured cache-busting/query args.
        return config.urlArgs ? url +
            ((url.indexOf('?') === -1 ? '?' : '&') + config.urlArgs) : url;
    },

    //Delegates to req.load. Broken out as a separate function to
    //allow overriding in the optimizer.
    load: function (id, url) {
        req.load(context, id, url);
    },

    /**
     * Executes a module callback function. Broken out as a separate function
     * solely to allow the build system to sequence the files in the built
     * layer in the right sequence.
     *
     * @private
     */
    execCb: function (name, callback, args, exports) {
        return callback.apply(exports, args);
    },

    /**
     * callback for script loads, used to check status of loading.
     *
     * @param {Event} evt the event from the browser for the script
     * that was loaded.
     */
    onScriptLoad: function (evt) {
        //Using currentTarget instead of target for Firefox 2.0's sake. Not
        //all old browsers will be supported, but this one was easy enough
        //to support and still makes sense.
        if (evt.type === 'load' ||
                (readyRegExp.test((evt.currentTarget || evt.srcElement).readyState))) {
            //Reset interactive script so a script node is not held onto for
            //to long.
            interactiveScript = null;

            //Pull out the name of the module and the context.
            var data = getScriptData(evt);
            context.completeLoad(data.id);
        }
    },

    /**
     * Callback for script errors.
     */
    onScriptError: function (evt) {
        var data = getScriptData(evt);
        if (!hasPathFallback(data.id)) {
            return onError(makeError('scripterror', 'Script error for: ' + data.id, evt, [data.id]));
        }
    }
};

context.require = context.makeRequire();
return context;
}

/**
 * Main entry point.
 *
 * If the only argument to require is a string, then the module that
 * is represented by that string is fetched for the appropriate context.
 *
 * If the first argument is an array, then it will be treated as an array
 * of dependency string names to fetch. An optional function callback can
 * be specified to execute when all of those dependencies are available.
 *
 * Make a local req variable to help Caja compliance (it assumes things
 * on a require that are not standardized), and to give a short
 * name for minification/local scope use.
 */
req = requirejs = function (deps, callback, errback, optional) {

    //Find the right context, use default
    var context, config,
        contextName = defContextName;

    // Determine if have config object in the call.
    if (!isArray(deps) && typeof deps !== 'string') {
        // deps is a config object
        config = deps;
        if (isArray(callback)) {
            // Adjust args if there are dependencies
            deps = callback;
            callback = errback;
            errback = optional;
        } else {
            deps = [];
        }
    }

    if (config && config.context) {
        contextName = config.context;
    }

    //Lazily create the named context on first use.
    context = getOwn(contexts, contextName);
    if (!context) {
        context = contexts[contextName] = req.s.newContext(contextName);
    }

    if (config) {
        context.configure(config);
    }

    return context.require(deps, callback, errback);
};

/**
 * Support require.config() to make it easier to cooperate with other
 * AMD loaders on globally agreed names.
 */
req.config = function (config) {
    return req(config);
};

/**
 * Execute something after the current tick
 * of the event loop. Override for other envs
 * that have a better solution than setTimeout.
 * @param {Function} fn function to execute later.
 */
req.nextTick = typeof setTimeout !== 'undefined' ? function (fn) {
    setTimeout(fn, 4);
} : function (fn) { fn(); };

/**
 * Export require as a global, but only if it does not already exist.
 */
if (!require) {
    require = req;
}

req.version = version;

//Used to filter out dependencies that are already paths.
req.jsExtRegExp = /^\/|:|\?|\.js$/;
req.isBrowser = isBrowser;
s = req.s = {
    contexts: contexts,
    newContext: newContext
};

//Create default context.
req({});

//Exports some context-sensitive methods on global require.
each([
    'toUrl',
    'undef',
    'defined',
    'specified'
], function (prop) {
    //Reference from contexts instead of early binding to default context,
    //so that during builds, the latest instance of the default context
    //with its config gets used.
    req[prop] = function () {
        var ctx = contexts[defContextName];
        return ctx.require[prop].apply(ctx, arguments);
    };
});

if (isBrowser) {
    head = s.head = document.getElementsByTagName('head')[0];
    //If BASE tag is in play, using appendChild is a problem for IE6.
    //When that browser dies, this can be removed. Details in this jQuery bug:
    //http://dev.jquery.com/ticket/2709
    baseElement = document.getElementsByTagName('base')[0];
    if (baseElement) {
        head = s.head = baseElement.parentNode;
    }
}

/**
 * Any errors that require explicitly generates will be passed to this
 * function. Intercept/override it if you want custom error handling.
 * @param {Error} err the error object.
 */
req.onError = defaultOnError;

/**
 * Creates the node for the load command. Only used in browser envs.
 */
req.createNode = function (config, moduleName, url) {
    var node = config.xhtml ?
            document.createElementNS('http://www.w3.org/1999/xhtml', 'html:script') :
            document.createElement('script');
    node.type = config.scriptType || 'text/javascript';
    node.charset = 'utf-8';
    node.async = true;
    return node;
};

/**
 * Does the request to load a module for the browser case.
 * Make this a separate function to allow other environments
 * to override it.
 *
 * @param {Object} context the require context to find state.
 * @param {String} moduleName the name of the module.
 * @param {Object} url the URL to the module.
 */
req.load = function (context, moduleName, url) {
    var config = (context && context.config) || {},
        node;
    if (isBrowser) {
        //In the browser so use a script tag
        node = req.createNode(config, moduleName, url);

        node.setAttribute('data-requirecontext', context.contextName);
        node.setAttribute('data-requiremodule', moduleName);

        //Set up load listener. Test attachEvent first because IE9 has
        //a subtle issue in its addEventListener and script onload firings
        //that do not match the behavior of all other browsers with
        //addEventListener support, which fire the onload event for a
        //script right after the script execution. See:
        //https://connect.microsoft.com/IE/feedback/details/648057/script-onload-event-is-not-fired-immediately-after-script-execution
        //UNFORTUNATELY Opera implements attachEvent but does not follow the script
        //script execution mode.
        if (node.attachEvent &&
                //Check if node.attachEvent is artificially added by custom script or
                //natively supported by browser
                //read https://github.com/jrburke/requirejs/issues/187
                //if we can NOT find [native code] then it must NOT natively supported.
                //in IE8, node.attachEvent does not have toString()
                //Note the test for "[native code" with no closing brace, see:
                //https://github.com/jrburke/requirejs/issues/273
                !(node.attachEvent.toString && node.attachEvent.toString().indexOf('[native code') < 0) &&
                !isOpera) {
            //Probably IE. IE (at least 6-8) do not fire
            //script onload right after executing the script, so
            //we cannot tie the anonymous define call to a name.
            //However, IE reports the script as being in 'interactive'
            //readyState at the time of the define call.
            useInteractive = true;

            node.attachEvent('onreadystatechange', context.onScriptLoad);
            //It would be great to add an error handler here to catch
            //404s in IE9+. However, onreadystatechange will fire before
            //the error handler, so that does not help. If addEventListener
            //is used, then IE will fire error before load, but we cannot
            //use that pathway given the connect.microsoft.com issue
            //mentioned above about not doing the 'script execute,
            //then fire the script load event listener before execute
            //next script' that other browsers do.
            //Best hope: IE10 fixes the issues,
            //and then destroys all installs of IE 6-9.
            //node.attachEvent('onerror', context.onScriptError);
        } else {
            node.addEventListener('load', context.onScriptLoad, false);
            node.addEventListener('error', context.onScriptError, false);
        }
        node.src = url;

        //For some cache cases in IE 6-8, the script executes before the end
        //of the appendChild execution, so to tie an anonymous define
        //call to the module name (which is stored on the node), hold on
        //to a reference to this node, but clear after the DOM insertion.
        currentlyAddingScript = node;
        if (baseElement) {
            head.insertBefore(node, baseElement);
        } else {
            head.appendChild(node);
        }
        currentlyAddingScript = null;

        return node;
    } else if (isWebWorker) {
        try {
            //In a web worker, use importScripts. This is not a very
            //efficient use of importScripts, importScripts will block until
            //its script is downloaded and evaluated. However, if web workers
            //are in play, the expectation that a build has been done so that
            //only one script needs to be loaded anyway. This may need to be
            //reevaluated if other use cases become common.
            importScripts(url);

            //Account for anonymous modules
            context.completeLoad(moduleName);
        } catch (e) {
            context.onError(makeError('importscripts',
                'importScripts failed for ' + moduleName + ' at ' + url,
                e,
                [moduleName]));
        }
    }
};

//Returns the script node currently in 'interactive' readyState, caching the
//result between calls (IE 6-8 anonymous-define support).
function getInteractiveScript() {
    if (interactiveScript && interactiveScript.readyState === 'interactive') {
        return interactiveScript;
    }

    eachReverse(scripts(), function (script) {
        if (script.readyState === 'interactive') {
            return (interactiveScript = script);
        }
    });
    return interactiveScript;
}

//Look for a data-main script attribute, which could also adjust the baseUrl.
if (isBrowser && !cfg.skipDataMain) {
    //Figure out baseUrl. Get it from the script tag with require.js in it.
    eachReverse(scripts(), function (script) {
        //Set the 'head' where we can append children by
        //using the script's parent.
        if (!head) {
            head = script.parentNode;
        }

        //Look for a data-main attribute to set main script for the page
        //to load. If it is there, the path to data main becomes the
        //baseUrl, if it is not already set.
        dataMain = script.getAttribute('data-main');
        if (dataMain) {
            //Preserve dataMain in case it is a path (i.e. contains '?')
            mainScript = dataMain;

            //Set final baseUrl if there is not already an explicit one.
            if (!cfg.baseUrl) {
                //Pull off the directory of data-main for use as the
                //baseUrl.
                src = mainScript.split('/');
                mainScript = src.pop();
                subPath = src.length ?
                src.join('/') + '/' : './';

                cfg.baseUrl = subPath;
            }

            //Strip off any trailing .js since mainScript is now
            //like a module name.
            mainScript = mainScript.replace(jsSuffixRegExp, '');

            //If mainScript is still a path, fall back to dataMain
            if (req.jsExtRegExp.test(mainScript)) {
                mainScript = dataMain;
            }

            //Put the data-main script in the files to load.
            cfg.deps = cfg.deps ? cfg.deps.concat(mainScript) : [mainScript];

            return true;
        }
    });
}

/**
 * The function that handles definitions of modules. Differs from
 * require() in that a string for the module should be the first argument,
 * and the function to execute after dependencies are loaded should
 * return a value to define the module corresponding to the first argument's
 * name.
 */
define = function (name, deps, callback) {
    var node, context;

    //Allow for anonymous modules
    if (typeof name !== 'string') {
        //Adjust args appropriately
        callback = deps;
        deps = name;
        name = null;
    }

    //This module may not have dependencies
    if (!isArray(deps)) {
        callback = deps;
        deps = null;
    }

    //If no name, and callback is a function, then figure out if it a
    //CommonJS thing with dependencies.
    if (!deps && isFunction(callback)) {
        deps = [];
        //Remove comments from the callback string,
        //look for require calls, and pull them into the dependencies,
        //but only if there are function args.
        if (callback.length) {
            callback
                .toString()
                .replace(commentRegExp, '')
                .replace(cjsRequireRegExp, function (match, dep) {
                    deps.push(dep);
                });

            //May be a CommonJS thing even without require calls, but still
            //could use exports, and module. Avoid doing exports and module
            //work though if it just needs require.
            //REQUIRES the function to expect the CommonJS variables in the
            //order listed below.
            deps = (callback.length === 1 ? ['require'] : ['require', 'exports', 'module']).concat(deps);
        }
    }

    //If in IE 6-8 and hit an anonymous define() call, do the interactive
    //work.
    if (useInteractive) {
        node = currentlyAddingScript || getInteractiveScript();
        if (node) {
            if (!name) {
                name = node.getAttribute('data-requiremodule');
            }
            context = contexts[node.getAttribute('data-requirecontext')];
        }
    }

    //Always save off evaluating the def call until the script onload handler.
    //This allows multiple modules to be in a file without prematurely
    //tracing dependencies, and allows for anonymous module support,
    //where the module name is not known until the script onload event
    //occurs. If no context, use the global queue, and get it processed
    //in the onscript load callback.
    (context ? context.defQueue : globalDefQueue).push([name, deps, callback]);
};

define.amd = {
    jQuery: true
};

/**
 * Executes the text. Normally just uses eval, but can be modified
 * to use a better, environment-specific call. Only used for transpiling
 * loader plugins, not for plain JS modules.
 * @param {String} text the text to execute/evaluate.
 */
req.exec = function (text) {
    /*jslint evil: true */
    return eval(text);
};

//Set up with config info.
req(cfg);
}(this));
Java
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.activiti.dmn.engine.impl.parser; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; import org.activiti.dmn.engine.ActivitiDmnException; import org.activiti.dmn.engine.DmnEngineConfiguration; import org.activiti.dmn.engine.impl.context.Context; import org.activiti.dmn.engine.impl.io.InputStreamSource; import org.activiti.dmn.engine.impl.io.ResourceStreamSource; import org.activiti.dmn.engine.impl.io.StreamSource; import org.activiti.dmn.engine.impl.io.StringStreamSource; import org.activiti.dmn.engine.impl.io.UrlStreamSource; import org.activiti.dmn.engine.impl.persistence.entity.DecisionTableEntity; import org.activiti.dmn.engine.impl.persistence.entity.DmnDeploymentEntity; import org.activiti.dmn.model.Decision; import org.activiti.dmn.model.DmnDefinition; import org.activiti.dmn.xml.constants.DmnXMLConstants; import org.activiti.dmn.xml.converter.DmnXMLConverter; import org.activiti.dmn.xml.exception.DmnXMLException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Specific parsing of one BPMN 2.0 XML file, created by the {@link DmnParse}. 
* * @author Tijs Rademakers * @author Joram Barrez */ public class DmnParse implements DmnXMLConstants { protected static final Logger LOGGER = LoggerFactory.getLogger(DmnParse.class); protected String name; protected boolean validateSchema = true; protected StreamSource streamSource; protected String sourceSystemId; protected DmnDefinition dmnDefinition; protected String targetNamespace; /** The deployment to which the parsed decision tables will be added. */ protected DmnDeploymentEntity deployment; /** The end result of the parsing: a list of decision tables. */ protected List<DecisionTableEntity> decisionTables = new ArrayList<DecisionTableEntity>(); public DmnParse deployment(DmnDeploymentEntity deployment) { this.deployment = deployment; return this; } public DmnParse execute(DmnEngineConfiguration dmnEngineConfig) { try { DmnXMLConverter converter = new DmnXMLConverter(); boolean enableSafeDmnXml = dmnEngineConfig.isEnableSafeDmnXml(); String encoding = dmnEngineConfig.getXmlEncoding(); if (encoding != null) { dmnDefinition = converter.convertToDmnModel(streamSource, validateSchema, enableSafeDmnXml, encoding); } else { dmnDefinition = converter.convertToDmnModel(streamSource, validateSchema, enableSafeDmnXml); } if (dmnDefinition != null && dmnDefinition.getDecisions() != null) { for (Decision decision : dmnDefinition.getDecisions()) { DecisionTableEntity decisionTableEntity = Context.getDmnEngineConfiguration().getDecisionTableEntityManager().create(); decisionTableEntity.setKey(decision.getId()); decisionTableEntity.setName(decision.getName()); decisionTableEntity.setResourceName(name); decisionTableEntity.setDeploymentId(deployment.getId()); decisionTableEntity.setParentDeploymentId(deployment.getParentDeploymentId()); decisionTableEntity.setDescription(decision.getDescription()); decisionTables.add(decisionTableEntity); } } } catch (Exception e) { if (e instanceof ActivitiDmnException) { throw (ActivitiDmnException) e; } else if (e instanceof 
DmnXMLException) { throw (DmnXMLException) e; } else { throw new ActivitiDmnException("Error parsing XML", e); } } return this; } public DmnParse name(String name) { this.name = name; return this; } public DmnParse sourceInputStream(InputStream inputStream) { if (name == null) { name("inputStream"); } setStreamSource(new InputStreamSource(inputStream)); return this; } public DmnParse sourceUrl(URL url) { if (name == null) { name(url.toString()); } setStreamSource(new UrlStreamSource(url)); return this; } public DmnParse sourceUrl(String url) { try { return sourceUrl(new URL(url)); } catch (MalformedURLException e) { throw new ActivitiDmnException("malformed url: " + url, e); } } public DmnParse sourceResource(String resource) { if (name == null) { name(resource); } setStreamSource(new ResourceStreamSource(resource)); return this; } public DmnParse sourceString(String string) { if (name == null) { name("string"); } setStreamSource(new StringStreamSource(string)); return this; } protected void setStreamSource(StreamSource streamSource) { if (this.streamSource != null) { throw new ActivitiDmnException("invalid: multiple sources " + this.streamSource + " and " + streamSource); } this.streamSource = streamSource; } public String getSourceSystemId() { return sourceSystemId; } public DmnParse setSourceSystemId(String sourceSystemId) { this.sourceSystemId = sourceSystemId; return this; } /* * ------------------- GETTERS AND SETTERS ------------------- */ public boolean isValidateSchema() { return validateSchema; } public void setValidateSchema(boolean validateSchema) { this.validateSchema = validateSchema; } public List<DecisionTableEntity> getDecisionTables() { return decisionTables; } public String getTargetNamespace() { return targetNamespace; } public DmnDeploymentEntity getDeployment() { return deployment; } public void setDeployment(DmnDeploymentEntity deployment) { this.deployment = deployment; } public DmnDefinition getDmnDefinition() { return dmnDefinition; } 
public void setDmnDefinition(DmnDefinition dmnDefinition) { this.dmnDefinition = dmnDefinition; } }
Java
// NativeScript demo entry point: registers an iOS notification observer,
// logs every cross-platform application lifecycle event, logs every Android
// Activity lifecycle event, then starts the application.
import application = require("application");

// Specify custom UIApplicationDelegate.
/*
class MyDelegate extends UIResponder implements UIApplicationDelegate {
    public static ObjCProtocols = [UIApplicationDelegate];

    applicationDidFinishLaunchingWithOptions(application: UIApplication, launchOptions: NSDictionary): boolean {
        console.log("applicationWillFinishLaunchingWithOptions: " + launchOptions)
        return true;
    }

    applicationDidBecomeActive(application: UIApplication): void {
        console.log("applicationDidBecomeActive: " + application)
    }
}
application.ios.delegate = MyDelegate;
*/

// iOS only: hook a native notification through the NativeScript bridge.
if (application.ios) {
    // Observe application notifications.
    application.ios.addNotificationObserver(UIApplicationDidFinishLaunchingNotification, (notification: NSNotification) => {
        console.log("UIApplicationDidFinishLaunchingNotification: " + notification)
    });
}

// Page module loaded when the application starts.
application.mainModule = "app/mainPage";

// Common events for both Android and iOS.
application.on(application.launchEvent, function (args: application.ApplicationEventData) {
    if (args.android) {
        // For Android applications, args.android is an android.content.Intent class.
        console.log("Launched Android application with the following intent: " + args.android + ".");
    } else if (args.ios !== undefined) {
        // For iOS applications, args.ios is NSDictionary (launchOptions).
        console.log("Launched iOS application with options: " + args.ios);
    }
});

application.on(application.suspendEvent, function (args: application.ApplicationEventData) {
    if (args.android) {
        // For Android applications, args.android is an android activity class.
        console.log("Activity: " + args.android);
    } else if (args.ios) {
        // For iOS applications, args.ios is UIApplication.
        console.log("UIApplication: " + args.ios);
    }
});

application.on(application.resumeEvent, function (args: application.ApplicationEventData) {
    if (args.android) {
        // For Android applications, args.android is an android activity class.
        console.log("Activity: " + args.android);
    } else if (args.ios) {
        // For iOS applications, args.ios is UIApplication.
        console.log("UIApplication: " + args.ios);
    }
});

application.on(application.exitEvent, function (args: application.ApplicationEventData) {
    if (args.android) {
        // For Android applications, args.android is an android activity class.
        console.log("Activity: " + args.android);
    } else if (args.ios) {
        // For iOS applications, args.ios is UIApplication.
        console.log("UIApplication: " + args.ios);
    }
});

application.on(application.lowMemoryEvent, function (args: application.ApplicationEventData) {
    if (args.android) {
        // For Android applications, args.android is an android activity class.
        console.log("Activity: " + args.android);
    } else if (args.ios) {
        // For iOS applications, args.ios is UIApplication.
        console.log("UIApplication: " + args.ios);
    }
});

application.on(application.uncaughtErrorEvent, function (args: application.ApplicationEventData) {
    if (args.android) {
        // For Android applications, args.android is an NativeScriptError.
        console.log("NativeScriptError: " + args.android);
    } else if (args.ios) {
        // For iOS applications, args.ios is NativeScriptError.
        console.log("NativeScriptError: " + args.ios);
    }
});

// Android activity events
if (application.android) {
    application.android.on(application.AndroidApplication.activityCreatedEvent, function (args: application.AndroidActivityBundleEventData) {
        console.log("Event: " + args.eventName + ", Activity: " + args.activity + ", Bundle: " + args.bundle);
    });

    application.android.on(application.AndroidApplication.activityDestroyedEvent, function (args: application.AndroidActivityEventData) {
        console.log("Event: " + args.eventName + ", Activity: " + args.activity);
    });

    application.android.on(application.AndroidApplication.activityStartedEvent, function (args: application.AndroidActivityEventData) {
        console.log("Event: " + args.eventName + ", Activity: " + args.activity);
    });

    application.android.on(application.AndroidApplication.activityPausedEvent, function (args: application.AndroidActivityEventData) {
        console.log("Event: " + args.eventName + ", Activity: " + args.activity);
    });

    application.android.on(application.AndroidApplication.activityResumedEvent, function (args: application.AndroidActivityEventData) {
        console.log("Event: " + args.eventName + ", Activity: " + args.activity);
    });

    application.android.on(application.AndroidApplication.activityStoppedEvent, function (args: application.AndroidActivityEventData) {
        console.log("Event: " + args.eventName + ", Activity: " + args.activity);
    });

    application.android.on(application.AndroidApplication.saveActivityStateEvent, function (args: application.AndroidActivityBundleEventData) {
        console.log("Event: " + args.eventName + ", Activity: " + args.activity + ", Bundle: " + args.bundle);
    });

    application.android.on(application.AndroidApplication.activityResultEvent, function (args: application.AndroidActivityResultEventData) {
        console.log("Event: " + args.eventName + ", Activity: " + args.activity +
            ", requestCode: " + args.requestCode + ", resultCode: " + args.resultCode + ", Intent: " + args.intent);
    });

    application.android.on(application.AndroidApplication.activityBackPressedEvent, function (args: application.AndroidActivityBackPressedEventData) {
        console.log("Event: " + args.eventName + ", Activity: " + args.activity);
        // Set args.cancel = true to cancel back navigation and do something custom.
    });
}

application.start();
Java
//// [variadicTuples1.ts] // Variadics in tuple types type TV0<T extends unknown[]> = [string, ...T]; type TV1<T extends unknown[]> = [string, ...T, number]; type TV2<T extends unknown[]> = [string, ...T, number, ...T]; type TV3<T extends unknown[]> = [string, ...T, ...number[], ...T]; // Normalization type TN1 = TV1<[boolean, string]>; type TN2 = TV1<[]>; type TN3 = TV1<[boolean?]>; type TN4 = TV1<string[]>; type TN5 = TV1<[boolean] | [symbol, symbol]>; type TN6 = TV1<any>; type TN7 = TV1<never>; // Variadics in array literals function tup2<T extends unknown[], U extends unknown[]>(t: [...T], u: [...U]) { return [1, ...t, 2, ...u, 3] as const; } const t2 = tup2(['hello'], [10, true]); function concat<T extends unknown[], U extends unknown[]>(t: [...T], u: [...U]): [...T, ...U] { return [...t, ...u]; } declare const sa: string[]; const tc1 = concat([], []); const tc2 = concat(['hello'], [42]); const tc3 = concat([1, 2, 3], sa); const tc4 = concat(sa, [1, 2, 3]); // Ideally would be [...string[], number, number, number] function concat2<T extends readonly unknown[], U extends readonly unknown[]>(t: T, u: U) { return [...t, ...u]; // (T[number] | U[number])[] } const tc5 = concat2([1, 2, 3] as const, [4, 5, 6] as const); // (1 | 2 | 3 | 4 | 5 | 6)[] // Spread arguments declare function foo1(a: number, b: string, c: boolean, ...d: number[]): void; function foo2(t1: [number, string], t2: [boolean], a1: number[]) { foo1(1, 'abc', true, 42, 43, 44); foo1(...t1, true, 42, 43, 44); foo1(...t1, ...t2, 42, 43, 44); foo1(...t1, ...t2, ...a1); foo1(...t1); // Error foo1(...t1, 45); // Error } declare function foo3<T extends unknown[]>(x: number, ...args: [...T, number]): T; function foo4<U extends unknown[]>(u: U) { foo3(1, 2); foo3(1, 'hello', true, 2); foo3(1, ...u, 'hi', 2); foo3(1); } // Contextual typing of array literals declare function ft1<T extends unknown[]>(t: T): T; declare function ft2<T extends unknown[]>(t: T): readonly [...T]; declare function ft3<T extends 
unknown[]>(t: [...T]): T; declare function ft4<T extends unknown[]>(t: [...T]): readonly [...T]; ft1(['hello', 42]); // (string | number)[] ft2(['hello', 42]); // readonly (string | number)[] ft3(['hello', 42]); // [string, number] ft4(['hello', 42]); // readonly [string, number] // Indexing variadic tuple types function f0<T extends unknown[]>(t: [string, ...T], n: number) { const a = t[0]; // string const b = t[1]; // [string, ...T][1] const c = t[2]; // [string, ...T][2] const d = t[n]; // [string, ...T][number] } function f1<T extends unknown[]>(t: [string, ...T, number], n: number) { const a = t[0]; // string const b = t[1]; // [string, ...T, number][1] const c = t[2]; // [string, ...T, number][2] const d = t[n]; // [string, ...T, number][number] } // Destructuring variadic tuple types function f2<T extends unknown[]>(t: [string, ...T]) { let [...ax] = t; // [string, ...T] let [b1, ...bx] = t; // string, [...T] let [c1, c2, ...cx] = t; // string, [string, ...T][1], T[number][] } function f3<T extends unknown[]>(t: [string, ...T, number]) { let [...ax] = t; // [string, ...T, number] let [b1, ...bx] = t; // string, [...T, number] let [c1, c2, ...cx] = t; // string, [string, ...T, number][1], (number | T[number])[] } // Mapped types applied to variadic tuple types type Arrayify<T> = { [P in keyof T]: T[P][] }; type TM1<U extends unknown[]> = Arrayify<readonly [string, number?, ...U, ...boolean[]]>; // [string[], (number | undefined)[]?, Arrayify<U>, ...boolean[][]] type TP1<T extends unknown[]> = Partial<[string, ...T, number]>; // [string?, Partial<T>, number?] 
type TP2<T extends unknown[]> = Partial<[string, ...T, ...number[]]>; // [string?, Partial<T>, ...(number | undefined)[]] // Reverse mapping through mapped type applied to variadic tuple type declare function fm1<T extends unknown[]>(t: Arrayify<[string, number, ...T]>): T; let tm1 = fm1([['abc'], [42], [true], ['def']]); // [boolean, string] // Spread of readonly array-like infers mutable array-like declare function fx1<T extends unknown[]>(a: string, ...args: T): T; function gx1<U extends unknown[], V extends readonly unknown[]>(u: U, v: V) { fx1('abc'); // [] fx1('abc', ...u); // U fx1('abc', ...v); // [...V] fx1<U>('abc', ...u); // U fx1<V>('abc', ...v); // Error } declare function fx2<T extends readonly unknown[]>(a: string, ...args: T): T; function gx2<U extends unknown[], V extends readonly unknown[]>(u: U, v: V) { fx2('abc'); // [] fx2('abc', ...u); // U fx2('abc', ...v); // [...V] fx2<U>('abc', ...u); // U fx2<V>('abc', ...v); // V } // Relations involving variadic tuple types function f10<T extends string[], U extends T>(x: [string, ...unknown[]], y: [string, ...T], z: [string, ...U]) { x = y; x = z; y = x; // Error y = z; z = x; // Error z = y; // Error } // For a generic type T, [...T] is assignable to T, T is assignable to readonly [...T], and T is assignable // to [...T] when T is constrained to a mutable array or tuple type. 
function f11<T extends unknown[]>(t: T, m: [...T], r: readonly [...T]) { t = m; t = r; // Error m = t; m = r; // Error r = t; r = m; } function f12<T extends readonly unknown[]>(t: T, m: [...T], r: readonly [...T]) { t = m; t = r; // Error m = t; // Error m = r; // Error r = t; r = m; } function f13<T extends string[], U extends T>(t0: T, t1: [...T], t2: [...U]) { t0 = t1; t0 = t2; t1 = t0; t1 = t2; t2 = t0; // Error t2 = t1; // Error } function f14<T extends readonly string[], U extends T>(t0: T, t1: [...T], t2: [...U]) { t0 = t1; t0 = t2; t1 = t0; // Error t1 = t2; t2 = t0; // Error t2 = t1; // Error } function f15<T extends string[], U extends T>(k0: keyof T, k1: keyof [...T], k2: keyof [...U], k3: keyof [1, 2, ...T]) { k0 = 'length'; k1 = 'length'; k2 = 'length'; k0 = 'slice'; k1 = 'slice'; k2 = 'slice'; k3 = '0'; k3 = '1'; k3 = '2'; // Error } // Inference between variadic tuple types type First<T extends readonly unknown[]> = T extends readonly [unknown, ...unknown[]] ? T[0] : T[0] | undefined; type DropFirst<T extends readonly unknown[]> = T extends readonly [unknown?, ...infer U] ? U : [...T]; type Last<T extends readonly unknown[]> = T extends readonly [...unknown[], infer U] ? U : T extends readonly [unknown, ...unknown[]] ? T[number] : T[number] | undefined; type DropLast<T extends readonly unknown[]> = T extends readonly [...infer U, unknown] ? 
U : [...T]; type T00 = First<[number, symbol, string]>; type T01 = First<[symbol, string]>; type T02 = First<[string]>; type T03 = First<[number, symbol, ...string[]]>; type T04 = First<[symbol, ...string[]]>; type T05 = First<[string?]>; type T06 = First<string[]>; type T07 = First<[]>; type T08 = First<any>; type T09 = First<never>; type T10 = DropFirst<[number, symbol, string]>; type T11 = DropFirst<[symbol, string]>; type T12 = DropFirst<[string]>; type T13 = DropFirst<[number, symbol, ...string[]]>; type T14 = DropFirst<[symbol, ...string[]]>; type T15 = DropFirst<[string?]>; type T16 = DropFirst<string[]>; type T17 = DropFirst<[]>; type T18 = DropFirst<any>; type T19 = DropFirst<never>; type T20 = Last<[number, symbol, string]>; type T21 = Last<[symbol, string]>; type T22 = Last<[string]>; type T23 = Last<[number, symbol, ...string[]]>; type T24 = Last<[symbol, ...string[]]>; type T25 = Last<[string?]>; type T26 = Last<string[]>; type T27 = Last<[]>; type T28 = Last<any>; type T29 = Last<never>; type T30 = DropLast<[number, symbol, string]>; type T31 = DropLast<[symbol, string]>; type T32 = DropLast<[string]>; type T33 = DropLast<[number, symbol, ...string[]]>; type T34 = DropLast<[symbol, ...string[]]>; type T35 = DropLast<[string?]>; type T36 = DropLast<string[]>; type T37 = DropLast<[]>; // unknown[], maybe should be [] type T38 = DropLast<any>; type T39 = DropLast<never>; type R00 = First<readonly [number, symbol, string]>; type R01 = First<readonly [symbol, string]>; type R02 = First<readonly [string]>; type R03 = First<readonly [number, symbol, ...string[]]>; type R04 = First<readonly [symbol, ...string[]]>; type R05 = First<readonly string[]>; type R06 = First<readonly []>; type R10 = DropFirst<readonly [number, symbol, string]>; type R11 = DropFirst<readonly [symbol, string]>; type R12 = DropFirst<readonly [string]>; type R13 = DropFirst<readonly [number, symbol, ...string[]]>; type R14 = DropFirst<readonly [symbol, ...string[]]>; type R15 = 
DropFirst<readonly string[]>; type R16 = DropFirst<readonly []>; type R20 = Last<readonly [number, symbol, string]>; type R21 = Last<readonly [symbol, string]>; type R22 = Last<readonly [string]>; type R23 = Last<readonly [number, symbol, ...string[]]>; type R24 = Last<readonly [symbol, ...string[]]>; type R25 = Last<readonly string[]>; type R26 = Last<readonly []>; type R30 = DropLast<readonly [number, symbol, string]>; type R31 = DropLast<readonly [symbol, string]>; type R32 = DropLast<readonly [string]>; type R33 = DropLast<readonly [number, symbol, ...string[]]>; type R34 = DropLast<readonly [symbol, ...string[]]>; type R35 = DropLast<readonly string[]>; type R36 = DropLast<readonly []>; // Inference to [...T, ...U] with implied arity for T function curry<T extends unknown[], U extends unknown[], R>(f: (...args: [...T, ...U]) => R, ...a: T) { return (...b: U) => f(...a, ...b); } const fn1 = (a: number, b: string, c: boolean, d: string[]) => 0; const c0 = curry(fn1); // (a: number, b: string, c: boolean, d: string[]) => number const c1 = curry(fn1, 1); // (b: string, c: boolean, d: string[]) => number const c2 = curry(fn1, 1, 'abc'); // (c: boolean, d: string[]) => number const c3 = curry(fn1, 1, 'abc', true); // (d: string[]) => number const c4 = curry(fn1, 1, 'abc', true, ['x', 'y']); // () => number const fn2 = (x: number, b: boolean, ...args: string[]) => 0; const c10 = curry(fn2); // (x: number, b: boolean, ...args: string[]) => number const c11 = curry(fn2, 1); // (b: boolean, ...args: string[]) => number const c12 = curry(fn2, 1, true); // (...args: string[]) => number const c13 = curry(fn2, 1, true, 'abc', 'def'); // (...args: string[]) => number const fn3 = (...args: string[]) => 0; const c20 = curry(fn3); // (...args: string[]) => number const c21 = curry(fn3, 'abc', 'def'); // (...args: string[]) => number const c22 = curry(fn3, ...sa); // (...args: string[]) => number // No inference to [...T, ...U] when there is no implied arity function curry2<T 
extends unknown[], U extends unknown[], R>(f: (...args: [...T, ...U]) => R, t: [...T], u: [...U]) { return f(...t, ...u); } declare function fn10(a: string, b: number, c: boolean): string[]; curry2(fn10, ['hello', 42], [true]); curry2(fn10, ['hello'], [42, true]); // Inference to [...T] has higher priority than inference to [...T, number?] declare function ft<T extends unknown[]>(t1: [...T], t2: [...T, number?]): T; ft([1, 2, 3], [1, 2, 3]); ft([1, 2], [1, 2, 3]); ft(['a', 'b'], ['c', 'd']) ft(['a', 'b'], ['c', 'd', 42]) // Last argument is contextually typed declare function call<T extends unknown[], R>(...args: [...T, (...args: T) => R]): [T, R]; call('hello', 32, (a, b) => 42); call(...sa, (...x) => 42); // No inference to ending optional elements (except with identical structure) declare function f20<T extends unknown[] = []>(args: [...T, number?]): T; function f21<U extends string[]>(args: [...U, number?]) { let v1 = f20(args); // U let v2 = f20(["foo", "bar"]); // [string] let v3 = f20(["foo", 42]); // [string] } declare function f22<T extends unknown[] = []>(args: [...T, number]): T; declare function f22<T extends unknown[] = []>(args: [...T]): T; function f23<U extends string[]>(args: [...U, number]) { let v1 = f22(args); // U let v2 = f22(["foo", "bar"]); // [string, string] let v3 = f22(["foo", 42]); // [string] } // Repro from #39327 interface Desc<A extends unknown[], T> { readonly f: (...args: A) => T; bind<T extends unknown[], U extends unknown[], R>(this: Desc<[...T, ...U], R>, ...args: T): Desc<[...U], R>; } declare const a: Desc<[string, number, boolean], object>; const b = a.bind("", 1); // Desc<[boolean], object> // Repro from #39607 declare function getUser(id: string, options?: { x?: string }): string; declare function getOrgUser(id: string, orgId: number, options?: { y?: number, z?: boolean }): void; function callApi<T extends unknown[] = [], U = void>(method: (...args: [...T, object]) => U) { return (...args: [...T]) => method(...args, {}); } 
callApi(getUser); callApi(getOrgUser); // Repro from #40235 type Numbers = number[]; type Unbounded = [...Numbers, boolean]; const data: Unbounded = [false, false]; // Error type U1 = [string, ...Numbers, boolean]; type U2 = [...[string, ...Numbers], boolean]; type U3 = [...[string, number], boolean]; //// [variadicTuples1.js] "use strict"; // Variadics in tuple types var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { if (ar || !(i in from)) { if (!ar) ar = Array.prototype.slice.call(from, 0, i); ar[i] = from[i]; } } return to.concat(ar || Array.prototype.slice.call(from)); }; // Variadics in array literals function tup2(t, u) { return __spreadArray(__spreadArray(__spreadArray(__spreadArray([1], t, true), [2], false), u, true), [3], false); } var t2 = tup2(['hello'], [10, true]); function concat(t, u) { return __spreadArray(__spreadArray([], t, true), u, true); } var tc1 = concat([], []); var tc2 = concat(['hello'], [42]); var tc3 = concat([1, 2, 3], sa); var tc4 = concat(sa, [1, 2, 3]); // Ideally would be [...string[], number, number, number] function concat2(t, u) { return __spreadArray(__spreadArray([], t, true), u, true); // (T[number] | U[number])[] } var tc5 = concat2([1, 2, 3], [4, 5, 6]); // (1 | 2 | 3 | 4 | 5 | 6)[] function foo2(t1, t2, a1) { foo1(1, 'abc', true, 42, 43, 44); foo1.apply(void 0, __spreadArray(__spreadArray([], t1, false), [true, 42, 43, 44], false)); foo1.apply(void 0, __spreadArray(__spreadArray(__spreadArray([], t1, false), t2, false), [42, 43, 44], false)); foo1.apply(void 0, __spreadArray(__spreadArray(__spreadArray([], t1, false), t2, false), a1, false)); foo1.apply(void 0, t1); // Error foo1.apply(void 0, __spreadArray(__spreadArray([], t1, false), [45], false)); // Error } function foo4(u) { foo3(1, 2); foo3(1, 'hello', true, 2); foo3.apply(void 0, __spreadArray(__spreadArray([1], u, false), ['hi', 2], false)); 
foo3(1); } ft1(['hello', 42]); // (string | number)[] ft2(['hello', 42]); // readonly (string | number)[] ft3(['hello', 42]); // [string, number] ft4(['hello', 42]); // readonly [string, number] // Indexing variadic tuple types function f0(t, n) { var a = t[0]; // string var b = t[1]; // [string, ...T][1] var c = t[2]; // [string, ...T][2] var d = t[n]; // [string, ...T][number] } function f1(t, n) { var a = t[0]; // string var b = t[1]; // [string, ...T, number][1] var c = t[2]; // [string, ...T, number][2] var d = t[n]; // [string, ...T, number][number] } // Destructuring variadic tuple types function f2(t) { var ax = t.slice(0); // [string, ...T] var b1 = t[0], bx = t.slice(1); // string, [...T] var c1 = t[0], c2 = t[1], cx = t.slice(2); // string, [string, ...T][1], T[number][] } function f3(t) { var ax = t.slice(0); // [string, ...T, number] var b1 = t[0], bx = t.slice(1); // string, [...T, number] var c1 = t[0], c2 = t[1], cx = t.slice(2); // string, [string, ...T, number][1], (number | T[number])[] } var tm1 = fm1([['abc'], [42], [true], ['def']]); // [boolean, string] function gx1(u, v) { fx1('abc'); // [] fx1.apply(void 0, __spreadArray(['abc'], u, false)); // U fx1.apply(void 0, __spreadArray(['abc'], v, false)); // [...V] fx1.apply(void 0, __spreadArray(['abc'], u, false)); // U fx1.apply(void 0, __spreadArray(['abc'], v, false)); // Error } function gx2(u, v) { fx2('abc'); // [] fx2.apply(void 0, __spreadArray(['abc'], u, false)); // U fx2.apply(void 0, __spreadArray(['abc'], v, false)); // [...V] fx2.apply(void 0, __spreadArray(['abc'], u, false)); // U fx2.apply(void 0, __spreadArray(['abc'], v, false)); // V } // Relations involving variadic tuple types function f10(x, y, z) { x = y; x = z; y = x; // Error y = z; z = x; // Error z = y; // Error } // For a generic type T, [...T] is assignable to T, T is assignable to readonly [...T], and T is assignable // to [...T] when T is constrained to a mutable array or tuple type. 
function f11(t, m, r) { t = m; t = r; // Error m = t; m = r; // Error r = t; r = m; } function f12(t, m, r) { t = m; t = r; // Error m = t; // Error m = r; // Error r = t; r = m; } function f13(t0, t1, t2) { t0 = t1; t0 = t2; t1 = t0; t1 = t2; t2 = t0; // Error t2 = t1; // Error } function f14(t0, t1, t2) { t0 = t1; t0 = t2; t1 = t0; // Error t1 = t2; t2 = t0; // Error t2 = t1; // Error } function f15(k0, k1, k2, k3) { k0 = 'length'; k1 = 'length'; k2 = 'length'; k0 = 'slice'; k1 = 'slice'; k2 = 'slice'; k3 = '0'; k3 = '1'; k3 = '2'; // Error } // Inference to [...T, ...U] with implied arity for T function curry(f) { var a = []; for (var _i = 1; _i < arguments.length; _i++) { a[_i - 1] = arguments[_i]; } return function () { var b = []; for (var _i = 0; _i < arguments.length; _i++) { b[_i] = arguments[_i]; } return f.apply(void 0, __spreadArray(__spreadArray([], a, false), b, false)); }; } var fn1 = function (a, b, c, d) { return 0; }; var c0 = curry(fn1); // (a: number, b: string, c: boolean, d: string[]) => number var c1 = curry(fn1, 1); // (b: string, c: boolean, d: string[]) => number var c2 = curry(fn1, 1, 'abc'); // (c: boolean, d: string[]) => number var c3 = curry(fn1, 1, 'abc', true); // (d: string[]) => number var c4 = curry(fn1, 1, 'abc', true, ['x', 'y']); // () => number var fn2 = function (x, b) { var args = []; for (var _i = 2; _i < arguments.length; _i++) { args[_i - 2] = arguments[_i]; } return 0; }; var c10 = curry(fn2); // (x: number, b: boolean, ...args: string[]) => number var c11 = curry(fn2, 1); // (b: boolean, ...args: string[]) => number var c12 = curry(fn2, 1, true); // (...args: string[]) => number var c13 = curry(fn2, 1, true, 'abc', 'def'); // (...args: string[]) => number var fn3 = function () { var args = []; for (var _i = 0; _i < arguments.length; _i++) { args[_i] = arguments[_i]; } return 0; }; var c20 = curry(fn3); // (...args: string[]) => number var c21 = curry(fn3, 'abc', 'def'); // (...args: string[]) => number var c22 = 
curry.apply(void 0, __spreadArray([fn3], sa, false)); // (...args: string[]) => number // No inference to [...T, ...U] when there is no implied arity function curry2(f, t, u) { return f.apply(void 0, __spreadArray(__spreadArray([], t, false), u, false)); } curry2(fn10, ['hello', 42], [true]); curry2(fn10, ['hello'], [42, true]); ft([1, 2, 3], [1, 2, 3]); ft([1, 2], [1, 2, 3]); ft(['a', 'b'], ['c', 'd']); ft(['a', 'b'], ['c', 'd', 42]); call('hello', 32, function (a, b) { return 42; }); call.apply(void 0, __spreadArray(__spreadArray([], sa, false), [function () { var x = []; for (var _i = 0; _i < arguments.length; _i++) { x[_i] = arguments[_i]; } return 42; }], false)); function f21(args) { var v1 = f20(args); // U var v2 = f20(["foo", "bar"]); // [string] var v3 = f20(["foo", 42]); // [string] } function f23(args) { var v1 = f22(args); // U var v2 = f22(["foo", "bar"]); // [string, string] var v3 = f22(["foo", 42]); // [string] } var b = a.bind("", 1); // Desc<[boolean], object> function callApi(method) { return function () { var args = []; for (var _i = 0; _i < arguments.length; _i++) { args[_i] = arguments[_i]; } return method.apply(void 0, __spreadArray(__spreadArray([], args, false), [{}], false)); }; } callApi(getUser); callApi(getOrgUser); var data = [false, false]; // Error //// [variadicTuples1.d.ts] declare type TV0<T extends unknown[]> = [string, ...T]; declare type TV1<T extends unknown[]> = [string, ...T, number]; declare type TV2<T extends unknown[]> = [string, ...T, number, ...T]; declare type TV3<T extends unknown[]> = [string, ...T, ...number[], ...T]; declare type TN1 = TV1<[boolean, string]>; declare type TN2 = TV1<[]>; declare type TN3 = TV1<[boolean?]>; declare type TN4 = TV1<string[]>; declare type TN5 = TV1<[boolean] | [symbol, symbol]>; declare type TN6 = TV1<any>; declare type TN7 = TV1<never>; declare function tup2<T extends unknown[], U extends unknown[]>(t: [...T], u: [...U]): readonly [1, ...T, 2, ...U, 3]; declare const t2: readonly [1, 
string, 2, number, boolean, 3]; declare function concat<T extends unknown[], U extends unknown[]>(t: [...T], u: [...U]): [...T, ...U]; declare const sa: string[]; declare const tc1: []; declare const tc2: [string, number]; declare const tc3: [number, number, number, ...string[]]; declare const tc4: [...string[], number, number, number]; declare function concat2<T extends readonly unknown[], U extends readonly unknown[]>(t: T, u: U): (T[number] | U[number])[]; declare const tc5: (2 | 4 | 1 | 3 | 6 | 5)[]; declare function foo1(a: number, b: string, c: boolean, ...d: number[]): void; declare function foo2(t1: [number, string], t2: [boolean], a1: number[]): void; declare function foo3<T extends unknown[]>(x: number, ...args: [...T, number]): T; declare function foo4<U extends unknown[]>(u: U): void; declare function ft1<T extends unknown[]>(t: T): T; declare function ft2<T extends unknown[]>(t: T): readonly [...T]; declare function ft3<T extends unknown[]>(t: [...T]): T; declare function ft4<T extends unknown[]>(t: [...T]): readonly [...T]; declare function f0<T extends unknown[]>(t: [string, ...T], n: number): void; declare function f1<T extends unknown[]>(t: [string, ...T, number], n: number): void; declare function f2<T extends unknown[]>(t: [string, ...T]): void; declare function f3<T extends unknown[]>(t: [string, ...T, number]): void; declare type Arrayify<T> = { [P in keyof T]: T[P][]; }; declare type TM1<U extends unknown[]> = Arrayify<readonly [string, number?, ...U, ...boolean[]]>; declare type TP1<T extends unknown[]> = Partial<[string, ...T, number]>; declare type TP2<T extends unknown[]> = Partial<[string, ...T, ...number[]]>; declare function fm1<T extends unknown[]>(t: Arrayify<[string, number, ...T]>): T; declare let tm1: [boolean, string]; declare function fx1<T extends unknown[]>(a: string, ...args: T): T; declare function gx1<U extends unknown[], V extends readonly unknown[]>(u: U, v: V): void; declare function fx2<T extends readonly unknown[]>(a: 
string, ...args: T): T; declare function gx2<U extends unknown[], V extends readonly unknown[]>(u: U, v: V): void; declare function f10<T extends string[], U extends T>(x: [string, ...unknown[]], y: [string, ...T], z: [string, ...U]): void; declare function f11<T extends unknown[]>(t: T, m: [...T], r: readonly [...T]): void; declare function f12<T extends readonly unknown[]>(t: T, m: [...T], r: readonly [...T]): void; declare function f13<T extends string[], U extends T>(t0: T, t1: [...T], t2: [...U]): void; declare function f14<T extends readonly string[], U extends T>(t0: T, t1: [...T], t2: [...U]): void; declare function f15<T extends string[], U extends T>(k0: keyof T, k1: keyof [...T], k2: keyof [...U], k3: keyof [1, 2, ...T]): void; declare type First<T extends readonly unknown[]> = T extends readonly [unknown, ...unknown[]] ? T[0] : T[0] | undefined; declare type DropFirst<T extends readonly unknown[]> = T extends readonly [unknown?, ...infer U] ? U : [...T]; declare type Last<T extends readonly unknown[]> = T extends readonly [...unknown[], infer U] ? U : T extends readonly [unknown, ...unknown[]] ? T[number] : T[number] | undefined; declare type DropLast<T extends readonly unknown[]> = T extends readonly [...infer U, unknown] ? 
U : [...T]; declare type T00 = First<[number, symbol, string]>; declare type T01 = First<[symbol, string]>; declare type T02 = First<[string]>; declare type T03 = First<[number, symbol, ...string[]]>; declare type T04 = First<[symbol, ...string[]]>; declare type T05 = First<[string?]>; declare type T06 = First<string[]>; declare type T07 = First<[]>; declare type T08 = First<any>; declare type T09 = First<never>; declare type T10 = DropFirst<[number, symbol, string]>; declare type T11 = DropFirst<[symbol, string]>; declare type T12 = DropFirst<[string]>; declare type T13 = DropFirst<[number, symbol, ...string[]]>; declare type T14 = DropFirst<[symbol, ...string[]]>; declare type T15 = DropFirst<[string?]>; declare type T16 = DropFirst<string[]>; declare type T17 = DropFirst<[]>; declare type T18 = DropFirst<any>; declare type T19 = DropFirst<never>; declare type T20 = Last<[number, symbol, string]>; declare type T21 = Last<[symbol, string]>; declare type T22 = Last<[string]>; declare type T23 = Last<[number, symbol, ...string[]]>; declare type T24 = Last<[symbol, ...string[]]>; declare type T25 = Last<[string?]>; declare type T26 = Last<string[]>; declare type T27 = Last<[]>; declare type T28 = Last<any>; declare type T29 = Last<never>; declare type T30 = DropLast<[number, symbol, string]>; declare type T31 = DropLast<[symbol, string]>; declare type T32 = DropLast<[string]>; declare type T33 = DropLast<[number, symbol, ...string[]]>; declare type T34 = DropLast<[symbol, ...string[]]>; declare type T35 = DropLast<[string?]>; declare type T36 = DropLast<string[]>; declare type T37 = DropLast<[]>; declare type T38 = DropLast<any>; declare type T39 = DropLast<never>; declare type R00 = First<readonly [number, symbol, string]>; declare type R01 = First<readonly [symbol, string]>; declare type R02 = First<readonly [string]>; declare type R03 = First<readonly [number, symbol, ...string[]]>; declare type R04 = First<readonly [symbol, ...string[]]>; declare type R05 = 
First<readonly string[]>; declare type R06 = First<readonly []>; declare type R10 = DropFirst<readonly [number, symbol, string]>; declare type R11 = DropFirst<readonly [symbol, string]>; declare type R12 = DropFirst<readonly [string]>; declare type R13 = DropFirst<readonly [number, symbol, ...string[]]>; declare type R14 = DropFirst<readonly [symbol, ...string[]]>; declare type R15 = DropFirst<readonly string[]>; declare type R16 = DropFirst<readonly []>; declare type R20 = Last<readonly [number, symbol, string]>; declare type R21 = Last<readonly [symbol, string]>; declare type R22 = Last<readonly [string]>; declare type R23 = Last<readonly [number, symbol, ...string[]]>; declare type R24 = Last<readonly [symbol, ...string[]]>; declare type R25 = Last<readonly string[]>; declare type R26 = Last<readonly []>; declare type R30 = DropLast<readonly [number, symbol, string]>; declare type R31 = DropLast<readonly [symbol, string]>; declare type R32 = DropLast<readonly [string]>; declare type R33 = DropLast<readonly [number, symbol, ...string[]]>; declare type R34 = DropLast<readonly [symbol, ...string[]]>; declare type R35 = DropLast<readonly string[]>; declare type R36 = DropLast<readonly []>; declare function curry<T extends unknown[], U extends unknown[], R>(f: (...args: [...T, ...U]) => R, ...a: T): (...b: U) => R; declare const fn1: (a: number, b: string, c: boolean, d: string[]) => number; declare const c0: (a: number, b: string, c: boolean, d: string[]) => number; declare const c1: (b: string, c: boolean, d: string[]) => number; declare const c2: (c: boolean, d: string[]) => number; declare const c3: (d: string[]) => number; declare const c4: () => number; declare const fn2: (x: number, b: boolean, ...args: string[]) => number; declare const c10: (x: number, b: boolean, ...args: string[]) => number; declare const c11: (b: boolean, ...args: string[]) => number; declare const c12: (...b: string[]) => number; declare const c13: (...b: string[]) => number; declare 
const fn3: (...args: string[]) => number; declare const c20: (...b: string[]) => number; declare const c21: (...b: string[]) => number; declare const c22: (...b: string[]) => number; declare function curry2<T extends unknown[], U extends unknown[], R>(f: (...args: [...T, ...U]) => R, t: [...T], u: [...U]): R; declare function fn10(a: string, b: number, c: boolean): string[]; declare function ft<T extends unknown[]>(t1: [...T], t2: [...T, number?]): T; declare function call<T extends unknown[], R>(...args: [...T, (...args: T) => R]): [T, R]; declare function f20<T extends unknown[] = []>(args: [...T, number?]): T; declare function f21<U extends string[]>(args: [...U, number?]): void; declare function f22<T extends unknown[] = []>(args: [...T, number]): T; declare function f22<T extends unknown[] = []>(args: [...T]): T; declare function f23<U extends string[]>(args: [...U, number]): void; interface Desc<A extends unknown[], T> { readonly f: (...args: A) => T; bind<T extends unknown[], U extends unknown[], R>(this: Desc<[...T, ...U], R>, ...args: T): Desc<[...U], R>; } declare const a: Desc<[string, number, boolean], object>; declare const b: Desc<[boolean], object>; declare function getUser(id: string, options?: { x?: string; }): string; declare function getOrgUser(id: string, orgId: number, options?: { y?: number; z?: boolean; }): void; declare function callApi<T extends unknown[] = [], U = void>(method: (...args: [...T, object]) => U): (...args_0: T) => U; declare type Numbers = number[]; declare type Unbounded = [...Numbers, boolean]; declare const data: Unbounded; declare type U1 = [string, ...Numbers, boolean]; declare type U2 = [...[string, ...Numbers], boolean]; declare type U3 = [...[string, number], boolean];
Java
/*

   Derby - Class org.apache.derbyBuild.MessageVetter

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */

package org.apache.derbyBuild;

import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Pattern;

/**
 * Class that checks the message files for common problems.
 */
public class MessageVetter {

    /**
     * <p>
     * Check all the message translations in the specified directories for
     * common problems. Assume that all properties files in the directories
     * are message translations.
     * </p>
     *
     * <p>
     * If a problem is found, an error will be raised.
     * </p>
     *
     * @param args names of the directories to check
     * @throws IOException if a directory cannot be listed or a message file
     * cannot be read
     */
    public static void main(String[] args) throws IOException {
        FileFilter filter = new FileFilter() {
            public boolean accept(File pathname) {
                return pathname.getName().endsWith(".properties");
            }
        };
        for (String directory : args) {
            // File.listFiles() returns null when the argument is not a
            // readable directory. Fail with a clear message instead of
            // letting the for-each loop throw a NullPointerException.
            File[] files = new File(directory).listFiles(filter);
            if (files == null) {
                throw new IOException(
                        "Cannot list message files in directory: " + directory);
            }
            for (File file : files) {
                new MessageVetter(file).vet();
            }
        }
    }

    /**
     * A regular expression that matches a single-quote character that is
     * neither preceeded nor followed by another single-quote character. Used
     * by {@link #checkSingleQuotes(java.lang.String, java.lang.String)} to
     * verify that messages contain two single-quotes in order to produce a
     * single apostrophe (dictated by {@code java.text.MessageFormat}).
     */
    private static final Pattern LONE_QUOTE_PATTERN =
            Pattern.compile("^'[^']|[^']'[^']|[^']'$");

    /**
     * A regular expression that matches a single-quote character that have
     * no adjacent single-quote or curly brace character. Used by
     * {@link #checkSingleQuotes(java.lang.String, java.lang.String)} to
     * verify that all single-quotes are either correctly formatted apostrophes
     * or used for quoting curly braces, as required by
     * {@code java.text.MessageFormat}.
     */
    private static final Pattern LONE_QUOTE_ALLOWED_PATTERN =
            Pattern.compile("^'[^'{}]|[^'{}]'[^'{}]|[^'{}]'$");

    /**
     * A set of message identifiers in whose messages single-quotes may legally
     * appear with no adjacent single-quote character. This will be messages
     * where the single-quotes are needed to quote curly braces that should
     * appear literally in the message text.
     */
    private static final Set<String> LONE_QUOTE_ALLOWED = new HashSet<String>();
    static {
        // The IJ help text contains curly braces that need quoting.
        LONE_QUOTE_ALLOWED.add("IJ_HelpText");
        // Some of the DRDA usage messages contain the text {on|off}, which
        // needs quoting.
        LONE_QUOTE_ALLOWED.add("DRDA_Usage8.I");
        LONE_QUOTE_ALLOWED.add("DRDA_Usage11.I");
        LONE_QUOTE_ALLOWED.add("PE_HelpText");
    }

    /** The message file to check. */
    private final File file;

    /** The properties found in the message file. */
    private final Properties properties;

    /**
     * Create a new {@code MessageVetter} instance.
     *
     * @param file the file with the messages to check
     * @throws IOException if the file cannot be loaded
     */
    private MessageVetter(File file) throws IOException {
        this.file = file;
        properties = new Properties();
        FileInputStream in = new FileInputStream(file);
        try {
            properties.load(in);
        } finally {
            in.close();
        }
    }

    /**
     * Vet the messages in this file. An error will be raised if an
     * ill-formatted message is found.
     */
    private void vet() {
        // propertyNames() returns a raw-ish Enumeration<?>; declare it as
        // such instead of the raw type so only the element cast is unchecked.
        Enumeration<?> e = properties.propertyNames();
        while (e.hasMoreElements()) {
            String key = (String) e.nextElement();
            String message = properties.getProperty(key);
            vetMessage(key, message);
        }
    }

    /**
     * Vet a specific message. Raise an error if it is not well-formed.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void vetMessage(String key, String message) {
        checkSingleQuotes(key, message);
        checkValidMessageFormat(key, message);
    }

    /**
     * Check that single-quote characters are doubled, as required by
     * {@code java.text.MessageFormat}. Raise an error otherwise.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void checkSingleQuotes(String key, String message) {
        Pattern p;
        if (LONE_QUOTE_ALLOWED.contains(key)) {
            // In some messages we allow lone single-quote characters, but
            // only if they are used to quote curly braces. Use a regular
            // expression that finds all single-quotes that aren't adjacent to
            // another single-quote or a curly brace character.
            p = LONE_QUOTE_ALLOWED_PATTERN;
        } else {
            // Otherwise, we don't allow lone single-quote characters at all.
            p = LONE_QUOTE_PATTERN;
        }

        if (p.matcher(message).find()) {
            throw new AssertionError("Lone single-quote in message " + key
                    + " in " + file + ".\nThis is OK if it is used for quoting "
                    + "special characters in the message. If this is what the "
                    + "character is used for, add an exception in "
                    + getClass().getName() + ".LONE_QUOTE_ALLOWED.");
        }
    }

    /**
     * Check that a message format specifier is valid. Raise an error if it
     * is not.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void checkValidMessageFormat(String key, String message) {
        try {
            // See if a MessageFormat instance can be produced based on this
            // message format specifier.
            new MessageFormat(message);
        } catch (Exception e) {
            AssertionError ae = new AssertionError(
                    "Message " + key + " in " + file + " isn't a valid "
                    + "java.text.MessageFormat pattern.");
            ae.initCause(e);
            throw ae;
        }
    }
}
Java
<?xml version='1.0' encoding='UTF-8'?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"> <html> <head> <title>groupby - org.saddle.groupby</title> <meta name="description" content="groupby - org.saddle.groupby" /> <meta name="keywords" content="groupby org.saddle.groupby" /> <meta http-equiv="content-type" content="text/html; charset=UTF-8" /> <link href="../../../lib/template.css" media="screen" type="text/css" rel="stylesheet" /> <link href="../../../lib/diagrams.css" media="screen" type="text/css" rel="stylesheet" id="diagrams-css" /> <script type="text/javascript"> if(top === self) { var url = '../../../index.html'; var hash = 'org.saddle.groupby.package'; var anchor = window.location.hash; var anchor_opt = ''; if (anchor.length >= 1) anchor_opt = '@' + anchor.substring(1); window.location.href = url + '#' + hash + anchor_opt; } </script> </head> <body class="value"> <div id="definition"> <img src="../../../lib/package_big.png" /> <p id="owner"><a href="../../package.html" class="extype" name="org">org</a>.<a href="../package.html" class="extype" name="org.saddle">saddle</a></p> <h1>groupby</h1> </div> <h4 id="signature" class="signature"> <span class="modifier_kind"> <span class="modifier"></span> <span class="kind">package</span> </span> <span class="symbol"> <span class="name">groupby</span> </span> </h4> <div id="comment" class="fullcommenttop"></div> <div id="mbrsel"> <div id="textfilter"><span class="pre"></span><span class="input"><input id="mbrsel-input" type="text" accesskey="/" /></span><span class="post"></span></div> <div id="visbl"> <span class="filtertype">Visibility</span> <ol><li class="public in"><span>Public</span></li><li class="all out"><span>All</span></li></ol> </div> </div> <div id="template"> <div id="allMembers"> <div id="types" class="types members"> <h3>Type Members</h3> <ol><li name="org.saddle.groupby.FrameGrouper" visbl="pub" data-isabs="false" fullComment="no" group="Ungrouped"> <a 
id="FrameGrouper[Z,X,Y,T]extendsAnyRef"></a> <a id="FrameGrouper[Z,X,Y,T]:FrameGrouper[Z,X,Y,T]"></a> <h4 class="signature"> <span class="modifier_kind"> <span class="modifier"></span> <span class="kind">class</span> </span> <span class="symbol"> <a href="FrameGrouper.html"><span class="name">FrameGrouper</span></a><span class="tparams">[<span name="Z">Z</span>, <span name="X">X</span>, <span name="Y">Y</span>, <span name="T">T</span>]</span><span class="result"> extends <span class="extype" name="scala.AnyRef">AnyRef</span></span> </span> </h4> <p class="comment cmt">Helper class to do combine or transform after a groupBy </p> </li><li name="org.saddle.groupby.IndexGrouper" visbl="pub" data-isabs="false" fullComment="no" group="Ungrouped"> <a id="IndexGrouper[Y]extendsAnyRef"></a> <a id="IndexGrouper[Y]:IndexGrouper[Y]"></a> <h4 class="signature"> <span class="modifier_kind"> <span class="modifier"></span> <span class="kind">class</span> </span> <span class="symbol"> <a href="IndexGrouper.html"><span class="name">IndexGrouper</span></a><span class="tparams">[<span name="Y">Y</span>]</span><span class="result"> extends <span class="extype" name="scala.AnyRef">AnyRef</span></span> </span> </h4> <p class="comment cmt">Creates groups for each unique key in an index </p> </li><li name="org.saddle.groupby.SeriesGrouper" visbl="pub" data-isabs="false" fullComment="no" group="Ungrouped"> <a id="SeriesGrouper[Y,X,T]extendsIndexGrouper[Y]"></a> <a id="SeriesGrouper[Y,X,T]:SeriesGrouper[Y,X,T]"></a> <h4 class="signature"> <span class="modifier_kind"> <span class="modifier"></span> <span class="kind">class</span> </span> <span class="symbol"> <a href="SeriesGrouper.html"><span class="name">SeriesGrouper</span></a><span class="tparams">[<span name="Y">Y</span>, <span name="X">X</span>, <span name="T">T</span>]</span><span class="result"> extends <a href="IndexGrouper.html" class="extype" name="org.saddle.groupby.IndexGrouper">IndexGrouper</a>[<span class="extype" 
name="org.saddle.groupby.SeriesGrouper.Y">Y</span>]</span> </span> </h4> <p class="comment cmt">Helper class to do combine or transform after a groupBy </p> </li></ol> </div> <div id="values" class="values members"> <h3>Value Members</h3> <ol><li name="org.saddle.groupby.FrameGrouper" visbl="pub" data-isabs="false" fullComment="no" group="Ungrouped"> <a id="FrameGrouper"></a> <a id="FrameGrouper:FrameGrouper"></a> <h4 class="signature"> <span class="modifier_kind"> <span class="modifier"></span> <span class="kind">object</span> </span> <span class="symbol"> <a href="FrameGrouper$.html"><span class="name">FrameGrouper</span></a> </span> </h4> </li><li name="org.saddle.groupby.IndexGrouper" visbl="pub" data-isabs="false" fullComment="no" group="Ungrouped"> <a id="IndexGrouper"></a> <a id="IndexGrouper:IndexGrouper"></a> <h4 class="signature"> <span class="modifier_kind"> <span class="modifier"></span> <span class="kind">object</span> </span> <span class="symbol"> <a href="IndexGrouper$.html"><span class="name">IndexGrouper</span></a> </span> </h4> </li><li name="org.saddle.groupby.SeriesGrouper" visbl="pub" data-isabs="false" fullComment="no" group="Ungrouped"> <a id="SeriesGrouper"></a> <a id="SeriesGrouper:SeriesGrouper"></a> <h4 class="signature"> <span class="modifier_kind"> <span class="modifier"></span> <span class="kind">object</span> </span> <span class="symbol"> <a href="SeriesGrouper$.html"><span class="name">SeriesGrouper</span></a> </span> </h4> </li></ol> </div> </div> <div id="inheritedMembers"> </div> <div id="groupedMembers"> <div class="group" name="Ungrouped"> <h3>Ungrouped</h3> </div> </div> </div> <div id="tooltip"></div> <div id="footer"> </div> <script defer="defer" type="text/javascript" id="jquery-js" src="../../../lib/jquery.js"></script><script defer="defer" type="text/javascript" id="jquery-ui-js" src="../../../lib/jquery-ui.js"></script><script defer="defer" type="text/javascript" id="tools-tooltip-js" 
src="../../../lib/tools.tooltip.js"></script><script defer="defer" type="text/javascript" id="template-js" src="../../../lib/template.js"></script> </body> </html>
Java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tomcat.util.http.parser; import java.io.IOException; import java.io.StringReader; import org.apache.tomcat.util.collections.ConcurrentCache; /** * Caches the results of parsing content-type headers. */ public class MediaTypeCache { private final ConcurrentCache<String,String[]> cache; public MediaTypeCache(int size) { cache = new ConcurrentCache<>(size); } /** * Looks in the cache and returns the cached value if one is present. If no * match exists in the cache, a new parser is created, the input parsed and * the results placed in the cache and returned to the user. * * @param input The content-type header value to parse * @return The results are provided as a two element String array. The * first element is the media type less the charset and * the second element is the charset */ public String[] parse(String input) { String[] result = cache.get(input); if (result != null) { return result; } MediaType m = null; try { m = MediaType.parseMediaType(new StringReader(input)); } catch (IOException e) { // Ignore - return null } if (m != null) { result = new String[] {m.toStringNoCharset(), m.getCharset()}; cache.put(input, result); } return result; } }
Java
package io.agrest.it.fixture.cayenne; import io.agrest.it.fixture.cayenne.auto._E15E1; public class E15E1 extends _E15E1 { private static final long serialVersionUID = 1L; }
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="pt"> <head> <!-- Generated by javadoc (version 1.7.0_71) on Tue Jun 16 10:37:20 BRT 2015 --> <meta http-equiv="Content-Type" content="text/html" charset="UTF-8"> <title>opennlp.tools.doccat (Apache OpenNLP Tools 1.6.0 API)</title> <meta name="date" content="2015-06-16"> <link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style"> </head> <body> <h1 class="bar"><a href="../../../opennlp/tools/doccat/package-summary.html" target="classFrame">opennlp.tools.doccat</a></h1> <div class="indexContainer"> <h2 title="Interfaces">Interfaces</h2> <ul title="Interfaces"> <li><a href="DoccatEvaluationMonitor.html" title="interface in opennlp.tools.doccat" target="classFrame"><i>DoccatEvaluationMonitor</i></a></li> <li><a href="DocumentCategorizer.html" title="interface in opennlp.tools.doccat" target="classFrame"><i>DocumentCategorizer</i></a></li> <li><a href="FeatureGenerator.html" title="interface in opennlp.tools.doccat" target="classFrame"><i>FeatureGenerator</i></a></li> </ul> <h2 title="Classes">Classes</h2> <ul title="Classes"> <li><a href="BagOfWordsFeatureGenerator.html" title="class in opennlp.tools.doccat" target="classFrame">BagOfWordsFeatureGenerator</a></li> <li><a href="DoccatCrossValidator.html" title="class in opennlp.tools.doccat" target="classFrame">DoccatCrossValidator</a></li> <li><a href="DoccatFactory.html" title="class in opennlp.tools.doccat" target="classFrame">DoccatFactory</a></li> <li><a href="DoccatModel.html" title="class in opennlp.tools.doccat" target="classFrame">DoccatModel</a></li> <li><a href="DocumentCategorizerEvaluator.html" title="class in opennlp.tools.doccat" target="classFrame">DocumentCategorizerEvaluator</a></li> <li><a href="DocumentCategorizerEventStream.html" title="class in opennlp.tools.doccat" target="classFrame">DocumentCategorizerEventStream</a></li> <li><a 
href="DocumentCategorizerME.html" title="class in opennlp.tools.doccat" target="classFrame">DocumentCategorizerME</a></li> <li><a href="DocumentSample.html" title="class in opennlp.tools.doccat" target="classFrame">DocumentSample</a></li> <li><a href="DocumentSampleStream.html" title="class in opennlp.tools.doccat" target="classFrame">DocumentSampleStream</a></li> <li><a href="NGramFeatureGenerator.html" title="class in opennlp.tools.doccat" target="classFrame">NGramFeatureGenerator</a></li> </ul> </div> </body> </html>
Java
<html>
<head>
    <link rel="stylesheet" type="text/css" href="{{{host}}}/stylesheets/bootstrap.min.css">
    <script src="{{{host}}}/javascripts/jquery-1.8.3.js"></script>
    <script src="{{{host}}}/javascripts/oauth2client.js"></script>
    <script src="{{{host}}}/{{{TILE_NAME}}}/javascripts/action.js"></script>
    <script>
        $(document).ready( function() { doIt( '{{{host}}}'); });
    </script>
</head>
<body>
<div id="j-card-authentication" class="j-card" style='display: none'>
    <h2>Expanded Project Information ..</h2>
    <p>
    <p>The remote system (Basecamp) requires you to grant access before proceeding</p>
    <!--- Project: <label id="projectA"><b>Placeholder for Project</b> </label> <br>
    Description: <label id="descriptionA"><b>Placeholder for Description</b> </label> <br>
    <a id="BasecampLinkA" href="https://basecamp.com" target="_blank" >Visit this project at Basecamp</a> -->
    </p>
    <br><br>
    <button class="btn btn-primary" id="oauth">Grant Access</button>
    <button class="btn btn-primary" id="btn_doneA">Exit</button>
</div>
<div id="j-card-action" class="j-card" style='display: none'>
    <h3>Expanded Project Information ..</h3>
    <p>
    <h5><u>Project:</u></h5><br>
    <label id="projectB">Placeholder for Project</label> <br><br>
    <h5><u>Description:</u></h5> &nbsp;&nbsp; <br>
    <label id="descriptionB">Placeholder for Description</label> <br><br><br>
    <a id="BasecampLinkB" href="https://basecamp.com" target="_blank" >Visit this project at Basecamp</a>
    </p>
    <button class="btn btn-primary" id="btn_done">Close Window</button>
</div>
</body>
</html>
Java
#pragma once #include "generator/collector_interface.hpp" #include <fstream> #include <functional> #include <memory> #include <string> struct OsmElement; namespace base { class GeoObjectId; } // namespace base namespace generator { namespace cache { class IntermediateDataReaderInterface; } // namespace cache // CollectorTag class collects validated value of a tag and saves it to file with following // format: osmId<tab>tagValue. class CollectorTag : public CollectorInterface { public: using Validator = std::function<bool(std::string const & tagValue)>; explicit CollectorTag(std::string const & filename, std::string const & tagKey, Validator const & validator); // CollectorInterface overrides: std::shared_ptr<CollectorInterface> Clone( std::shared_ptr<cache::IntermediateDataReaderInterface> const & = {}) const override; void Collect(OsmElement const & el) override; void Finish() override; void Merge(CollectorInterface const & collector) override; void MergeInto(CollectorTag & collector) const override; protected: void Save() override; void OrderCollectedData() override; private: std::ofstream m_stream; std::string m_tagKey; Validator m_validator; }; } // namespace generator
Java
################################################################################ # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ################################################################################ import collections import time from enum import Enum from pyflink.datastream import TimerService from pyflink.datastream.timerservice import InternalTimer, K, N, InternalTimerService from pyflink.fn_execution.state_impl import RemoteKeyedStateBackend class InternalTimerImpl(InternalTimer[K, N]): def __init__(self, timestamp: int, key: K, namespace: N): self._timestamp = timestamp self._key = key self._namespace = namespace def get_timestamp(self) -> int: return self._timestamp def get_key(self) -> K: return self._key def get_namespace(self) -> N: return self._namespace def __hash__(self): result = int(self._timestamp ^ (self._timestamp >> 32)) result = 31 * result + hash(tuple(self._key)) result = 31 * result + hash(self._namespace) return result def __eq__(self, other): return self.__class__ == other.__class__ and self._timestamp == other._timestamp \ and self._key == other._key and self._namespace == other._namespace class TimerOperandType(Enum): REGISTER_EVENT_TIMER = 0 REGISTER_PROC_TIMER = 1 DELETE_EVENT_TIMER = 2 DELETE_PROC_TIMER = 3 
class InternalTimerServiceImpl(InternalTimerService[N]):
    """
    Internal implementation of InternalTimerService.

    Timer registrations and deletions are not applied immediately: each call
    is buffered as a (TimerOperandType, InternalTimerImpl) entry in
    ``self.timers`` so the runner can flush them later.
    """

    def __init__(self, keyed_state_backend: RemoteKeyedStateBackend):
        self._keyed_state_backend = keyed_state_backend
        self._current_watermark = None
        # OrderedDict with None values acts as an insertion-ordered set of
        # pending timer operations.
        self.timers = collections.OrderedDict()

    def current_processing_time(self):
        # Wall-clock time in milliseconds.
        return int(time.time() * 1000)

    def current_watermark(self):
        return self._current_watermark

    def advance_watermark(self, watermark: int):
        self._current_watermark = watermark

    def register_processing_time_timer(self, namespace: N, t: int):
        self._buffer_timer_operation(TimerOperandType.REGISTER_PROC_TIMER, namespace, t)

    def register_event_time_timer(self, namespace: N, t: int):
        self._buffer_timer_operation(TimerOperandType.REGISTER_EVENT_TIMER, namespace, t)

    def delete_processing_time_timer(self, namespace: N, t: int):
        self._buffer_timer_operation(TimerOperandType.DELETE_PROC_TIMER, namespace, t)

    def delete_event_time_timer(self, namespace: N, t: int):
        self._buffer_timer_operation(TimerOperandType.DELETE_EVENT_TIMER, namespace, t)

    def _buffer_timer_operation(self, operand_type: 'TimerOperandType',
                                namespace: N, t: int):
        # The four public timer methods differ only in the operand tag; the
        # shared bookkeeping (keyed on the backend's current key) lives here.
        current_key = self._keyed_state_backend.get_current_key()
        timer = (operand_type, InternalTimerImpl(t, current_key, namespace))
        self.timers[timer] = None


class TimerServiceImpl(TimerService):
    """
    Internal implementation of TimerService.

    Thin facade that forwards every call to an InternalTimerServiceImpl with
    a ``None`` namespace.
    """

    def __init__(self, internal_timer_service: InternalTimerServiceImpl):
        self._internal = internal_timer_service
        # Exposed so the runner can drain pending operations directly.
        self.timers = self._internal.timers

    def current_processing_time(self) -> int:
        return self._internal.current_processing_time()

    def current_watermark(self) -> int:
        return self._internal.current_watermark()

    def advance_watermark(self, wm):
        self._internal.advance_watermark(wm)

    def register_processing_time_timer(self, t: int):
        self._internal.register_processing_time_timer(None, t)

    def register_event_time_timer(self, t: int):
        self._internal.register_event_time_timer(None, t)

    def delete_processing_time_timer(self, t: int):
        self._internal.delete_processing_time_timer(None, t)

    def delete_event_time_timer(self, t: int):
        self._internal.delete_event_time_timer(None, t)
Java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2019 the original author or authors. */ package org.assertj.core.error; import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.error.ShouldOnlyHaveFields.shouldOnlyHaveDeclaredFields; import static org.assertj.core.error.ShouldOnlyHaveFields.shouldOnlyHaveFields; import static org.assertj.core.util.Sets.newLinkedHashSet; import java.util.LinkedHashSet; import org.assertj.core.description.Description; import org.assertj.core.description.TextDescription; import org.assertj.core.presentation.Representation; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.test.Player; import org.assertj.core.util.Sets; import org.junit.jupiter.api.Test; /** * Tests for * <code>{@link ShouldOnlyHaveFields#create(Description, Representation)}</code> * * @author Filip Hrisafov */ public class ShouldOnlyHaveFields_create_Test { private static final LinkedHashSet<String> EMPTY_STRING_SET = Sets.<String> newLinkedHashSet(); @Test public void should_create_error_message_for_fields() { ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class, newLinkedHashSet("name", "team"), newLinkedHashSet("nickname"), newLinkedHashSet("address")); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to 
only have the following public accessible fields:%n" + " <[\"name\", \"team\"]>%n" + "fields not found:%n" + " <[\"nickname\"]>%n" + "and fields not expected:%n" + " <[\"address\"]>")); } @Test public void should_not_display_unexpected_fields_when_there_are_none_for_fields() { ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class, newLinkedHashSet("name", "team"), newLinkedHashSet("nickname"), EMPTY_STRING_SET); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following public accessible fields:%n" + " <[\"name\", \"team\"]>%n" + "but could not find the following fields:%n" + " <[\"nickname\"]>")); } @Test public void should_not_display_fields_not_found_when_there_are_none_for_fields() { ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class, newLinkedHashSet("name", "team"), EMPTY_STRING_SET, newLinkedHashSet("address")); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following public accessible fields:%n" + " <[\"name\", \"team\"]>%n" + "but the following fields were unexpected:%n" + " <[\"address\"]>")); } @Test public void should_create_error_message_for_declared_fields() { ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class, newLinkedHashSet("name", "team"), newLinkedHashSet("nickname"), newLinkedHashSet("address")); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following declared fields:%n" + " <[\"name\", \"team\"]>%n" + "fields not found:%n" + " <[\"nickname\"]>%n" + "and fields not expected:%n" + " <[\"address\"]>")); } 
@Test public void should_not_display_unexpected_fields_when_there_are_none_for_declared_fields() { ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class, newLinkedHashSet("name", "team"), newLinkedHashSet("nickname"), EMPTY_STRING_SET); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following declared fields:%n" + " <[\"name\", \"team\"]>%n" + "but could not find the following fields:%n" + " <[\"nickname\"]>")); } @Test public void should_not_display_fields_not_found_when_there_are_none_for_declared_fields() { ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class, newLinkedHashSet("name", "team"), EMPTY_STRING_SET, newLinkedHashSet("address")); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(String.format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following declared fields:%n" + " <[\"name\", \"team\"]>%n" + "but the following fields were unexpected:%n" + " <[\"address\"]>")); } }
Java
/*- * See the file LICENSE for redistribution information. * * Copyright (c) 2001, 2010 Oracle and/or its affiliates. All rights reserved. * * $Id$ */ #include "db_config.h" #include "db_int.h" /* * __os_id -- * Return the current process ID. * * PUBLIC: void __os_id __P((DB_ENV *, pid_t *, db_threadid_t*)); */ void __os_id(dbenv, pidp, tidp) DB_ENV *dbenv; pid_t *pidp; db_threadid_t *tidp; { /* * We can't depend on dbenv not being NULL, this routine is called * from places where there's no DB_ENV handle. * * We cache the pid in the ENV handle, getting the process ID is a * fairly slow call on lots of systems. */ if (pidp != NULL) { if (dbenv == NULL) { #if defined(HAVE_VXWORKS) *pidp = taskIdSelf(); #else *pidp = getpid(); #endif } else *pidp = dbenv->env->pid_cache; } if (tidp != NULL) { #if defined(DB_WIN32) *tidp = GetCurrentThreadId(); #elif defined(HAVE_MUTEX_UI_THREADS) *tidp = thr_self(); #elif defined(HAVE_PTHREAD_SELF) *tidp = pthread_self(); #else /* * Default to just getpid. */ *tidp = 0; #endif } }
Java
{{-- Scheduler page: renders a CLNDR-based calendar plus a modal for creating/editing
     scheduled playlist events. Blade comments ({{-- --}}) produce no HTML output. --}}
@extends('dashboard.main')

{{-- Page-specific stylesheets: iCheck checkboxes, custom scrollbar, datetime picker,
     bootstrap-select. The current locale is exposed to JS via the "lang" meta tag. --}}
@section('styles')
    <meta name="lang" content="{{ \Session::get('locale') }}">
    <link rel="stylesheet" href="{{ URL::to('libs/vendor/iCheck/skins/square/blue.css') }}">
    <link rel="stylesheet" href="{{ URL::to('libs/vendor/malihu-custom-scrollbar-plugin/jquery.mCustomScrollbar.css') }}">
    <link rel="stylesheet" href="{{ URL::to('libs/vendor/datetimepicker/jquery.datetimepicker.css') }}">
    <link rel="stylesheet" href="{{ URL::to('libs/vendor/bootstrap-select/dist/css/bootstrap-select.css') }}">
@endsection

{{-- Page-specific scripts; schedule.js is the page controller and must load last. --}}
@section('scripts')
    <script src="{{ URL::to('libs/vendor/moment/moment.js') }}"></script>
    <script src="{{ URL::to('libs/vendor/moment/locale/en-gb.js') }}"></script>
    <script src="{{ URL::to('libs/dashboard/moment-ru.js') }}"></script>
    <script src="{{ URL::to('libs/vendor/underscore/underscore.js') }}"></script>
    <script src="{{ URL::to('libs/dashboard/notify.min.js') }}"></script>
    <script src="{{ URL::to('libs/vendor/bootstrap-select/dist/js/bootstrap-select.js') }}"></script>
    <script src="{{ URL::to('libs/vendor/malihu-custom-scrollbar-plugin/jquery.mCustomScrollbar.js') }}"></script>
    <script src="{{ URL::to('libs/vendor/datetimepicker/build/jquery.datetimepicker.full.js') }}"></script>
    <script src="{{ URL::asset('libs/vendor/clndr/src/clndr.js') }}"></script>
    <script src="{{ URL::asset('libs/vendor/iCheck/icheck.js') }}"></script>
    <script src="{{ URL::to('libs/dashboard/schedule.js') }}"></script>
@endsection

@section('navigation')
    @include('dashboard.components.nav')
@endsection

@section('body-class', 'page-schedule')

@section('content')
    {{-- Calendar container filled by the clndr partial / clndr.js. --}}
    <div id="full-clndr" class="clearfix">
        @include('dashboard.components.clndr')
    </div>

    {{-- Animated "Loading" indicator, one letter per div. --}}
    <div id="fountainTextG"><div id="fountainTextG_1" class="fountainTextG">L</div><div id="fountainTextG_2" class="fountainTextG">o</div><div id="fountainTextG_3" class="fountainTextG">a</div><div id="fountainTextG_4" class="fountainTextG">d</div><div id="fountainTextG_5" class="fountainTextG">i</div><div id="fountainTextG_6" class="fountainTextG">n</div><div id="fountainTextG_7" class="fountainTextG">g</div></div>

    {{-- Event create/edit modal; all labels resolved through the dashboard lang files. --}}
    <div class="modal fade" tabindex="-1" role="dialog" id="modal">
        <div class="modal-dialog">
            <div class="modal-content">
                <div class="modal-header">
                    <button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button>
                    <h4 class="modal-title">@lang('dashboard.components.scheduler.modal.modalTitle')</h4>
                </div>
                <div class="modal-body">
                    <form>
                        <div class="form-group">
                            <label for="Title">@lang('dashboard.components.scheduler.modal.title')</label>
                            <input type="text" class="form-control" id="Title" placeholder="@lang('dashboard.components.scheduler.modal.title')" title="@lang('dashboard.components.scheduler.modal.titleTip')">
                        </div>
                        <div class="form-group">
                            <label for="Description">@lang('dashboard.components.scheduler.modal.desc')</label>
                            <input type="text" class="form-control" id="Description" placeholder="@lang('dashboard.components.scheduler.modal.desc')">
                        </div>
                        {{-- Playlist picker; option 0 is the "no playlist" placeholder,
                             real options are presumably appended by schedule.js — verify. --}}
                        <div class="form-group">
                            <label for="playlist">@lang('dashboard.components.scheduler.modal.playlist')</label>
                            <select class="selectpicker form-control" id="playlist" name="playlist">
                                <option value="0">@lang('dashboard.components.scheduler.modal.emptyPlaylist')</option>
                            </select>
                        </div>
                        <div class="form-group">
                            <input type="checkbox" id="repeat-day">
                            <label for="repeat-day">@lang('dashboard.components.scheduler.modal.repeat.everyDay')</label>
                        </div>
                        {{-- NOTE(review): id "repeat-month" is labelled with the "everyWeek"
                             string and toggles weekday checkboxes — the id looks misnamed;
                             confirm against schedule.js before renaming. --}}
                        <div class="form-group">
                            <input type="checkbox" id="repeat-month">
                            <label for="repeat-month">@lang('dashboard.components.scheduler.modal.repeat.everyWeek')</label>
                            <div class="form-group" id="repeat-on">
                                <input type="checkbox" id="repeat-on-mon">
                                <label for="repeat-on-mon">@lang('dashboard.components.scheduler.modal.repeat.weeks.mon')</label>
                                <input type="checkbox" id="repeat-on-tue">
                                <label for="repeat-on-tue">@lang('dashboard.components.scheduler.modal.repeat.weeks.tue')</label>
                                <input type="checkbox" id="repeat-on-wed">
                                <label for="repeat-on-wed">@lang('dashboard.components.scheduler.modal.repeat.weeks.wed')</label>
                                <input type="checkbox" id="repeat-on-thu">
                                <label for="repeat-on-thu">@lang('dashboard.components.scheduler.modal.repeat.weeks.thu')</label>
                                <input type="checkbox" id="repeat-on-fri">
                                <label for="repeat-on-fri">@lang('dashboard.components.scheduler.modal.repeat.weeks.fri')</label>
                                <input type="checkbox" id="repeat-on-sat">
                                <label for="repeat-on-sat">@lang('dashboard.components.scheduler.modal.repeat.weeks.sat')</label>
                                <input type="checkbox" id="repeat-on-sun">
                                <label for="repeat-on-sun">@lang('dashboard.components.scheduler.modal.repeat.weeks.sun')</label>
                            </div>
                        </div>
                        <div class="form-group">
                            <label for="datetimepicker">@lang('dashboard.components.scheduler.modal.datetime')</label>
                            <input type="text" class="form-control" id="datetimepicker" placeholder="Datetime" autocomplete="off">
                        </div>
                    </form>
                </div>
                <div class="modal-footer">
                    <button type="button" class="btn btn-danger pull-left delete-event">@lang('dashboard.core.buttons.delete')</button>
                    <button type="button" class="btn btn-default" data-dismiss="modal">@lang('dashboard.core.buttons.close')</button>
                    <button type="button" class="btn btn-primary save-changes">@lang('dashboard.core.buttons.save')</button>
                </div>
            </div><!-- /.modal-content -->
        </div><!-- /.modal-dialog -->
    </div><!-- /.modal -->
@endsection
Java