Schema of each record (fields appear in this order below):

field | type   | stats
patch | string | length 17 to 31.2k
y     | int64  | min 1, max 1
oldf  | string | length 0 to 2.21M
idx   | int64  | min 1, max 1
id    | int64  | min 4.29k, max 68.4k
msg   | string | length 8 to 843
proj  | string | 212 distinct values
lang  | string | 9 distinct values
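As an illustration of how these fields fit together, and assuming this dump comes from a Hugging Face dataset (the dataset id below is a placeholder, not the real name), a record could be inspected like this:

```python
from datasets import load_dataset

# "org/code-review-data" is a placeholder id for this dump.
ds = load_dataset("org/code-review-data", split="train")

row = ds[0]
print(row["lang"], row["proj"])  # e.g. "go", "iotexproject-iotex-core"
print(row["msg"])                # the reviewer's comment on the patch
print(row["patch"])              # the unified diff the comment refers to
```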
@@ -22,6 +22,11 @@ var ( ErrMinusAmount = errors.New("invalid amount that is minus") ) +const ( + // IotxAddressLength defines length of IoTeX address + IotxAddressLength = 41 +) + // ValidateAddress validates IoTeX address func ValidateAddress(addr string) error { if _, err := address.FromString(addr); err != nil {
1
// Copyright (c) 2019 IoTeX // This is an alpha (internal) release and is not suitable for production. This source code is provided 'as is' and no // warranties are given as to title or non-infringement, merchantability or fitness for purpose and, to the extent // permitted by law, all liability for your use of the code is disclaimed. This source code is governed by Apache // License 2.0 that can be found in the LICENSE file. package validator import ( "errors" "github.com/iotexproject/iotex-core/address" ) // Errors var ( // ErrInvalidAddr indicates error for an invalid address ErrInvalidAddr = errors.New("invalid IoTeX address") // ErrLongName indicates error for a long name more than 40 characters ErrLongName = errors.New("invalid long name that is more than 40 characters") // ErrMinusAmount indicates error for a minus amount ErrMinusAmount = errors.New("invalid amount that is minus") ) // ValidateAddress validates IoTeX address func ValidateAddress(addr string) error { if _, err := address.FromString(addr); err != nil { return ErrInvalidAddr } return nil } // ValidateName validates name for account func ValidateName(name string) error { if len(name) > 40 { return ErrLongName } return nil } // ValidateAmount validates amount for action func ValidateAmount(amount int64) error { if amount < 0 { return ErrMinusAmount } return nil }
1
16,146
`IoAddrLen`: Golang prefers short variable names.
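A minimal sketch of the rename the reviewer suggests, assuming the constant stays inside this validator package; `checkAddrLen` is an illustrative helper, not part of the patch:

```go
package validator

import "errors"

// IoAddrLen is the length of an IoTeX address; the short name follows
// the Go convention the reviewer is pointing at.
const IoAddrLen = 41

// ErrInvalidAddr indicates an invalid IoTeX address (mirrors the file above).
var ErrInvalidAddr = errors.New("invalid IoTeX address")

// checkAddrLen is a hypothetical cheap length pre-check before full parsing.
func checkAddrLen(addr string) error {
	if len(addr) != IoAddrLen {
		return ErrInvalidAddr
	}
	return nil
}
```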
iotexproject-iotex-core
go
@@ -34,7 +34,7 @@ class MediaAdmin extends Admin */ public function createQuery($context = 'list') { - $query = $this->getModelManager()->createQuery($this->getClass(), '', $this->root); + $query = $this->getModelManager()->createQuery($this->getClass(), 'a', $this->root); foreach ($this->extensions as $extension) { $extension->configureQuery($this, $query, $context);
1
<?php /* * This file is part of the Sonata package. * * (c) Thomas Rabaix <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Sonata\MediaBundle\Admin\PHPCR; use Sonata\MediaBundle\Admin\BaseMediaAdmin as Admin; use Sonata\AdminBundle\Datagrid\DatagridMapper; use Sonata\AdminBundle\Route\RouteCollection; class MediaAdmin extends Admin { /** * Path to the root node of media documents. * * @var string */ protected $root; public function setRoot($root) { $this->root = $root; } /** * {@inheritdoc} */ public function createQuery($context = 'list') { $query = $this->getModelManager()->createQuery($this->getClass(), '', $this->root); foreach ($this->extensions as $extension) { $extension->configureQuery($this, $query, $context); } return $query; } public function id($object) { return $this->getUrlsafeIdentifier($object); } /** * @param \Sonata\AdminBundle\Datagrid\DatagridMapper $datagridMapper * @return void */ protected function configureDatagridFilters(DatagridMapper $datagridMapper) { // TODO disabled filter due to no attached service for filter types: string, checkbox // $datagridMapper // ->add('name') // ->add('providerReference') // ->add('enabled') // ->add('context') // ; } /** * {@inheritdoc} */ protected function configureRoutes(RouteCollection $collection) { // Allow path in id parameter $collection->add('view', $this->getRouterIdParameter() . '/view', array(), array('id' => '.+', '_method' => 'GET')); $collection->add('show', $this->getRouterIdParameter() . '/show', array( '_controller' => sprintf('%s:%s', $this->baseControllerName, 'view') ), array('id' => '.+', '_method' => 'GET') ); } }
1
6,385
PHPCR-ODM does not allow blank aliases.
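For context, a sketch of the corrected override; the only substantive change is the non-blank alias, since the string itself ('a' here, taken from the patch) is arbitrary:

```php
<?php

use Sonata\MediaBundle\Admin\BaseMediaAdmin as Admin;

class MediaAdmin extends Admin
{
    public function createQuery($context = 'list')
    {
        // PHPCR-ODM rejects a blank document alias, so pass an explicit
        // (arbitrary) one; 'a' matches the patch above.
        $query = $this->getModelManager()
            ->createQuery($this->getClass(), 'a', $this->root);

        foreach ($this->extensions as $extension) {
            $extension->configureQuery($this, $query, $context);
        }

        return $query;
    }
}
```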
sonata-project-SonataMediaBundle
php
@@ -358,7 +358,11 @@ public class HttpSolrClient extends BaseHttpSolrClient { if (parser == null) { parser = this.parser; } - + + Header[] contextHeaders = new Header[2]; + contextHeaders[0] = new BasicHeader(CommonParams.SOLR_REQUEST_CONTEXT_PARAM, getContext().toString()); + contextHeaders[1] = new BasicHeader(CommonParams.SOLR_REQUEST_TYPE_PARAM, request.getRequestType()); + // The parser 'wt=' and 'version=' params are used instead of the original // params ModifiableSolrParams wparams = new ModifiableSolrParams(params);
1
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.client.solrj.impl; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.lang.invoke.MethodHandles; import java.net.ConnectException; import java.net.MalformedURLException; import java.net.SocketTimeoutException; import java.net.URL; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.security.Principal; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import org.apache.commons.io.IOUtils; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.NameValuePair; import org.apache.http.client.HttpClient; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpEntityEnclosingRequestBase; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.conn.HttpClientConnectionManager; import org.apache.http.entity.BasicHttpEntity; import org.apache.http.entity.ContentType; import org.apache.http.entity.InputStreamEntity; import org.apache.http.entity.mime.FormBodyPart; import org.apache.http.entity.mime.HttpMultipartMode; import org.apache.http.entity.mime.MultipartEntity; import org.apache.http.entity.mime.content.InputStreamBody; import org.apache.http.entity.mime.content.StringBody; import org.apache.http.message.BasicHeader; import org.apache.http.message.BasicNameValuePair; import org.apache.solr.client.solrj.ResponseParser; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.V2RequestSupport; import org.apache.solr.client.solrj.request.RequestWriter; import org.apache.solr.client.solrj.request.V2Request; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.Base64; import org.apache.solr.common.util.ContentStream; import org.apache.solr.common.util.ExecutorUtil; 
import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.common.util.Utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.MDC; import static org.apache.solr.common.util.Utils.getObjectByPath; /** * A SolrClient implementation that talks directly to a Solr server via HTTP */ public class HttpSolrClient extends BaseHttpSolrClient { private static final Charset FALLBACK_CHARSET = StandardCharsets.UTF_8; private static final String DEFAULT_PATH = "/select"; private static final long serialVersionUID = -946812319974801896L; /** * User-Agent String. */ public static final String AGENT = "Solr[" + HttpSolrClient.class.getName() + "] 1.0"; private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); static final Class<HttpSolrClient> cacheKey = HttpSolrClient.class; /** * The URL of the Solr server. */ protected volatile String baseUrl; /** * Default value: null / empty. * <p> * Parameters that are added to every request regardless. This may be a place * to add something like an authentication token. */ protected ModifiableSolrParams invariantParams; /** * Default response parser is BinaryResponseParser * <p> * This parser represents the default Response Parser chosen to parse the * response if the parser were not specified as part of the request. * * @see org.apache.solr.client.solrj.impl.BinaryResponseParser */ protected volatile ResponseParser parser; /** * The RequestWriter used to write all requests to Solr * * @see org.apache.solr.client.solrj.request.RequestWriter */ protected volatile RequestWriter requestWriter = new BinaryRequestWriter(); private final HttpClient httpClient; private volatile Boolean followRedirects = false; private volatile boolean useMultiPartPost; private final boolean internalClient; private volatile Set<String> queryParams = Collections.emptySet(); private volatile Integer connectionTimeout; private volatile Integer soTimeout; /** * @deprecated use {@link HttpSolrClient#HttpSolrClient(Builder)} instead, as it is a more extension/subclassing-friendly alternative */ @Deprecated protected HttpSolrClient(String baseURL, HttpClient client, ResponseParser parser, boolean allowCompression) { this(new Builder(baseURL) .withHttpClient(client) .withResponseParser(parser) .allowCompression(allowCompression)); } /** * The constructor. * * @param baseURL The base url to communicate with the Solr server * @param client Http client instance to use for communication * @param parser Response parser instance to use to decode response from Solr server * @param allowCompression Should compression be allowed ? * @param invariantParams The parameters which should be included with every request. * * @deprecated use {@link HttpSolrClient#HttpSolrClient(Builder)} instead, as it is a more extension/subclassing-friendly alternative */ @Deprecated protected HttpSolrClient(String baseURL, HttpClient client, ResponseParser parser, boolean allowCompression, ModifiableSolrParams invariantParams) { this(new Builder(baseURL) .withHttpClient(client) .withResponseParser(parser) .allowCompression(allowCompression) .withInvariantParams(invariantParams)); } protected HttpSolrClient(Builder builder) { this.baseUrl = builder.baseSolrUrl; if (baseUrl.endsWith("/")) { baseUrl = baseUrl.substring(0, baseUrl.length() - 1); } if (baseUrl.indexOf('?') >= 0) { throw new RuntimeException( "Invalid base url for solrj. 
The base URL must not contain parameters: " + baseUrl); } if (builder.httpClient != null) { this.httpClient = builder.httpClient; this.internalClient = false; } else { this.internalClient = true; ModifiableSolrParams params = new ModifiableSolrParams(); params.set(HttpClientUtil.PROP_FOLLOW_REDIRECTS, followRedirects); params.set(HttpClientUtil.PROP_ALLOW_COMPRESSION, builder.compression); httpClient = HttpClientUtil.createClient(params); } this.parser = builder.responseParser; this.invariantParams = builder.invariantParams; this.connectionTimeout = builder.connectionTimeoutMillis; this.soTimeout = builder.socketTimeoutMillis; } public Set<String> getQueryParams() { return queryParams; } /** * Expert Method * @param queryParams set of param keys to only send via the query string * Note that the param will be sent as a query string if the key is part * of this Set or the SolrRequest's query params. * @see org.apache.solr.client.solrj.SolrRequest#getQueryParams */ public void setQueryParams(Set<String> queryParams) { this.queryParams = queryParams; } /** * Process the request. If * {@link org.apache.solr.client.solrj.SolrRequest#getResponseParser()} is * null, then use {@link #getParser()} * * @param request * The {@link org.apache.solr.client.solrj.SolrRequest} to process * @return The {@link org.apache.solr.common.util.NamedList} result * @throws IOException If there is a low-level I/O error. * * @see #request(org.apache.solr.client.solrj.SolrRequest, * org.apache.solr.client.solrj.ResponseParser) */ @Override public NamedList<Object> request(@SuppressWarnings({"rawtypes"})final SolrRequest request, String collection) throws SolrServerException, IOException { ResponseParser responseParser = request.getResponseParser(); if (responseParser == null) { responseParser = parser; } return request(request, responseParser, collection); } public NamedList<Object> request(@SuppressWarnings({"rawtypes"})final SolrRequest request, final ResponseParser processor) throws SolrServerException, IOException { return request(request, processor, null); } public NamedList<Object> request(@SuppressWarnings({"rawtypes"})final SolrRequest request, final ResponseParser processor, String collection) throws SolrServerException, IOException { HttpRequestBase method = createMethod(request, collection); setBasicAuthHeader(request, method); if (request.getHeaders() != null) { @SuppressWarnings({"unchecked"}) Map<String, String> headers = request.getHeaders(); for (Map.Entry<String, String> entry : headers.entrySet()) { method.setHeader(entry.getKey(), entry.getValue()); } } return executeMethod(method, request.getUserPrincipal(), processor, isV2ApiRequest(request)); } private boolean isV2ApiRequest(@SuppressWarnings({"rawtypes"})final SolrRequest request) { return request instanceof V2Request || request.getPath().contains("/____v2"); } private void setBasicAuthHeader(@SuppressWarnings({"rawtypes"})SolrRequest request, HttpRequestBase method) throws UnsupportedEncodingException { if (request.getBasicAuthUser() != null && request.getBasicAuthPassword() != null) { String userPass = request.getBasicAuthUser() + ":" + request.getBasicAuthPassword(); String encoded = Base64.byteArrayToBase64(userPass.getBytes(FALLBACK_CHARSET)); method.setHeader(new BasicHeader("Authorization", "Basic " + encoded)); } } /** * @lucene.experimental */ public static class HttpUriRequestResponse { public HttpUriRequest httpUriRequest; public Future<NamedList<Object>> future; } /** * @lucene.experimental */ public HttpUriRequestResponse 
httpUriRequest(@SuppressWarnings({"rawtypes"})final SolrRequest request) throws SolrServerException, IOException { ResponseParser responseParser = request.getResponseParser(); if (responseParser == null) { responseParser = parser; } return httpUriRequest(request, responseParser); } /** * @lucene.experimental */ public HttpUriRequestResponse httpUriRequest(@SuppressWarnings({"rawtypes"})final SolrRequest request, final ResponseParser processor) throws SolrServerException, IOException { HttpUriRequestResponse mrr = new HttpUriRequestResponse(); final HttpRequestBase method = createMethod(request, null); ExecutorService pool = ExecutorUtil.newMDCAwareFixedThreadPool(1, new SolrNamedThreadFactory("httpUriRequest")); try { MDC.put("HttpSolrClient.url", baseUrl); mrr.future = pool.submit(() -> executeMethod(method, request.getUserPrincipal(), processor, isV2ApiRequest(request))); } finally { pool.shutdown(); MDC.remove("HttpSolrClient.url"); } assert method != null; mrr.httpUriRequest = method; return mrr; } protected ModifiableSolrParams calculateQueryParams(Set<String> queryParamNames, ModifiableSolrParams wparams) { ModifiableSolrParams queryModParams = new ModifiableSolrParams(); if (queryParamNames != null) { for (String param : queryParamNames) { String[] value = wparams.getParams(param) ; if (value != null) { for (String v : value) { queryModParams.add(param, v); } wparams.remove(param); } } } return queryModParams; } static String changeV2RequestEndpoint(String basePath) throws MalformedURLException { URL oldURL = new URL(basePath); String newPath = oldURL.getPath().replaceFirst("/solr", "/api"); return new URL(oldURL.getProtocol(), oldURL.getHost(), oldURL.getPort(), newPath).toString(); } @SuppressWarnings({"unchecked"}) protected HttpRequestBase createMethod(@SuppressWarnings({"rawtypes"})SolrRequest request, String collection) throws IOException, SolrServerException { if (request instanceof V2RequestSupport) { request = ((V2RequestSupport) request).getV2Request(); } SolrParams params = request.getParams(); RequestWriter.ContentWriter contentWriter = requestWriter.getContentWriter(request); Collection<ContentStream> streams = contentWriter == null ? 
requestWriter.getContentStreams(request) : null; String path = requestWriter.getPath(request); if (path == null || !path.startsWith("/")) { path = DEFAULT_PATH; } ResponseParser parser = request.getResponseParser(); if (parser == null) { parser = this.parser; } // The parser 'wt=' and 'version=' params are used instead of the original // params ModifiableSolrParams wparams = new ModifiableSolrParams(params); if (parser != null) { wparams.set(CommonParams.WT, parser.getWriterType()); wparams.set(CommonParams.VERSION, parser.getVersion()); } if (invariantParams != null) { wparams.add(invariantParams); } String basePath = baseUrl; if (collection != null) basePath += "/" + collection; if (request instanceof V2Request) { if (System.getProperty("solr.v2RealPath") == null || ((V2Request) request).isForceV2()) { basePath = baseUrl.replace("/solr", "/api"); } else { basePath = baseUrl + "/____v2"; } } if (SolrRequest.METHOD.GET == request.getMethod()) { if (streams != null || contentWriter != null) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "GET can't send streams!"); } return new HttpGet(basePath + path + wparams.toQueryString()); } if (SolrRequest.METHOD.DELETE == request.getMethod()) { return new HttpDelete(basePath + path + wparams.toQueryString()); } if (SolrRequest.METHOD.POST == request.getMethod() || SolrRequest.METHOD.PUT == request.getMethod()) { String url = basePath + path; boolean hasNullStreamName = false; if (streams != null) { for (ContentStream cs : streams) { if (cs.getName() == null) { hasNullStreamName = true; break; } } } boolean isMultipart = ((this.useMultiPartPost && SolrRequest.METHOD.POST == request.getMethod()) || (streams != null && streams.size() > 1)) && !hasNullStreamName; LinkedList<NameValuePair> postOrPutParams = new LinkedList<>(); if(contentWriter != null) { String fullQueryUrl = url + wparams.toQueryString(); HttpEntityEnclosingRequestBase postOrPut = SolrRequest.METHOD.POST == request.getMethod() ? new HttpPost(fullQueryUrl) : new HttpPut(fullQueryUrl); postOrPut.addHeader("Content-Type", contentWriter.getContentType()); postOrPut.setEntity(new BasicHttpEntity(){ @Override public boolean isStreaming() { return true; } @Override public void writeTo(OutputStream outstream) throws IOException { contentWriter.write(outstream); } }); return postOrPut; } else if (streams == null || isMultipart) { // send server list and request list as query string params ModifiableSolrParams queryParams = calculateQueryParams(this.queryParams, wparams); queryParams.add(calculateQueryParams(request.getQueryParams(), wparams)); String fullQueryUrl = url + queryParams.toQueryString(); HttpEntityEnclosingRequestBase postOrPut = fillContentStream(request, streams, wparams, isMultipart, postOrPutParams, fullQueryUrl); return postOrPut; } // It is has one stream, it is the post body, put the params in the URL else { String fullQueryUrl = url + wparams.toQueryString(); HttpEntityEnclosingRequestBase postOrPut = SolrRequest.METHOD.POST == request.getMethod() ? 
new HttpPost(fullQueryUrl) : new HttpPut(fullQueryUrl); fillSingleContentStream(streams, postOrPut); return postOrPut; } } throw new SolrServerException("Unsupported method: " + request.getMethod()); } private void fillSingleContentStream(Collection<ContentStream> streams, HttpEntityEnclosingRequestBase postOrPut) throws IOException { // Single stream as body // Using a loop just to get the first one final ContentStream[] contentStream = new ContentStream[1]; for (ContentStream content : streams) { contentStream[0] = content; break; } Long size = contentStream[0].getSize(); postOrPut.setEntity(new InputStreamEntity(contentStream[0].getStream(), size == null ? -1 : size) { @Override public Header getContentType() { return new BasicHeader("Content-Type", contentStream[0].getContentType()); } @Override public boolean isRepeatable() { return false; } }); } private HttpEntityEnclosingRequestBase fillContentStream( @SuppressWarnings({"rawtypes"})SolrRequest request, Collection<ContentStream> streams, ModifiableSolrParams wparams, boolean isMultipart, LinkedList<NameValuePair> postOrPutParams, String fullQueryUrl) throws IOException { HttpEntityEnclosingRequestBase postOrPut = SolrRequest.METHOD.POST == request.getMethod() ? new HttpPost(fullQueryUrl) : new HttpPut(fullQueryUrl); if (!isMultipart) { postOrPut.addHeader("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8"); } List<FormBodyPart> parts = new LinkedList<>(); Iterator<String> iter = wparams.getParameterNamesIterator(); while (iter.hasNext()) { String p = iter.next(); String[] vals = wparams.getParams(p); if (vals != null) { for (String v : vals) { if (isMultipart) { parts.add(new FormBodyPart(p, new StringBody(v, StandardCharsets.UTF_8))); } else { postOrPutParams.add(new BasicNameValuePair(p, v)); } } } } // TODO: remove deprecated - first simple attempt failed, see {@link MultipartEntityBuilder} if (isMultipart && streams != null) { for (ContentStream content : streams) { String contentType = content.getContentType(); if (contentType == null) { contentType = BinaryResponseParser.BINARY_CONTENT_TYPE; // default } String name = content.getName(); if (name == null) { name = ""; } parts.add(new FormBodyPart(name, new InputStreamBody( content.getStream(), ContentType.parse(contentType), content.getName()))); } } if (parts.size() > 0) { MultipartEntity entity = new MultipartEntity(HttpMultipartMode.STRICT); for (FormBodyPart p : parts) { entity.addPart(p); } postOrPut.setEntity(entity); } else { //not using multipart postOrPut.setEntity(new UrlEncodedFormEntity(postOrPutParams, StandardCharsets.UTF_8)); } return postOrPut; } private static final List<String> errPath = Arrays.asList("metadata", "error-class");//Utils.getObjectByPath(err, false,"metadata/error-class") @SuppressWarnings({"unchecked", "rawtypes"}) protected NamedList<Object> executeMethod(HttpRequestBase method, Principal userPrincipal, final ResponseParser processor, final boolean isV2Api) throws SolrServerException { method.addHeader("User-Agent", AGENT); org.apache.http.client.config.RequestConfig.Builder requestConfigBuilder = HttpClientUtil.createDefaultRequestConfigBuilder(); if (soTimeout != null) { requestConfigBuilder.setSocketTimeout(soTimeout); } if (connectionTimeout != null) { requestConfigBuilder.setConnectTimeout(connectionTimeout); } if (followRedirects != null) { requestConfigBuilder.setRedirectsEnabled(followRedirects); } method.setConfig(requestConfigBuilder.build()); HttpEntity entity = null; InputStream respBody = null; boolean shouldClose = 
true; try { // Execute the method. HttpClientContext httpClientRequestContext = HttpClientUtil.createNewHttpClientRequestContext(); if (userPrincipal != null) { // Normally the context contains a static userToken to enable reuse resources. // However, if a personal Principal object exists, we use that instead, also as a means // to transfer authentication information to Auth plugins that wish to intercept the request later httpClientRequestContext.setUserToken(userPrincipal); } final HttpResponse response = httpClient.execute(method, httpClientRequestContext); int httpStatus = response.getStatusLine().getStatusCode(); // Read the contents entity = response.getEntity(); respBody = entity.getContent(); String mimeType = null; Charset charset = null; String charsetName = null; ContentType contentType = ContentType.get(entity); if (contentType != null) { mimeType = contentType.getMimeType().trim().toLowerCase(Locale.ROOT); charset = contentType.getCharset(); if (charset != null) { charsetName = charset.name(); } } // handle some http level checks before trying to parse the response switch (httpStatus) { case HttpStatus.SC_OK: case HttpStatus.SC_BAD_REQUEST: case HttpStatus.SC_CONFLICT: // 409 break; case HttpStatus.SC_MOVED_PERMANENTLY: case HttpStatus.SC_MOVED_TEMPORARILY: if (!followRedirects) { throw new SolrServerException("Server at " + getBaseURL() + " sent back a redirect (" + httpStatus + ")."); } break; default: if (processor == null || contentType == null) { throw new RemoteSolrException(baseUrl, httpStatus, "non ok status: " + httpStatus + ", message:" + response.getStatusLine().getReasonPhrase(), null); } } if (processor == null || processor instanceof InputStreamResponseParser) { // no processor specified, return raw stream NamedList<Object> rsp = new NamedList<>(); rsp.add("stream", respBody); rsp.add("closeableResponse", response); // Only case where stream should not be closed shouldClose = false; return rsp; } String procCt = processor.getContentType(); if (procCt != null) { String procMimeType = ContentType.parse(procCt).getMimeType().trim().toLowerCase(Locale.ROOT); if (!procMimeType.equals(mimeType)) { // unexpected mime type String msg = "Expected mime type " + procMimeType + " but got " + mimeType + "."; Charset exceptionCharset = charset != null? charset : FALLBACK_CHARSET; try { msg = msg + " " + IOUtils.toString(respBody, exceptionCharset); } catch (IOException e) { throw new RemoteSolrException(baseUrl, httpStatus, "Could not parse response with encoding " + exceptionCharset, e); } throw new RemoteSolrException(baseUrl, httpStatus, msg, null); } } NamedList<Object> rsp = null; try { rsp = processor.processResponse(respBody, charsetName); } catch (Exception e) { throw new RemoteSolrException(baseUrl, httpStatus, e.getMessage(), e); } Object error = rsp == null ? 
null : rsp.get("error"); if (error != null && (isV2Api || String.valueOf(getObjectByPath(error, true, errPath)).endsWith("ExceptionWithErrObject"))) { throw RemoteExecutionException.create(baseUrl, rsp); } if (httpStatus != HttpStatus.SC_OK && !isV2Api) { NamedList<String> metadata = null; String reason = null; try { if (error != null) { reason = (String) Utils.getObjectByPath(error, false, Collections.singletonList("msg")); if(reason == null) { reason = (String) Utils.getObjectByPath(error, false, Collections.singletonList("trace")); } Object metadataObj = Utils.getObjectByPath(error, false, Collections.singletonList("metadata")); if (metadataObj instanceof NamedList) { metadata = (NamedList<String>) metadataObj; } else if (metadataObj instanceof List) { // NamedList parsed as List convert to NamedList again List<Object> list = (List<Object>) metadataObj; metadata = new NamedList<>(list.size()/2); for (int i = 0; i < list.size(); i+=2) { metadata.add((String)list.get(i), (String) list.get(i+1)); } } else if (metadataObj instanceof Map) { metadata = new NamedList((Map) metadataObj); } } } catch (Exception ex) {} if (reason == null) { StringBuilder msg = new StringBuilder(); msg.append(response.getStatusLine().getReasonPhrase()) .append("\n\n") .append("request: ") .append(method.getURI()); reason = java.net.URLDecoder.decode(msg.toString(), FALLBACK_CHARSET); } RemoteSolrException rss = new RemoteSolrException(baseUrl, httpStatus, reason, null); if (metadata != null) rss.setMetadata(metadata); throw rss; } return rsp; } catch (ConnectException e) { throw new SolrServerException("Server refused connection at: " + getBaseURL(), e); } catch (SocketTimeoutException e) { throw new SolrServerException( "Timeout occurred while waiting response from server at: " + getBaseURL(), e); } catch (IOException e) { throw new SolrServerException( "IOException occurred when talking to server at: " + getBaseURL(), e); } finally { if (shouldClose) { Utils.consumeFully(entity); } } } // ------------------------------------------------------------------- // ------------------------------------------------------------------- /** * Retrieve the default list of parameters are added to every request * regardless. * * @see #invariantParams */ public ModifiableSolrParams getInvariantParams() { return invariantParams; } public String getBaseURL() { return baseUrl; } /** * Change the base-url used when sending requests to Solr. * * Two different paths can be specified as a part of this URL: * * 1) A path pointing directly at a particular core * <pre> * httpSolrClient.setBaseURL("http://my-solr-server:8983/solr/core1"); * QueryResponse resp = httpSolrClient.query(new SolrQuery("*:*")); * </pre> * Note that when a core is provided in the base URL, queries and other requests can be made without mentioning the * core explicitly. However, the client can only send requests to that core. * * 2) The path of the root Solr path ("/solr") * <pre> * httpSolrClient.setBaseURL("http://my-solr-server:8983/solr"); * QueryResponse resp = httpSolrClient.query("core1", new SolrQuery("*:*")); * </pre> * In this case the client is more flexible and can be used to send requests to any cores. The cost of this is that * the core must be specified on each request. */ public void setBaseURL(String baseURL) { this.baseUrl = baseURL; } public ResponseParser getParser() { return parser; } /** * Note: This setter method is <b>not thread-safe</b>. 
* * @param processor * Default Response Parser chosen to parse the response if the parser * were not specified as part of the request. * @see org.apache.solr.client.solrj.SolrRequest#getResponseParser() */ public void setParser(ResponseParser processor) { parser = processor; } /** * Return the HttpClient this instance uses. */ public HttpClient getHttpClient() { return httpClient; } /** * HttpConnectionParams.setConnectionTimeout * * @param timeout * Timeout in milliseconds * * @deprecated since 7.0 Use {@link Builder} methods instead. */ @Deprecated public void setConnectionTimeout(int timeout) { this.connectionTimeout = timeout; } /** * Set SoTimeout (read timeout). This is desirable * for queries, but probably not for indexing. * * @param timeout * Timeout in milliseconds * s * @deprecated since 7.0 Use {@link Builder} methods instead. */ @Deprecated public void setSoTimeout(int timeout) { this.soTimeout = timeout; } /** * Configure whether the client should follow redirects or not. * <p> * This defaults to false under the assumption that if you are following a * redirect to get to a Solr installation, something is misconfigured * somewhere. * </p> */ public void setFollowRedirects(boolean followRedirects) { this.followRedirects = followRedirects; } public void setRequestWriter(RequestWriter requestWriter) { this.requestWriter = requestWriter; } /** * Close the {@link HttpClientConnectionManager} from the internal client. */ @Override public void close() throws IOException { if (httpClient != null && internalClient) { HttpClientUtil.close(httpClient); } } public boolean isUseMultiPartPost() { return useMultiPartPost; } /** * Set the multipart connection properties */ public void setUseMultiPartPost(boolean useMultiPartPost) { this.useMultiPartPost = useMultiPartPost; } /** * Constructs {@link HttpSolrClient} instances from provided configuration. */ public static class Builder extends SolrClientBuilder<Builder> { protected String baseSolrUrl; protected boolean compression; protected ModifiableSolrParams invariantParams = new ModifiableSolrParams(); public Builder() { this.responseParser = new BinaryResponseParser(); } /** * Specify the base-url for the created client to use when sending requests to Solr. * * Two different paths can be specified as a part of this URL: * * 1) A path pointing directly at a particular core * <pre> * SolrClient client = builder.withBaseSolrUrl("http://my-solr-server:8983/solr/core1").build(); * QueryResponse resp = client.query(new SolrQuery("*:*")); * </pre> * Note that when a core is provided in the base URL, queries and other requests can be made without mentioning the * core explicitly. However, the client can only send requests to that core. * * 2) The path of the root Solr path ("/solr") * <pre> * SolrClient client = builder.withBaseSolrUrl("http://my-solr-server:8983/solr").build(); * QueryResponse resp = client.query("core1", new SolrQuery("*:*")); * </pre> * In this case the client is more flexible and can be used to send requests to any cores. This flexibility though * requires that the core is specified on all requests. */ public Builder withBaseSolrUrl(String baseSolrUrl) { this.baseSolrUrl = baseSolrUrl; return this; } /** * Create a Builder object, based on the provided Solr URL. 
* * Two different paths can be specified as a part of this URL: * * 1) A path pointing directly at a particular core * <pre> * SolrClient client = new HttpSolrClient.Builder("http://my-solr-server:8983/solr/core1").build(); * QueryResponse resp = client.query(new SolrQuery("*:*")); * </pre> * Note that when a core is provided in the base URL, queries and other requests can be made without mentioning the * core explicitly. However, the client can only send requests to that core. * * 2) The path of the root Solr path ("/solr") * <pre> * SolrClient client = new HttpSolrClient.Builder("http://my-solr-server:8983/solr").build(); * QueryResponse resp = client.query("core1", new SolrQuery("*:*")); * </pre> * In this case the client is more flexible and can be used to send requests to any cores. This flexibility though * requires that the core be specified on all requests. * * By default, compression is not enabled on created HttpSolrClient objects. */ public Builder(String baseSolrUrl) { this.baseSolrUrl = baseSolrUrl; this.responseParser = new BinaryResponseParser(); } /** * Chooses whether created {@link HttpSolrClient}s use compression by default. */ public Builder allowCompression(boolean compression) { this.compression = compression; return this; } /** * Use a delegation token for authenticating via the KerberosPlugin */ public Builder withKerberosDelegationToken(String delegationToken) { if (this.invariantParams.get(DelegationTokenHttpSolrClient.DELEGATION_TOKEN_PARAM) != null) { throw new IllegalStateException(DelegationTokenHttpSolrClient.DELEGATION_TOKEN_PARAM + " is already defined!"); } this.invariantParams.add(DelegationTokenHttpSolrClient.DELEGATION_TOKEN_PARAM, delegationToken); return this; } /** * Adds to the set of params that the created {@link HttpSolrClient} will add on all requests * * @param params a set of parameters to add to the invariant-params list. These params must be unique and may not * duplicate a param already in the invariant list. */ public Builder withInvariantParams(ModifiableSolrParams params) { Objects.requireNonNull(params, "params must be non null!"); for (String name : params.getParameterNames()) { if (this.invariantParams.get(name) != null) { throw new IllegalStateException("parameter " + name + " is redefined."); } } this.invariantParams.add(params); return this; } /** * Create a {@link HttpSolrClient} based on provided configuration. */ public HttpSolrClient build() { if (baseSolrUrl == null) { throw new IllegalArgumentException("Cannot create HttpSolrClient without a valid baseSolrUrl!"); } if (this.invariantParams.get(DelegationTokenHttpSolrClient.DELEGATION_TOKEN_PARAM) == null) { return new HttpSolrClient(this); } else { return new DelegationTokenHttpSolrClient(this); } } @Override public Builder getThis() { return this; } } }
1
35,714
This seems trivial to forge. Can we tie this into the authentication code paths somehow so that we can really trust that the request is coming from a server instead of just some application claiming to be a server?
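A hypothetical sketch of the forgery the reviewer is worried about: the header constants come from the patch, but the URL and header values below are made up. Nothing authenticates these claims, which is the point of the comment:

```java
import org.apache.http.client.methods.HttpGet;
import org.apache.solr.common.params.CommonParams;

// Sketch: an arbitrary (non-server) client attaching the same headers
// the patch adds, since they are ordinary, unauthenticated HTTP headers.
public class ForgedContextSketch {
    public static HttpGet forgedRequest() {
        HttpGet get = new HttpGet("http://solr-host:8983/solr/core1/select?q=*:*");
        // Unauthenticated claims; the literal values here are hypothetical.
        get.addHeader(CommonParams.SOLR_REQUEST_CONTEXT_PARAM, "SERVER");
        get.addHeader(CommonParams.SOLR_REQUEST_TYPE_PARAM, "QUERY");
        return get;
    }
}
```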
apache-lucene-solr
java
@@ -42,6 +42,7 @@ //@HEADER */ +#include <Kokkos_Core.hpp> #include <Kokkos_Concepts.hpp> #include <SYCL/Kokkos_SYCL_Instance.hpp> #include <KokkosCore_Config_DeclareBackend.hpp>
1
/* //@HEADER // ************************************************************************ // // Kokkos v. 3.0 // Copyright (2020) National Technology & Engineering // Solutions of Sandia, LLC (NTESS). // // Under the terms of Contract DE-NA0003525 with NTESS, // the U.S. Government retains certain rights in this software. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // 3. Neither the name of the Corporation nor the names of the // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY NTESS "AS IS" AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL NTESS OR THE // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Questions? Contact Christian R. 
Trott ([email protected]) // // ************************************************************************ //@HEADER */ #include <Kokkos_Concepts.hpp> #include <SYCL/Kokkos_SYCL_Instance.hpp> #include <KokkosCore_Config_DeclareBackend.hpp> #include <Kokkos_SYCL.hpp> #include <Kokkos_HostSpace.hpp> #include <Kokkos_Serial.hpp> #include <impl/Kokkos_ConcurrentBitset.hpp> #include <impl/Kokkos_Error.hpp> namespace Kokkos { namespace Experimental { namespace Impl { std::vector<std::optional<sycl::queue>*> SYCLInternal::all_queues; std::mutex SYCLInternal::mutex; SYCLInternal::~SYCLInternal() { if (!was_finalized || m_scratchSpace || m_scratchFlags || m_scratchConcurrentBitset) { std::cerr << "Kokkos::Experimental::SYCL ERROR: Failed to call " "Kokkos::Experimental::SYCL::finalize()" << std::endl; std::cerr.flush(); } } int SYCLInternal::verify_is_initialized(const char* const label) const { if (!is_initialized()) { std::cerr << "Kokkos::Experimental::SYCL::" << label << " : ERROR device not initialized" << std::endl; } return is_initialized(); } SYCLInternal& SYCLInternal::singleton() { static SYCLInternal self; return self; } void SYCLInternal::initialize(const sycl::device& d) { auto exception_handler = [](sycl::exception_list exceptions) { bool asynchronous_error = false; for (std::exception_ptr const& e : exceptions) { try { std::rethrow_exception(e); } catch (sycl::exception const& e) { std::cerr << e.what() << '\n'; asynchronous_error = true; } } if (asynchronous_error) Kokkos::Impl::throw_runtime_exception( "There was an asynchronous SYCL error!\n"); }; initialize(sycl::queue{d, exception_handler}); } // FIXME_SYCL void SYCLInternal::initialize(const sycl::queue& q) { if (was_finalized) Kokkos::abort("Calling SYCL::initialize after SYCL::finalize is illegal\n"); if (is_initialized()) return; if (!HostSpace::execution_space::impl_is_initialized()) { const std::string msg( "SYCL::initialize ERROR : HostSpace::execution_space is not " "initialized"); Kokkos::Impl::throw_runtime_exception(msg); } const bool ok_init = nullptr == m_scratchSpace || nullptr == m_scratchFlags; const bool ok_dev = true; if (ok_init && ok_dev) { m_queue = q; // guard pushing to all_queues { std::lock_guard<std::mutex> lock(mutex); all_queues.push_back(&m_queue); } const sycl::device& d = m_queue->get_device(); std::cout << SYCL::SYCLDevice(d) << '\n'; m_maxWorkgroupSize = d.template get_info<sycl::info::device::max_work_group_size>(); // FIXME_SYCL this should give the correct value for NVIDIA GPUs m_maxConcurrency = m_maxWorkgroupSize * 2 * d.template get_info<sycl::info::device::max_compute_units>(); // Setup concurent bitset for obtaining unique tokens from within an // executing kernel. { const int32_t buffer_bound = Kokkos::Impl::concurrent_bitset::buffer_bound(m_maxConcurrency); using Record = Kokkos::Impl::SharedAllocationRecord< Kokkos::Experimental::SYCLDeviceUSMSpace, void>; Record* const r = Record::allocate(Kokkos::Experimental::SYCLDeviceUSMSpace(*m_queue), "Kokkos::SYCL::InternalScratchBitset", sizeof(uint32_t) * buffer_bound); Record::increment(r); m_scratchConcurrentBitset = reinterpret_cast<uint32_t*>(r->data()); auto event = m_queue->memset(m_scratchConcurrentBitset, 0, sizeof(uint32_t) * buffer_bound); fence(event); } m_maxShmemPerBlock = d.template get_info<sycl::info::device::local_mem_size>(); m_indirectKernelMem.reset(*m_queue); m_indirectReducerMem.reset(*m_queue); } else { std::ostringstream msg; msg << "Kokkos::Experimental::SYCL::initialize(...) 
FAILED"; if (!ok_init) { msg << " : Already initialized"; } Kokkos::Impl::throw_runtime_exception(msg.str()); } } void SYCLInternal::finalize() { SYCL().fence(); was_finalized = true; using RecordSYCL = Kokkos::Impl::SharedAllocationRecord<SYCLDeviceUSMSpace>; if (nullptr != m_scratchSpace) RecordSYCL::decrement(RecordSYCL::get_record(m_scratchSpace)); if (nullptr != m_scratchFlags) RecordSYCL::decrement(RecordSYCL::get_record(m_scratchFlags)); m_syclDev = -1; m_scratchSpaceCount = 0; m_scratchSpace = nullptr; m_scratchFlagsCount = 0; m_scratchFlags = nullptr; RecordSYCL::decrement(RecordSYCL::get_record(m_scratchConcurrentBitset)); m_scratchConcurrentBitset = nullptr; m_indirectKernelMem.reset(); m_indirectReducerMem.reset(); // guard erasing from all_queues { std::lock_guard<std::mutex> lock(mutex); all_queues.erase(std::find(all_queues.begin(), all_queues.end(), &m_queue)); } m_queue.reset(); } void* SYCLInternal::scratch_space( const Kokkos::Experimental::SYCL::size_type size) { const size_type sizeScratchGrain = sizeof(Kokkos::Experimental::SYCL::size_type); if (verify_is_initialized("scratch_space") && m_scratchSpaceCount * sizeScratchGrain < size) { m_scratchSpaceCount = (size + sizeScratchGrain - 1) / sizeScratchGrain; using Record = Kokkos::Impl::SharedAllocationRecord< Kokkos::Experimental::SYCLDeviceUSMSpace, void>; if (nullptr != m_scratchSpace) Record::decrement(Record::get_record(m_scratchSpace)); Record* const r = Record::allocate(Kokkos::Experimental::SYCLDeviceUSMSpace(*m_queue), "Kokkos::InternalScratchSpace", (sizeScratchGrain * m_scratchSpaceCount)); Record::increment(r); m_scratchSpace = reinterpret_cast<size_type*>(r->data()); } return m_scratchSpace; } void* SYCLInternal::scratch_flags( const Kokkos::Experimental::SYCL::size_type size) { const size_type sizeScratchGrain = sizeof(Kokkos::Experimental::SYCL::size_type); if (verify_is_initialized("scratch_flags") && m_scratchFlagsCount * sizeScratchGrain < size) { m_scratchFlagsCount = (size + sizeScratchGrain - 1) / sizeScratchGrain; using Record = Kokkos::Impl::SharedAllocationRecord< Kokkos::Experimental::SYCLDeviceUSMSpace, void>; if (nullptr != m_scratchFlags) Record::decrement(Record::get_record(m_scratchFlags)); Record* const r = Record::allocate(Kokkos::Experimental::SYCLDeviceUSMSpace(*m_queue), "Kokkos::InternalScratchFlags", (sizeScratchGrain * m_scratchFlagsCount)); Record::increment(r); m_scratchFlags = reinterpret_cast<size_type*>(r->data()); } m_queue->memset(m_scratchFlags, 0, m_scratchFlagsCount * sizeScratchGrain); fence(*m_queue); return m_scratchFlags; } } // namespace Impl } // namespace Experimental } // namespace Kokkos
1
28,416
Why are you including `<Kokkos_Core.hpp>`?
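The question reads as an include-hygiene concern: `<Kokkos_Core.hpp>` is Kokkos's umbrella header, and this translation unit already includes the specific headers it uses. A sketch of the narrower alternative, assuming nothing in the change actually needs the umbrella:

```cpp
// Include-what-you-use: name the narrow headers this file depends on
// rather than <Kokkos_Core.hpp>, which pulls in the whole public API
// and inflates compile times.
#include <Kokkos_Concepts.hpp>
#include <SYCL/Kokkos_SYCL_Instance.hpp>
#include <impl/Kokkos_Error.hpp>  // Kokkos::Impl::throw_runtime_exception
```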
kokkos-kokkos
cpp
@@ -323,7 +323,7 @@ int api_v1_health(struct flb_hs *hs) counter_init(hs); /* Create a message queue */ - hs->qid_metrics = mk_mq_create(hs->ctx, "/health", + hs->qid_health = mk_mq_create(hs->ctx, "/health", cb_mq_health, NULL); mk_vhost_handler(hs->ctx, hs->vid, "/api/v1/health", cb_health, hs);
1
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ /* Fluent Bit * ========== * Copyright (C) 2019-2021 The Fluent Bit Authors * Copyright (C) 2015-2018 Treasure Data Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include<stdio.h> #include <stdlib.h> #include <fluent-bit/flb_info.h> #include <fluent-bit/flb_pack.h> #include <fluent-bit/flb_sds.h> #include <fluent-bit/flb_macros.h> #include <fluent-bit/flb_http_server.h> #include <msgpack.h> #include "health.h" struct flb_health_check_metrics_counter *metrics_counter; pthread_key_t hs_health_key; static struct mk_list *hs_health_key_create() { struct mk_list *metrics_list = NULL; metrics_list = flb_malloc(sizeof(struct mk_list)); if (!metrics_list) { flb_errno(); return NULL; } mk_list_init(metrics_list); pthread_setspecific(hs_health_key, metrics_list); return metrics_list; } static void hs_health_key_destroy(void *data) { struct mk_list *metrics_list = (struct mk_list*)data; struct mk_list *tmp; struct mk_list *head; struct flb_hs_hc_buf *entry; if (metrics_list == NULL) { return; } mk_list_foreach_safe(head, tmp, metrics_list) { entry = mk_list_entry(head, struct flb_hs_hc_buf, _head); if (entry != NULL) { mk_list_del(&entry->_head); flb_free(entry); } } flb_free(metrics_list); } /* initialize the metrics counters */ static void counter_init(struct flb_hs *hs) { metrics_counter = flb_malloc(sizeof(struct flb_health_check_metrics_counter)); if (!metrics_counter) { flb_errno(); return; } metrics_counter->error_counter = 0; metrics_counter->retry_failure_counter = 0; metrics_counter->error_limit = hs->config->hc_errors_count; metrics_counter->retry_failure_limit = hs->config->hc_retry_failure_count; metrics_counter->period_counter = 0; metrics_counter->period_limit = hs->config->health_check_period; } /* * tell what's the current status for health check * One default background is that the metrics received and saved into * message queue every time is a accumulation of error numbers, * not a error number in recent second. So to get the error number * in a period, we need to use: * the error number of the newest metrics message minus * the error number in oldest metrics of period */ static int is_healthy() { struct mk_list *metrics_list; struct flb_hs_hc_buf *buf; int period_errors; int period_retry_failure; metrics_list = pthread_getspecific(hs_health_key); if (metrics_list == NULL) { metrics_list = hs_health_key_create(); if (metrics_list == NULL) { return FLB_FALSE; } } if (mk_list_is_empty(metrics_list) == 0) { return FLB_TRUE; } /* Get the error metrics entry from the start time of current period */ buf = mk_list_entry_first(metrics_list, struct flb_hs_hc_buf, _head); /* * increase user so clean up function won't * free the memory and delete the data */ buf->users++; /* the error count saved in message queue is the number of * error count at that time. 
So the math is that: * the error count in current period = (current error count in total) - * (begin error count in the period) */ period_errors = metrics_counter->error_counter - buf->error_count; period_retry_failure = metrics_counter->retry_failure_counter - buf->retry_failure_count; buf->users--; if (period_errors > metrics_counter->error_limit || period_retry_failure > metrics_counter->retry_failure_limit) { return FLB_FALSE; } return FLB_TRUE; } /* read the metrics from message queue and update the counter*/ static void read_metrics(void *data, size_t size, int* error_count, int* retry_failure_count) { int i; int j; int m; msgpack_unpacked result; msgpack_object map; size_t off = 0; int errors = 0; int retry_failure = 0; msgpack_unpacked_init(&result); msgpack_unpack_next(&result, data, size, &off); map = result.data; for (i = 0; i < map.via.map.size; i++) { msgpack_object k; msgpack_object v; /* Keys: input, output */ k = map.via.map.ptr[i].key; v = map.via.map.ptr[i].val; if (k.via.str.size != sizeof("output") - 1 || strncmp(k.via.str.ptr, "output", k.via.str.size) != 0) { continue; } /* Iterate sub-map */ for (j = 0; j < v.via.map.size; j++) { msgpack_object sv; /* Keys: plugin name , values: metrics */ sv = v.via.map.ptr[j].val; for (m = 0; m < sv.via.map.size; m++) { msgpack_object mk; msgpack_object mv; mk = sv.via.map.ptr[m].key; mv = sv.via.map.ptr[m].val; if (mk.via.str.size == sizeof("errors") - 1 && strncmp(mk.via.str.ptr, "errors", mk.via.str.size) == 0) { errors += mv.via.u64; } else if (mk.via.str.size == sizeof("retries_failed") - 1 && strncmp(mk.via.str.ptr, "retries_failed", mk.via.str.size) == 0) { retry_failure += mv.via.u64; } } } } *error_count = errors; *retry_failure_count = retry_failure; msgpack_unpacked_destroy(&result); } /* * Delete unused metrics, note that we only care about the latest node * we use this function to maintain the metrics queue only save the metrics * in a period. The old metrics which is out of period will be removed */ static int cleanup_metrics() { int c = 0; struct mk_list *tmp; struct mk_list *head; struct mk_list *metrics_list; struct flb_hs_hc_buf *entry; metrics_list = pthread_getspecific(hs_health_key); if (!metrics_list) { return -1; } if (metrics_counter->period_counter < metrics_counter->period_limit) { return 0; } /* remove the oldest metrics if it's out of period */ mk_list_foreach_safe(head, tmp, metrics_list) { entry = mk_list_entry(head, struct flb_hs_hc_buf, _head); if (metrics_counter->period_counter > metrics_counter->period_limit && entry->users == 0) { metrics_counter->period_counter--; mk_list_del(&entry->_head); flb_free(entry); c++; } else { break; } } return c; } /* * Callback invoked every time some metrics are received through a * message queue channel. This function runs in a Monkey HTTP thread * worker and it purpose is to take the metrics data and record the health * status based on the metrics. * This happens every second based on the event config. * So we treat period_counter to count the time. * And we maintain a message queue with the size of period limit number * so every time we get a new metrics data in, if the message queue size is * large than period limit, we will do the clean up func to * remove the oldest metrics. 
*/ static void cb_mq_health(mk_mq_t *queue, void *data, size_t size) { struct flb_hs_hc_buf *buf; struct mk_list *metrics_list = NULL; int error_count = 0; int retry_failure_count = 0; metrics_list = pthread_getspecific(hs_health_key); if (metrics_list == NULL) { metrics_list = hs_health_key_create(); if (metrics_list == NULL) { return; } } metrics_counter->period_counter++; /* this is to remove the metrics out of period*/ cleanup_metrics(); buf = flb_malloc(sizeof(struct flb_hs_hc_buf)); if (!buf) { flb_errno(); return; } buf->users = 0; read_metrics(data, size, &error_count, &retry_failure_count); metrics_counter->error_counter = error_count; metrics_counter->retry_failure_counter = retry_failure_count; buf->error_count = error_count; buf->retry_failure_count = retry_failure_count; mk_list_add(&buf->_head, metrics_list); } /* API: Get fluent Bit Health Status */ static void cb_health(mk_request_t *request, void *data) { int status = is_healthy(); if (status == FLB_TRUE) { mk_http_status(request, 200); mk_http_send(request, "ok\n", strlen("ok\n"), NULL); mk_http_done(request); } else { mk_http_status(request, 500); mk_http_send(request, "error\n", strlen("error\n"), NULL); mk_http_done(request); } } /* Perform registration */ int api_v1_health(struct flb_hs *hs) { pthread_key_create(&hs_health_key, hs_health_key_destroy); counter_init(hs); /* Create a message queue */ hs->qid_metrics = mk_mq_create(hs->ctx, "/health", cb_mq_health, NULL); mk_vhost_handler(hs->ctx, hs->vid, "/api/v1/health", cb_health, hs); return 0; } void flb_hs_health_destroy() { flb_free(metrics_counter); }
1
16,081
So previously the health check was consuming from the normal metrics queue, and now we duplicate the data into a new queue? I feel like there should be more changes to ensure the health check handler reads from the new queue. @DrewZhang13
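For reference, how the registration looks with the fix applied; the types and callbacks are the ones already declared in this file and are not re-declared here. Whether `cb_mq_health` must also be rewired to consume the new queue, as the reviewer asks, is not answered by this hunk:

```c
/* Sketch of the patched registration: the /health queue id goes into
 * its own field instead of clobbering qid_metrics, which belongs to
 * the metrics endpoint. */
int api_v1_health(struct flb_hs *hs)
{
    pthread_key_create(&hs_health_key, hs_health_key_destroy);
    counter_init(hs);

    /* Dedicated message queue for health-check snapshots */
    hs->qid_health = mk_mq_create(hs->ctx, "/health", cb_mq_health, NULL);

    mk_vhost_handler(hs->ctx, hs->vid, "/api/v1/health", cb_health, hs);
    return 0;
}
```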
fluent-fluent-bit
c
@@ -1470,6 +1470,7 @@ func isValidRuleForXDP(rule *proto.Rule) bool { len(rule.DstPorts) == 0 && len(rule.DstNamedPortIpSetIds) == 0 && len(rule.DstIpSetIds) == 0 && + len(rule.DstIpPortSetIds) == 0 && len(rule.NotDstNet) == 0 && len(rule.NotDstPorts) == 0 && len(rule.NotDstIpSetIds) == 0 &&
1
// Copyright (c) 2019-2021 Tigera, Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package intdataplane import ( "fmt" "net" "strings" "time" log "github.com/sirupsen/logrus" "github.com/projectcalico/felix/bpf" "github.com/projectcalico/felix/ipsets" "github.com/projectcalico/felix/proto" "github.com/projectcalico/libcalico-go/lib/set" ) // XDP state manages XDP programs installed on network interfaces and // the BPF maps those programs use. Each network interface that has an // XDP program installed has its own corresponding BPF map. The "map" // part in "BPF map" suggests a key-value store. And indeed keys are // CIDRs, and values are implementation specific stuff (for now, just // a reference counter). If a CIDR is in a map then it means that // traffic coming from the IP addresses that match this CIDR is // blocked. // // To set up the XDP program and the map we need two things: a list of // network interface names and the list of blocked CIDRs for each // network interface. The list of blocked CIDRs can be different for // each network interface. // // To get the required data we need to track a chain of information we // get from the data store. From the datastore we can receive // information about network interfaces, host endpoints, policies, and // ipsets. Network interfaces are associated with host endpoints. Host // endpoints have information about policies that are applied to the // network interface associated with a particular host endpoint. The // policy can contain information about IDs of ipsets. Ipsets are // basically a sets of members. And these members are put into BPF // maps. // // XDP state does not receive the information about all the above // directly from the datastore, but indirectly through various // managers using callbacks. The network interface and host endpoint // stuff comes from the endpoint manager, policies come from the // policy manager, and ipsets come from the ipsets manager. Callbacks // are set in the PopulateCallbacks function. // // XDP state gathers information during the first phase of the // internal dataplane event loop, where the internal dataplane routes // messages from the data store to each manager, which in turn may // invoke some callbacks. Those callbacks are also invoked at the // beginning of the second phase of the internal dataplane event loop, // where the internal dataplane tells each manager to complete its // deferred work. // // XDP state contains an IP state which is a representation of an XDP // state for a specific IP family. Currently it only contains such a // thing for IPv4. Among other data the IP state has a field called // system state which is a view of the information from the data store // that is relevant to XDP. That is: network interface names, host // endpoints, policies, and ipset IDs. Note the lack of ipset contents // - this is to preserve memory. 
Such a form of a system state // requires us to perform updates of the XDP state in two steps: // processing pending diff state together with applying BPF actions, // and processing member updates. // // After the information gathering is done, it is processed to figure // out the next system state of XDP and generate BPF actions to go to // the desired state from the current one. This part is done in the // ProcessPendingDiffState function. // // Next step is to apply the actions. This happens in the // ApplyBPFActions function. // // Then we need to process member updates. This consumes the // information we get from the ipset manager about changes within // ipsets. This happens in the ProcessMemberUpdates function. // // There is a special step for resynchronization - it modifies BPF // actions based on the actual state of XDP on the system and the // desired state. See the ResyncIfNeeded function. type xdpState struct { ipV4State *xdpIPState common xdpStateCommon } func NewXDPState(allowGenericXDP bool) (*xdpState, error) { lib, err := bpf.NewBPFLib("/usr/lib/calico/bpf/") if err != nil { return nil, err } return NewXDPStateWithBPFLibrary(lib, allowGenericXDP), nil } func NewXDPStateWithBPFLibrary(library bpf.BPFDataplane, allowGenericXDP bool) *xdpState { log.Debug("Created new xdpState.") return &xdpState{ ipV4State: newXDPIPState(4), common: xdpStateCommon{ programTag: "", needResync: true, bpfLib: library, xdpModes: getXDPModes(allowGenericXDP), }, } } func (x *xdpState) OnUpdate(protoBufMsg interface{}) { log.WithField("msg", protoBufMsg).Debug("Received message") switch msg := protoBufMsg.(type) { case *proto.IPSetDeltaUpdate: log.WithField("ipSetId", msg.Id).Debug("IP set delta update") x.ipV4State.addMembersIPSet(msg.Id, membersToSet(msg.AddedMembers)) x.ipV4State.removeMembersIPSet(msg.Id, membersToSet(msg.RemovedMembers)) case *proto.IPSetUpdate: log.WithField("ipSetId", msg.Id).Debug("IP set update") x.ipV4State.replaceIPSet(msg.Id, membersToSet(msg.Members)) case *proto.IPSetRemove: log.WithField("ipSetId", msg.Id).Debug("IP set remove") x.ipV4State.removeIPSet(msg.Id) case *proto.ActivePolicyUpdate: log.WithField("id", msg.Id).Debug("Updating policy chains") x.ipV4State.updatePolicy(*msg.Id, msg.Policy) case *proto.ActivePolicyRemove: log.WithField("id", msg.Id).Debug("Removing policy chains") x.ipV4State.removePolicy(*msg.Id) } } func (x *xdpState) CompleteDeferredWork() error { return nil } func (x *xdpState) PopulateCallbacks(cbs *callbacks) { if x.ipV4State != nil { cbIDs := []*CbID{ cbs.AddInterfaceV4.Append(x.ipV4State.addInterface), cbs.RemoveInterfaceV4.Append(x.ipV4State.removeInterface), cbs.UpdateInterfaceV4.Append(x.ipV4State.updateInterface), cbs.UpdateHostEndpointV4.Append(x.ipV4State.updateHostEndpoint), cbs.RemoveHostEndpointV4.Append(x.ipV4State.removeHostEndpoint), } x.ipV4State.cbIDs = append(x.ipV4State.cbIDs, cbIDs...) 
} } func (x *xdpState) DepopulateCallbacks(cbs *callbacks) { if x.ipV4State != nil { for _, id := range x.ipV4State.cbIDs { cbs.Drop(id) } x.ipV4State.cbIDs = nil } } func (x *xdpState) QueueResync() { x.common.needResync = true } func (x *xdpState) ProcessPendingDiffState(epSourceV4 endpointsSource) { if x.ipV4State != nil { x.ipV4State.processPendingDiffState(epSourceV4) } } func (x *xdpState) ResyncIfNeeded(ipsSourceV4 ipsetsSource) error { var err error if !x.common.needResync { return nil } success := false for i := 0; i < 10; i++ { if i > 0 { log.Info("Retrying after an XDP update failure...") } log.Debug("Resyncing XDP state with dataplane.") err = x.tryResync(newConvertingIPSetsSource(ipsSourceV4)) if err == nil { success = true break } } if !success { return fmt.Errorf("failed to resync: %v", err) } x.common.needResync = false return nil } func (x *xdpState) ApplyBPFActions(ipsSource ipsetsSource) error { if x.ipV4State != nil { memberCacheV4 := newXDPMemberCache(x.ipV4State.getBpfIPFamily(), x.common.bpfLib) err := x.ipV4State.bpfActions.apply(memberCacheV4, x.ipV4State.ipsetIDsToMembers, newConvertingIPSetsSource(ipsSource), x.common.xdpModes) x.ipV4State.bpfActions = newXDPBPFActions() if err != nil { log.WithError(err).Info("Applying BPF actions did not succeed. Queueing XDP resync.") x.QueueResync() return err } } return nil } func (x *xdpState) ProcessMemberUpdates() error { if x.ipV4State != nil { memberCacheV4 := newXDPMemberCache(x.ipV4State.getBpfIPFamily(), x.common.bpfLib) err := x.ipV4State.processMemberUpdates(memberCacheV4) if err != nil { log.WithError(err).Info("Processing member updates did not succeed. Queueing XDP resync.") x.QueueResync() return err } } return nil } func (x *xdpState) DropPendingDiffState() { if x.ipV4State != nil { x.ipV4State.pendingDiffState = newXDPPendingDiffState() } } func (x *xdpState) UpdateState() { if x.ipV4State != nil { x.ipV4State.currentState, x.ipV4State.newCurrentState = x.ipV4State.newCurrentState, nil x.ipV4State.cleanupCache() } } // WipeXDP clears any previously set XDP state, returning an error if synchronization fails. func (x *xdpState) WipeXDP() error { savedIPV4State := x.ipV4State x.ipV4State = newXDPIPState(4) x.ipV4State.newCurrentState = newXDPSystemState() defer func() { x.ipV4State = savedIPV4State }() // Nil source, we are not going to use it anyway, // because we are about to drop everything, and when // we only drop stuff, the code does not call // ipsetsSource functions at all. ipsSource := &nilIPSetsSource{} if err := x.tryResync(ipsSource); err != nil { return err } if err := x.ApplyBPFActions(ipsSource); err != nil { return err } x.QueueResync() return nil } func (x *xdpState) tryResync(ipsSourceV4 ipsetsSource) error { if x.common.programTag == "" { tag, err := x.common.bpfLib.GetXDPObjTagAuto() if err != nil { return err } x.common.programTag = tag } if x.ipV4State != nil { if err := x.ipV4State.tryResync(&x.common, ipsSourceV4); err != nil { return err } } return nil } // xdpIPState holds the XDP state specific to an IP family. 
type xdpIPState struct { ipFamily int ipsetIDsToMembers *ipsetIDsToMembers currentState *xdpSystemState pendingDiffState *xdpPendingDiffState newCurrentState *xdpSystemState bpfActions *xdpBPFActions cbIDs []*CbID logCxt *log.Entry } type ipsetIDsToMembers struct { cache map[string]set.Set // ipSetID -> members pendingReplaces map[string]set.Set // ipSetID -> members pendingAdds map[string]set.Set // ipSetID -> members pendingDeletions map[string]set.Set // ipSetID -> members } func newIPSetIDsToMembers() *ipsetIDsToMembers { i := &ipsetIDsToMembers{} i.Clear() return i } func (i *ipsetIDsToMembers) Clear() { i.cache = make(map[string]set.Set) i.pendingReplaces = make(map[string]set.Set) i.pendingAdds = make(map[string]set.Set) i.pendingDeletions = make(map[string]set.Set) } func (i *ipsetIDsToMembers) GetCached(setID string) (s set.Set, ok bool) { s, ok = i.cache[setID] return } func safeAdd(m map[string]set.Set, setID, member string) { if m[setID] == nil { m[setID] = set.New() } m[setID].Add(member) } func (i *ipsetIDsToMembers) AddMembers(setID string, members set.Set) { if _, ok := i.cache[setID]; !ok { // not tracked by XDP return } if rs, ok := i.pendingReplaces[setID]; ok { members.Iter(func(item interface{}) error { member := item.(string) rs.Add(member) return nil }) } else { members.Iter(func(item interface{}) error { member := item.(string) safeAdd(i.pendingAdds, setID, member) return nil }) } } func (i *ipsetIDsToMembers) RemoveMembers(setID string, members set.Set) { if _, ok := i.cache[setID]; !ok { // not tracked by XDP return } if rs, ok := i.pendingReplaces[setID]; ok { members.Iter(func(item interface{}) error { member := item.(string) rs.Discard(member) return nil }) } else { members.Iter(func(item interface{}) error { member := item.(string) safeAdd(i.pendingDeletions, setID, member) return nil }) } } func (i *ipsetIDsToMembers) Delete(setID string) { if _, ok := i.cache[setID]; !ok { // not tracked by XDP return } i.pendingReplaces[setID] = set.New() delete(i.pendingAdds, setID) delete(i.pendingDeletions, setID) } func (i *ipsetIDsToMembers) Replace(setID string, members set.Set) { if _, ok := i.cache[setID]; !ok { // not tracked by XDP return } i.pendingReplaces[setID] = members delete(i.pendingAdds, setID) delete(i.pendingDeletions, setID) } func (i *ipsetIDsToMembers) UpdateCache() { cachedSetIDs := set.New() for setID := range i.cache { cachedSetIDs.Add(setID) } cachedSetIDs.Iter(func(item interface{}) error { setID := item.(string) if m, ok := i.pendingReplaces[setID]; ok { i.cache[setID] = m } else { if m, ok := i.pendingDeletions[setID]; ok { m.Iter(func(item interface{}) error { member := item.(string) i.cache[setID].Discard(member) return nil }) } if m, ok := i.pendingAdds[setID]; ok { m.Iter(func(item interface{}) error { member := item.(string) i.cache[setID].Add(member) return nil }) } } return nil }) // flush everything i.pendingReplaces = make(map[string]set.Set) i.pendingAdds = make(map[string]set.Set) i.pendingDeletions = make(map[string]set.Set) } func (i *ipsetIDsToMembers) SetCache(setID string, members set.Set) { i.cache[setID] = members } func newXDPIPState(ipFamily int) *xdpIPState { return &xdpIPState{ ipFamily: ipFamily, ipsetIDsToMembers: newIPSetIDsToMembers(), currentState: newXDPSystemState(), pendingDiffState: newXDPPendingDiffState(), bpfActions: newXDPBPFActions(), cbIDs: nil, logCxt: log.WithField("family", ipFamily), } } func (s *xdpIPState) getBpfIPFamily() bpf.IPFamily { if s.ipFamily == 4 { return bpf.IPFamilyV4 } 
s.logCxt.WithField("ipFamily", s.ipFamily).Panic("Invalid ip family.") return bpf.IPFamilyUnknown } // newXDPResyncState creates the xdpResyncState object, returning an error on failure. func (s *xdpIPState) newXDPResyncState(bpfLib bpf.BPFDataplane, ipsSource ipsetsSource, programTag string, xpdModes []bpf.XDPMode) (*xdpResyncState, error) { xdpIfaces, err := bpfLib.GetXDPIfaces() if err != nil { return nil, err } s.logCxt.WithField("ifaces", xdpIfaces).Debug("Interfaces with XDP program installed.") ifacesWithProgs := make(map[string]progInfo, len(xdpIfaces)) for _, iface := range xdpIfaces { tag, tagErr := bpfLib.GetXDPTag(iface) mode, modeErr := bpfLib.GetXDPMode(iface) // error can happen when the program was not pinned in the bpf filesystem, so we say it's bogus anyway bogus := tagErr != nil || tag != programTag || modeErr != nil || !isValidMode(mode, xpdModes) ifacesWithProgs[iface] = progInfo{ bogus: bogus, } } ifacesWithPinnedMaps, err := bpfLib.ListCIDRMaps(s.getBpfIPFamily()) if err != nil { return nil, err } s.logCxt.WithField("ifaces", ifacesWithPinnedMaps).Debug("Interfaces with BPF blacklist maps.") ifacesWithMaps := make(map[string]mapInfo, len(ifacesWithPinnedMaps)) for _, iface := range ifacesWithPinnedMaps { mapOk, err := bpfLib.IsValidMap(iface, s.getBpfIPFamily()) if err != nil { return nil, err } mapBogus := !mapOk mapMismatch, err := func() (bool, error) { if _, ok := ifacesWithProgs[iface]; !ok { return false, nil } mapID, err := bpfLib.GetCIDRMapID(iface, s.getBpfIPFamily()) if err != nil { return false, err } mapIDs, err := bpfLib.GetMapsFromXDP(iface) if err != nil { return false, err } matched := false for _, id := range mapIDs { if mapID == id { matched = true break } } return !matched, nil }() if err != nil { return nil, err } var mapContents map[bpf.CIDRMapKey]uint32 if !mapBogus { dump, err := bpfLib.DumpCIDRMap(iface, s.getBpfIPFamily()) if err != nil { return nil, err } mapContents = dump } ifacesWithMaps[iface] = mapInfo{ bogus: mapBogus, mismatched: mapMismatch, contents: mapContents, } s.logCxt.WithFields(log.Fields{ "iface": iface, "info": ifacesWithMaps[iface], }).Debug("Information about BPF blacklist map.") } visited := set.New() ipsetMembers := make(map[string]set.Set) for _, data := range s.newCurrentState.IfaceNameToData { for _, setIDs := range data.PoliciesToSetIDs { var opErr error setIDs.Iter(func(item interface{}) error { setID := item.(string) if visited.Contains(setID) { return nil } members, err := s.getIPSetMembers(setID, ipsSource) if err != nil { opErr = err return set.StopIteration } s.logCxt.WithFields(log.Fields{ "setID": setID, "memberCount": members.Len(), }).Debug("Information about ipset members.") ipsetMembers[setID] = members visited.Add(setID) return nil }) if opErr != nil { return nil, opErr } } } return &xdpResyncState{ ifacesWithProgs: ifacesWithProgs, ifacesWithMaps: ifacesWithMaps, ipsetMembers: ipsetMembers, }, nil } func isValidMode(mode bpf.XDPMode, xdpModes []bpf.XDPMode) bool { for _, xdpMode := range xdpModes { if xdpMode == mode { return true } } return false } func (s *xdpIPState) getIPSetMembers(setID string, ipsSource ipsetsSource) (set.Set, error) { return getIPSetMembers(s.ipsetIDsToMembers, setID, ipsSource) } // tryResync reconciles the system's XDP state (derived from xdpStateCommon) // with desired state (see ipsSource and the IpSetsManager for implementation details). // It modifies the BPF actions based on the state of XDP on the system // and on the desired state. It also repopulates the members cache. 
// // This function ensures that after applying the BPF actions, the XDP // state will be consistent. Which means making sure that XDP programs // are installed in desired interfaces, that they are referencing // correct maps, and that maps contain the desired ipsets. func (s *xdpIPState) tryResync(common *xdpStateCommon, ipsSource ipsetsSource) error { resyncStart := time.Now() defer func() { s.logCxt.WithField("resyncDuration", time.Since(resyncStart)).Debug("Finished XDP resync.") }() s.ipsetIDsToMembers.Clear() resyncState, err := s.newXDPResyncState(common.bpfLib, ipsSource, common.programTag, common.xdpModes) if err != nil { return err } s.fixupXDPProgramAndMapConsistency(resyncState) s.fixupBlacklistContents(resyncState) return nil } // fixupXDPProgramAndMapConsistency ensures that XDP programs are // installed on the proper network interfaces, are valid, and // reference the correct maps. // // There are several concepts related to programs and maps: // // A program can be installed or not. If the program is installed, it // can be valid or not. A valid XDP program is a program that has an // expected tag. Tag is basically a checksum of the program's // bytecode. We figure out the desired program tag on the first // resync. The tag is computed by the kernel, so it is not something // we can know in advance. // // A map can exist or not. If it exists then it can be valid or // not. If it is valid then it can be mismatched or not. A valid map // is a map of an expected type with an expected key and value size // (for the kernel, keys and values are purely array of bytes, and the // length of those arrays needs to be defined at map creation time // along with the map type). A mismatched map means that it is not // used by the program. Which in reality means that the program is // invalid and needs to be replaced. // // Since an XDP program references a BPF map and not the other way // around, it means that if a map is invalid and needs to be replaced, // then the program that references the map needs to be replaced too. // In case of mismatched maps, only the program gets replaced. 
func (s *xdpIPState) fixupXDPProgramAndMapConsistency(resyncState *xdpResyncState) { ifaces := s.getIfaces(resyncState, giNS|giWX|giIX|giUX|giWM|giCM|giRM) ifaces.Iter(func(item interface{}) error { iface := item.(string) shouldHaveXDP := func() bool { if data, ok := s.newCurrentState.IfaceNameToData[iface]; ok { return data.NeedsXDP() } return false }() hasXDP, hasBogusXDP := func() (bool, bool) { if progInfo, ok := resyncState.ifacesWithProgs[iface]; ok { return true, progInfo.bogus } return false, false }() mapExists, mapBogus, mapMismatch := func() (bool, bool, bool) { if mapInfo, ok := resyncState.ifacesWithMaps[iface]; ok { return true, mapInfo.bogus, mapInfo.mismatched } return false, false, false }() s.logCxt.WithFields(log.Fields{ "iface": iface, "hasProgram": hasXDP, "isProgramBogus": hasBogusXDP, "wantsProgram": shouldHaveXDP, "mapExists": mapExists, "mapBogus": mapBogus, "mapMismatched": mapMismatch, }).Debug("Resync - fixing XDP program and map consistency.") func() { if !hasXDP && !shouldHaveXDP { s.bpfActions.InstallXDP.Discard(iface) s.bpfActions.UninstallXDP.Discard(iface) if !mapExists { s.bpfActions.CreateMap.Discard(iface) s.bpfActions.RemoveMap.Discard(iface) } else { s.bpfActions.CreateMap.Discard(iface) s.bpfActions.RemoveMap.Add(iface) } return } if !hasXDP && shouldHaveXDP { s.bpfActions.InstallXDP.Add(iface) s.bpfActions.UninstallXDP.Discard(iface) if !mapExists { s.bpfActions.CreateMap.Add(iface) s.bpfActions.RemoveMap.Discard(iface) } else if mapBogus { s.bpfActions.CreateMap.Add(iface) s.bpfActions.RemoveMap.Add(iface) } else { // mismatch is not possible, so it's a // good map s.bpfActions.CreateMap.Discard(iface) s.bpfActions.RemoveMap.Discard(iface) } return } if hasXDP && !shouldHaveXDP { s.bpfActions.InstallXDP.Discard(iface) s.bpfActions.UninstallXDP.Add(iface) if !mapExists { s.bpfActions.CreateMap.Discard(iface) s.bpfActions.RemoveMap.Discard(iface) } else { s.bpfActions.CreateMap.Discard(iface) s.bpfActions.RemoveMap.Add(iface) } return } if hasXDP && !hasBogusXDP && shouldHaveXDP { if !mapExists { // Good program, but no map? Means the // program needs to be replaced, so it // reads from the correct maps. The // map needs to be created. s.bpfActions.InstallXDP.Add(iface) s.bpfActions.UninstallXDP.Add(iface) s.bpfActions.CreateMap.Add(iface) s.bpfActions.RemoveMap.Discard(iface) } else if mapBogus { // Good program, but bogus map? Means // the program needs to be replaced, // so it reads from the correct // maps. The map needs to be replaced. s.bpfActions.InstallXDP.Add(iface) s.bpfActions.UninstallXDP.Add(iface) s.bpfActions.CreateMap.Add(iface) s.bpfActions.RemoveMap.Add(iface) } else if mapMismatch { // Good program, but mismatched map? // Means the program needs to be // replaced, so it reads from the // correct maps. The map itself is // fine. s.bpfActions.InstallXDP.Add(iface) s.bpfActions.UninstallXDP.Add(iface) s.bpfActions.CreateMap.Discard(iface) s.bpfActions.RemoveMap.Discard(iface) } else { // Good program reading from correct // maps. Nothing to do. 
s.bpfActions.InstallXDP.Discard(iface) s.bpfActions.UninstallXDP.Discard(iface) s.bpfActions.CreateMap.Discard(iface) s.bpfActions.RemoveMap.Discard(iface) } return } if hasXDP && hasBogusXDP && shouldHaveXDP { s.bpfActions.InstallXDP.Add(iface) s.bpfActions.UninstallXDP.Add(iface) if !mapExists { s.bpfActions.CreateMap.Add(iface) s.bpfActions.RemoveMap.Discard(iface) } else if mapBogus { s.bpfActions.CreateMap.Add(iface) s.bpfActions.RemoveMap.Add(iface) } else { // Mismatched or not, the map itself // is ok, so nothing to do here. The // replaced program will make use of // it. s.bpfActions.CreateMap.Discard(iface) s.bpfActions.RemoveMap.Discard(iface) } return } }() s.logCxt.WithFields(log.Fields{ "iface": iface, "installXDP": s.bpfActions.InstallXDP.Contains(iface), "uninstallXDP": s.bpfActions.UninstallXDP.Contains(iface), "createMap": s.bpfActions.CreateMap.Contains(iface), "removeMap": s.bpfActions.RemoveMap.Contains(iface), }).Debug("Resync - finished fixing XDP program and map consistency.") return nil }) } // fixupBlacklistContents ensures that contents of the BPF maps are in // sync with ipsets those maps should contain. // // There are two cases - the BPF map is going to be created/replaced, // and the BPF map already exists. When BPF map is about to be // created/replaced, we just need to set up BPF actions that are about // inserting whole ipsets into the BPF map. But if the map already // exists, then we need to dump the contents of the map, compute the // desired contents of the map, figure out the missing or superfluous // members and update the BPF actions that are about modifying the BPF // maps on a member level. func (s *xdpIPState) fixupBlacklistContents(resyncState *xdpResyncState) { ifaces := s.getIfaces(resyncState, giNS) ifaces.Iter(func(item interface{}) error { iface := item.(string) createMap := s.bpfActions.CreateMap.Contains(iface) s.logCxt.WithFields(log.Fields{ "iface": iface, "mapCreate": createMap, }).Debug("Resync - fixing map contents.") if createMap { s.fixupBlacklistContentsFreshMap(iface) } else { if _, ok := resyncState.ifacesWithMaps[iface]; !ok { s.logCxt.WithField("iface", iface).Panic("Resync - iface missing from ifaces with maps in resync state!") } s.fixupBlacklistContentsExistingMap(resyncState, iface) } s.logCxt.WithFields(log.Fields{ "iface": iface, "addToMap": s.bpfActions.AddToMap[iface], "removeFromMap": s.bpfActions.RemoveFromMap[iface], "membersToAdd": s.bpfActions.MembersToAdd[iface], "membersToDrop": s.bpfActions.MembersToDrop[iface], }).Debug("Resync - finished fixing map contents.") return nil }) for _, m := range []map[string]map[string]uint32{s.bpfActions.AddToMap, s.bpfActions.RemoveFromMap} { for iface := range m { if !ifaces.Contains(iface) { delete(m, iface) } } } } func (s *xdpIPState) fixupBlacklistContentsFreshMap(iface string) { setIDToRefCount := s.getSetIDToRefCountFromNewState(iface) s.bpfActions.AddToMap[iface] = setIDToRefCount delete(s.bpfActions.RemoveFromMap, iface) } func (s *xdpIPState) fixupBlacklistContentsExistingMap(resyncState *xdpResyncState, iface string) { membersInBpfMap := resyncState.ifacesWithMaps[iface].contents setIDsInNS := s.getSetIDToRefCountFromNewState(iface) membersInNS := make(map[string]uint32) for setID, refCount := range setIDsInNS { if _, ok := resyncState.ipsetMembers[setID]; !ok { s.logCxt.WithFields(log.Fields{ "iface": iface, "setID": setID, "wantedRefCount": refCount, }).Panic("Resync - set id missing from ip set members in resync state!") } 
resyncState.ipsetMembers[setID].Iter(func(item interface{}) error { member := item.(string) membersInNS[member] += refCount return nil }) } for mapKey, actualRefCount := range membersInBpfMap { member := mapKey.ToIPNet().String() expectedRefCount := membersInNS[member] s.logCxt.WithFields(log.Fields{ "iface": iface, "member": member, "actualRefCount": actualRefCount, "expectedRefCount": expectedRefCount, }).Debug("Resync - syncing member.") if expectedRefCount > actualRefCount { s.updateMembersToChange(s.bpfActions.MembersToAdd, iface, member, expectedRefCount-actualRefCount) } else if expectedRefCount < actualRefCount { s.updateMembersToChange(s.bpfActions.MembersToDrop, iface, member, actualRefCount-expectedRefCount) } delete(membersInNS, member) } for member, expectedRefCount := range membersInNS { s.logCxt.WithFields(log.Fields{ "iface": iface, "member": member, "expectedRefCount": expectedRefCount, }).Debug("Resync - missing member.") s.updateMembersToChange(s.bpfActions.MembersToAdd, iface, member, expectedRefCount) } delete(s.bpfActions.AddToMap, iface) delete(s.bpfActions.RemoveFromMap, iface) } func (s *xdpIPState) updateMembersToChange(membersToChangeMap map[string]map[string]uint32, iface, member string, refCount uint32) { memberToRefCountMap := func() map[string]uint32 { m := membersToChangeMap[iface] if m == nil { m = make(map[string]uint32) membersToChangeMap[iface] = m } return m }() memberToRefCountMap[member] += refCount } func (s *xdpIPState) getSetIDToRefCountFromNewState(iface string) map[string]uint32 { setIDToRefCount := make(map[string]uint32) if data, ok := s.newCurrentState.IfaceNameToData[iface]; ok { for _, setIDs := range data.PoliciesToSetIDs { setIDs.Iter(func(item interface{}) error { setID := item.(string) setIDToRefCount[setID] += 1 return nil }) } } return setIDToRefCount } type IfaceFlags uint8 const ( // from new state giNS = 1 << iota // from installXDP giIX // from uninstall XDP giUX // from ifacesWithProgs giWX // from createMaps giCM // from removeMaps giRM // from ifacesWithMaps giWM ) func (s *xdpIPState) getIfaces(resyncState *xdpResyncState, flags IfaceFlags) set.Set { ifaces := set.New() addFromSet := func(item interface{}) error { ifaces.Add(item) return nil } if flags&giNS == giNS { for iface, data := range s.newCurrentState.IfaceNameToData { if data.NeedsXDP() { ifaces.Add(iface) } } } if flags&giIX == giIX { s.bpfActions.InstallXDP.Iter(addFromSet) } if flags&giUX == giUX { s.bpfActions.UninstallXDP.Iter(addFromSet) } if flags&giWX == giWX { for iface := range resyncState.ifacesWithProgs { ifaces.Add(iface) } } if flags&giCM == giCM { s.bpfActions.CreateMap.Iter(addFromSet) } if flags&giRM == giRM { s.bpfActions.RemoveMap.Iter(addFromSet) } if flags&giWM == giWM { for iface := range resyncState.ifacesWithMaps { ifaces.Add(iface) } } return ifaces } // PROCESS MEMBER UPDATES func (s *xdpIPState) processMemberUpdates(memberCache *xdpMemberCache) error { s.logCxt.Debug("Processing member updates.") // process member changes changes := s.getMemberChanges() for setID, change := range changes { ifacesToRefCounts := s.getAffectedIfaces(setID) s.logCxt.WithFields(log.Fields{ "setID": setID, "affectedIfaces": ifacesToRefCounts, }).Debug("Processing member changes.") for iface, refCount := range ifacesToRefCounts { s.logCxt.WithFields(log.Fields{ "setID": setID, "iface": iface, "refCount": refCount, "toAdd": change.toAdd, "toDrop": change.toDrop, }).Debug("Processing BPF map changes.") miDelete := &memberIterSet{ members: change.toDrop, refCount: 
refCount, } if err := processMemberDeletions(memberCache, iface, miDelete); err != nil { return err } miAdd := &memberIterSet{ members: change.toAdd, refCount: refCount, } if err := processMemberAdds(memberCache, iface, miAdd); err != nil { return err } } } s.logCxt.Debug("Updating ipsetIDsToMembers cache.") s.ipsetIDsToMembers.UpdateCache() return nil } // processPendingDiffState processes the information the state has // gathered from callbacks and generates the new desired state and the // actions that, when executed, will get the current state into the // new desired state. // // The aim is to get a list of IP addresses/CIDRs to be blocked on // network interfaces. We can get addresses/CIDRs from ipsets. We can // get ipsets from policies. We can get policies from host endpoints. // Host endpoints are associated with network interfaces. All this // creates a chain from interface to addresses/CIDRs: network // interface -> host endpoint -> policies -> ipsets -> // addresses/CIDRs. // // In this function we process the information in the same order as it // is in the chain, so first we process the changes wrt. network // interfaces, then changes in host endpoints, then changes in // policies. Note that changes in ipsets themselves are processed // elsewhere (see the processMemberUpdates function), because members // of ipsets are not stored in the current state/new desired state. // Current state has a granularity up to the ipset ID level. // // The function is careful to process each interface at most once - so // if the network interface's host endpoint has changed and some // policy associated with the host endpoint has changed, then the // interface is only processed in the part of the code that handles // updates of the host endpoint and it is skipped in the code that // handles policy updates. 
func (s *xdpIPState) processPendingDiffState(epSource endpointsSource) { cs := s.currentState s.newCurrentState = cs.Copy() newCs := s.newCurrentState s.logCxt.WithField("cs", cs).Debug("Processing pending diff state.") pds := s.pendingDiffState ba := s.bpfActions rawHep := epSource.GetRawHostEndpoints() processedIfaces := set.New() // keys are interface names, values are maps with keys being // set IDs, and values being ref count delta (can be less or // greater than zero) changeInMaps := make(map[string]map[string]int) // CHANGES IN INTERFACES // new ifaces for ifaceName, hepID := range pds.NewIfaceNameToHostEpID { s.logCxt.WithFields(log.Fields{ "iface": ifaceName, "hostEpId": hepID.String(), }).Debug("New iface with host endpoint.") s.processHostEndpointChange(ifaceName, &xdpIfaceData{}, hepID, rawHep[hepID], changeInMaps) processedIfaces.Add(ifaceName) } // dropped ifaces pds.IfaceNamesToDrop.Iter(func(item interface{}) error { ifName := item.(string) s.logCxt.WithField("iface", ifName).Debug("Iface is gone.") dropXDP := false if data, ok := cs.IfaceNameToData[ifName]; ok { dropXDP = data.NeedsXDP() } if dropXDP { ba.UninstallXDP.Add(ifName) ba.RemoveMap.Add(ifName) } delete(newCs.IfaceNameToData, ifName) processedIfaces.Add(ifName) return nil }) // Host Endpoints that changed for ifaceName, newEpID := range pds.IfaceEpIDChange { data := cs.IfaceNameToData[ifaceName] s.logCxt.WithFields(log.Fields{ "iface": ifaceName, "oldHostEp": data.EpID.String(), "newHostEp": newEpID.String(), }).Debug("Iface has a different host endpoint.") s.processHostEndpointChange(ifaceName, &data, newEpID, rawHep[newEpID], changeInMaps) processedIfaces.Add(ifaceName) } // CHANGES IN HOST ENDPOINTS // Host Endpoints that were updated pds.UpdatedHostEndpoints.Iter(func(item interface{}) error { hepID := item.(proto.HostEndpointID) s.logCxt.WithField("hostEpId", hepID.String()).Debug("Host endpoint has changed.") for ifaceName, data := range cs.IfaceNameToData { if processedIfaces.Contains(ifaceName) { s.logCxt.WithField("iface", ifaceName).Debug("Iface already processed, ignoring.") // ignore continue } if data.EpID != hepID { s.logCxt.WithFields(log.Fields{ "iface": ifaceName, "hostEpId": data.EpID.String(), }).Debug("Iface has different host endpoint, ignoring.") continue } s.logCxt.WithField("iface", ifaceName).Debug("Processing iface.") s.processHostEndpointChange(ifaceName, &data, hepID, rawHep[hepID], changeInMaps) processedIfaces.Add(ifaceName) } return nil }) // Host Endpoints that were removed pds.RemovedHostEndpoints.Iter(func(item interface{}) error { // XXX do nothing return nil }) // CHANGES IN POLICIES // Policies that should be removed pds.PoliciesToRemove.Iter(func(item interface{}) error { policyID := item.(proto.PolicyID) delete(newCs.XDPEligiblePolicies, policyID) return nil }) // Policies that should be updated ifacesWithUpdatedPolicies := set.New() for policyID, rules := range pds.PoliciesToUpdate { s.logCxt.WithFields(log.Fields{ "policyID": policyID.String(), "optimized": rules != nil, }).Debug("Policy updated.") for ifaceName, data := range cs.IfaceNameToData { if processedIfaces.Contains(ifaceName) { s.logCxt.WithField("iface", ifaceName).Debug("Iface already processed, ignoring.") continue } hep := rawHep[data.EpID] foundPolicyID := false for _, hepPolicyID := range getPolicyIDs(hep) { if hepPolicyID == policyID { foundPolicyID = true break } } if !foundPolicyID { s.logCxt.WithFields(log.Fields{ "policyID": policyID, "iface": ifaceName, }).Debug("Policy doesn't apply to iface, 
skipping iface.") continue } ifacesWithUpdatedPolicies.Add(ifaceName) m, ok := changeInMaps[ifaceName] if !ok { m = make(map[string]int) changeInMaps[ifaceName] = m } oldSetIDs := data.PoliciesToSetIDs[policyID] s.logCxt.WithFields(log.Fields{ "policyID": policyID.String(), "setIDs": dumpSetToString(oldSetIDs), }).Debug("Considering old set ID of policy.") if oldSetIDs != nil { // it means that the old version of the policy was optimized oldSetIDs.Iter(func(item interface{}) error { setID := item.(string) m[setID] -= 1 return nil }) } if rules != nil { // this means that new policy can be optimized newSetIDs := getSetIDs(rules) s.logCxt.WithFields(log.Fields{ "iface": ifaceName, "endpointID": data.EpID, "policyID": policyID.String(), "oldSetIDs": dumpSetToString(oldSetIDs), "newSetIDs": dumpSetToString(newSetIDs), }).Debug("Replacing old ipsets with new ones for optimizable policy") s.logCxt.WithFields(log.Fields{ "iface": ifaceName, "endpointID": data.EpID, "policyID": policyID.String(), }).Info("Policy will be optimized.") newSetIDs.Iter(func(item interface{}) error { setID := item.(string) m[setID] += 1 return nil }) newCs.IfaceNameToData[ifaceName].PoliciesToSetIDs[policyID] = newSetIDs } else { s.logCxt.WithFields(log.Fields{ "iface": ifaceName, "endpointID": data.EpID, "policyID": policyID.String(), "oldSetIDs": dumpSetToString(oldSetIDs), }).Debug("Dropping old ipsets for unoptimizable policy") s.logCxt.WithFields(log.Fields{ "iface": ifaceName, "endpointID": data.EpID, "policyID": policyID.String(), }).Info("Policy can not be optimized.") // this means that new policy can't be optimized delete(newCs.IfaceNameToData[ifaceName].PoliciesToSetIDs, policyID) } } if rules != nil { newCs.XDPEligiblePolicies[policyID] = *rules } else { delete(newCs.XDPEligiblePolicies, policyID) } } ifacesWithUpdatedPolicies.Iter(func(item interface{}) error { ifaceName := item.(string) oldData := cs.IfaceNameToData[ifaceName] newData := newCs.IfaceNameToData[ifaceName] oldNeedsXDP := oldData.NeedsXDP() newNeedsXDP := newData.NeedsXDP() if oldNeedsXDP && !newNeedsXDP { ba.UninstallXDP.Add(ifaceName) ba.RemoveMap.Add(ifaceName) } else if !oldNeedsXDP && newNeedsXDP { ba.InstallXDP.Add(ifaceName) ba.CreateMap.Add(ifaceName) } return nil }) // populate map changes for ifaceName, ips := range changeInMaps { if !ba.RemoveMap.Contains(ifaceName) { addIPSets := make(map[string]uint32) deleteIPSets := make(map[string]uint32) for setID, refCount := range ips { switch { case refCount > 0: addIPSets[setID] = uint32(refCount) case refCount < 0: deleteIPSets[setID] = uint32(-refCount) } } if len(addIPSets) > 0 { ba.AddToMap[ifaceName] = addIPSets } if len(deleteIPSets) > 0 { ba.RemoveFromMap[ifaceName] = deleteIPSets } } } s.logCxt.WithFields(log.Fields{ "newCS": newCs, "bpfActions": *ba, }).Debug("Finished processing pending diff state.") } func dumpSetToString(s set.Set) string { if s == nil { return "<empty>" } strs := make([]string, 0, s.Len()) s.Iter(func(item interface{}) error { strs = append(strs, fmt.Sprintf("%v", item)) return nil }) return strings.Join(strs, ", ") } func (s *xdpIPState) processHostEndpointChange(ifaceName string, oldData *xdpIfaceData, newHepID proto.HostEndpointID, newEP *proto.HostEndpoint, changeInMaps map[string]map[string]int) { policiesToSetIDs := make(map[proto.PolicyID]set.Set /*<string>*/) oldSetIDs := make(map[string]int) for _, setIDs := range oldData.PoliciesToSetIDs { setIDs.Iter(func(item interface{}) error { setID := item.(string) oldSetIDs[setID] += 1 return nil }) } 
newPolicyIDs := getPolicyIDs(newEP) newSetIDs := make(map[string]int) for _, policyID := range newPolicyIDs { rules := s.getLatestRulesForPolicyID(policyID) if rules == nil { s.logCxt.WithFields(log.Fields{ "iface": ifaceName, "endpointID": newHepID, "policyID": policyID.String(), }).Info("Policy can not be optimized.") continue } s.logCxt.WithFields(log.Fields{ "iface": ifaceName, "endpointID": newHepID, "policyID": policyID.String(), }).Info("Policy will be optimized.") rulesSetIDs := getSetIDs(rules) policiesToSetIDs[policyID] = rulesSetIDs rulesSetIDs.Iter(func(item interface{}) error { setID := item.(string) newSetIDs[setID] += 1 return nil }) } s.logCxt.WithFields(log.Fields{ "oldSetIDs": oldSetIDs, "newSetIDs": newSetIDs, }).Debug("Processing host endpoint change.") newData := xdpIfaceData{ EpID: newHepID, PoliciesToSetIDs: policiesToSetIDs, } s.newCurrentState.IfaceNameToData[ifaceName] = newData oldNeedsXDP := oldData.NeedsXDP() newNeedsXDP := newData.NeedsXDP() if oldNeedsXDP && !newNeedsXDP { s.bpfActions.UninstallXDP.Add(ifaceName) s.bpfActions.RemoveMap.Add(ifaceName) } else if !oldNeedsXDP && newNeedsXDP { s.bpfActions.InstallXDP.Add(ifaceName) s.bpfActions.CreateMap.Add(ifaceName) } m, ok := changeInMaps[ifaceName] if !ok { m = make(map[string]int) changeInMaps[ifaceName] = m } for setID, refCount := range newSetIDs { m[setID] += refCount } for setID, refCount := range oldSetIDs { m[setID] -= refCount } } func getPolicyIDs(hep *proto.HostEndpoint) []proto.PolicyID { var policyIDs []proto.PolicyID // we handle Untracked policy only for _, tier := range hep.GetUntrackedTiers() { for _, policyName := range tier.IngressPolicies { policyID := proto.PolicyID{ Tier: tier.Name, Name: policyName, } policyIDs = append(policyIDs, policyID) // TODO: For now we only support XDP // optimization of only the first untracked // policy, later we will want to support an // optimization of many rules as long as their // inbound rules form a sequence of // optimizable rules. 
			break
		}
	}
	return policyIDs
}

func getSetIDs(rules *xdpRules) set.Set /*<string>*/ {
	setIDs := set.New()
	for _, rule := range rules.Rules {
		for _, setID := range rule.SetIDs {
			setIDs.Add(setID)
		}
	}
	return setIDs
}

func (s *xdpIPState) getLatestRulesForPolicyID(policyID proto.PolicyID) *xdpRules {
	logCxt := s.logCxt.WithField("policyID", policyID.String())
	rules, ok := s.pendingDiffState.PoliciesToUpdate[policyID]
	if ok {
		logCxt.Debug("Policy is updated.")
		if rules == nil {
			s.logCxt.Debug("Policy can't be optimized.")
		}
		return rules
	} else {
		logCxt.Debug("Policy is not updated.")
		xdpRules, ok := s.newCurrentState.XDPEligiblePolicies[policyID]
		if ok {
			return &xdpRules
		} else {
			logCxt.Debug("Policy can't be optimized.")
			return nil
		}
	}
}

func (s *xdpIPState) updatePolicy(policyID proto.PolicyID, policy *proto.Policy) {
	s.logCxt.WithFields(log.Fields{
		"policyID": policyID,
		"policy":   policy,
	}).Debug("updatePolicy callback called.")
	s.pendingDiffState.PoliciesToRemove.Discard(policyID)
	if xdpRules, ok := xdpRulesFromProtoRules(policy.InboundRules, policy.OutboundRules); ok {
		s.logCxt.WithField("policyID", policyID).Debug("Policy can be optimized.")
		s.pendingDiffState.PoliciesToUpdate[policyID] = &xdpRules
	} else {
		s.logCxt.WithField("policyID", policyID).Debug("Policy can not be optimized.")
		s.pendingDiffState.PoliciesToUpdate[policyID] = nil
	}
}

func (s *xdpIPState) removePolicy(policyID proto.PolicyID) {
	s.logCxt.WithField("policyID", policyID).Debug("removePolicy callback called.")
	delete(s.pendingDiffState.PoliciesToUpdate, policyID)
	s.pendingDiffState.PoliciesToRemove.Add(policyID)
}

func xdpRulesFromProtoRules(inboundRules, outboundRules []*proto.Rule) (xdpRules, bool) {
	xdpRules := xdpRules{}
	isValid := len(inboundRules) > 0 &&
		// TODO: Maybe we should take all the initial rules
		// that have deny action? So in case of policy that
		// has 4 inbound rules with actions "deny", "deny",
		// "allow" and "deny", respectively, we would take
		// first two rules into account.
		isValidRuleForXDP(inboundRules[0])
	if isValid {
		xdpRules.Rules = []xdpRule{
			{
				SetIDs: inboundRules[0].SrcIpSetIds,
			},
		}
	}
	return xdpRules, isValid
}

func isValidRuleForXDP(rule *proto.Rule) bool {
	return rule != nil &&
		rule.Action == "deny" &&
		// accept ipv4 traffic (or any, which matches ipv4
		// too)
		//
		// TODO: drop the ip version check when we add support
		// for ipv6
		(rule.IpVersion == proto.IPVersion_ANY ||
			rule.IpVersion == proto.IPVersion_IPV4) &&
		// accept only rules that don't specify a protocol,
		// which means blocking all the traffic
		rule.Protocol == nil &&
		len(rule.SrcNet) == 0 &&
		len(rule.SrcPorts) == 0 &&
		len(rule.SrcNamedPortIpSetIds) == 0 &&
		// have only a single ip-only selector
		len(rule.SrcIpSetIds) == 1 &&
		rule.NotProtocol == nil &&
		len(rule.NotSrcNet) == 0 &&
		len(rule.NotSrcPorts) == 0 &&
		len(rule.NotSrcIpSetIds) == 0 &&
		len(rule.NotSrcNamedPortIpSetIds) == 0 &&
		// have no icmp stuff
		rule.Icmp == nil &&
		rule.NotIcmp == nil &&
		// have no destination stuff
		len(rule.DstNet) == 0 &&
		len(rule.DstPorts) == 0 &&
		len(rule.DstNamedPortIpSetIds) == 0 &&
		len(rule.DstIpSetIds) == 0 &&
		len(rule.NotDstNet) == 0 &&
		len(rule.NotDstPorts) == 0 &&
		len(rule.NotDstIpSetIds) == 0 &&
		len(rule.NotDstNamedPortIpSetIds) == 0 &&
		// have no application layer policy stuff
		rule.HttpMatch == nil &&
		rule.SrcServiceAccountMatch == nil &&
		rule.DstServiceAccountMatch == nil
	// Note that XDP doesn't support writing rule.Metadata to the dataplane
	// (as we do using -m comment in iptables), but the rule still can be
	// rendered in XDP, so we place no constraints on rule.Metadata here.
}

func (s *xdpIPState) removeMembersIPSet(setID string, members set.Set) {
	s.logCxt.WithFields(log.Fields{
		"setID":   setID,
		"members": members,
	}).Debug("removeMembersIPSet callback called.")
	s.ipsetIDsToMembers.RemoveMembers(setID, members)
}

func (s *xdpIPState) addMembersIPSet(setID string, members set.Set) {
	s.logCxt.WithFields(log.Fields{
		"setID":   setID,
		"members": members,
	}).Debug("addMembersIPSet callback called.")
	s.ipsetIDsToMembers.AddMembers(setID, members)
}

func (s *xdpIPState) replaceIPSet(setID string, members set.Set) {
	s.logCxt.WithFields(log.Fields{
		"setID":   setID,
		"members": members,
	}).Debug("ReplaceIPSet callback called.")
	s.ipsetIDsToMembers.Replace(setID, members)
}

func (s *xdpIPState) removeIPSet(setID string) {
	s.logCxt.WithField("setID", setID).Debug("removeIPSet callback called.")
	s.ipsetIDsToMembers.Delete(setID)
}

func (s *xdpIPState) cleanupCache() {
	setIDs := set.New()
	for setID := range s.ipsetIDsToMembers.cache {
		setIDs.Add(setID)
	}
	for setID := range s.ipsetIDsToMembers.pendingReplaces {
		setIDs.Add(setID)
	}
	for setID := range s.ipsetIDsToMembers.pendingAdds {
		setIDs.Add(setID)
	}
	for setID := range s.ipsetIDsToMembers.pendingDeletions {
		setIDs.Add(setID)
	}
	setIDs.Iter(func(item interface{}) error {
		setID := item.(string)
		if !s.isSetIDInCurrentState(setID) {
			delete(s.ipsetIDsToMembers.cache, setID)
			delete(s.ipsetIDsToMembers.pendingReplaces, setID)
			delete(s.ipsetIDsToMembers.pendingAdds, setID)
			delete(s.ipsetIDsToMembers.pendingDeletions, setID)
		}
		return nil
	})
}

func (s *xdpIPState) isSetIDInCurrentState(setID string) bool {
	for _, data := range s.currentState.IfaceNameToData {
		for _, setIDs := range data.PoliciesToSetIDs {
			if setIDs.Contains(setID) {
				return true
			}
		}
	}
	return false
}

func (s *xdpIPState) addInterface(ifaceName string, hostEPID proto.HostEndpointID) {
	s.logCxt.WithFields(log.Fields{
		"ifaceName": ifaceName,
		"hostEPID":  hostEPID,
	}).Debug("addInterface callback called.")
s.pendingDiffState.NewIfaceNameToHostEpID[ifaceName] = hostEPID } func (s *xdpIPState) removeInterface(ifaceName string) { s.logCxt.WithField("ifaceName", ifaceName).Debug("removeInterface callback called.") s.pendingDiffState.IfaceNamesToDrop.Add(ifaceName) } func (s *xdpIPState) updateInterface(ifaceName string, newHostEPID proto.HostEndpointID) { s.logCxt.WithFields(log.Fields{ "ifaceName": ifaceName, "newHostEPID": newHostEPID, }).Debug("updateInterface callback called.") s.pendingDiffState.IfaceEpIDChange[ifaceName] = newHostEPID } func (s *xdpIPState) updateHostEndpoint(hostEPID proto.HostEndpointID) { s.logCxt.WithField("hostEPID", hostEPID).Debug("updateHostEndpoint callback called.") s.pendingDiffState.RemovedHostEndpoints.Discard(hostEPID) if !s.isHostEndpointIDInCurrentState(hostEPID) { s.logCxt.WithField("hostEpId", hostEPID.EndpointId).Debug("Host endpoint not in current state, ignoring.") return } s.pendingDiffState.UpdatedHostEndpoints.Add(hostEPID) } type memberChanges struct { toAdd set.Set toDrop set.Set } func (s *xdpIPState) getMemberChanges() map[string]memberChanges { changes := make(map[string]memberChanges) s.logCxt.WithFields(log.Fields{ "oldMembers": s.ipsetIDsToMembers.cache, }).Debug("Getting member changes.") for setID, oldMembers := range s.ipsetIDsToMembers.cache { s.logCxt.WithFields(log.Fields{ "setID": setID, "pendingReplaces": s.ipsetIDsToMembers.pendingReplaces[setID], "pendingAdds": s.ipsetIDsToMembers.pendingAdds[setID], "pendingDeletions": s.ipsetIDsToMembers.pendingDeletions[setID], }).Debug("Processing setID.") mc := memberChanges{ toAdd: set.New(), toDrop: set.New(), } if pr, ok := s.ipsetIDsToMembers.pendingReplaces[setID]; ok { mc.toAdd = setDifference(pr, oldMembers) mc.toDrop = setDifference(oldMembers, pr) } else { if pa, ok := s.ipsetIDsToMembers.pendingAdds[setID]; ok { mc.toAdd = pa } if pd, ok := s.ipsetIDsToMembers.pendingDeletions[setID]; ok { mc.toDrop = pd } } s.logCxt.WithFields(log.Fields{ "toAdd": mc.toAdd, "toDrop": mc.toDrop, }).Debug("Generating toAdd and toDrop.") changes[setID] = mc } return changes } func setDifference(a, b set.Set) set.Set { result := set.New() a.Iter(func(item interface{}) error { if !b.Contains(item) { result.Add(item) } return nil }) return result } func (s *xdpIPState) getAffectedIfaces(setID string) map[string]uint32 { ifacesToRefCounts := make(map[string]uint32) for iface, data := range s.newCurrentState.IfaceNameToData { for _, setIDs := range data.PoliciesToSetIDs { if setIDs.Contains(setID) { ifacesToRefCounts[iface] += 1 } } } return ifacesToRefCounts } func (s *xdpIPState) isHostEndpointIDInCurrentState(hep proto.HostEndpointID) bool { for _, data := range s.currentState.IfaceNameToData { if data.EpID == hep { return true } } return false } func (s *xdpIPState) removeHostEndpoint(hostEPID proto.HostEndpointID) { s.logCxt.WithField("hostEPID", hostEPID).Debug("removeHostEndpoint callback called.") s.pendingDiffState.RemovedHostEndpoints.Add(hostEPID) s.pendingDiffState.UpdatedHostEndpoints.Discard(hostEPID) } type xdpStateCommon struct { programTag string needResync bool bpfLib bpf.BPFDataplane xdpModes []bpf.XDPMode } type xdpSystemState struct { IfaceNameToData map[string]xdpIfaceData // a cache of all the policies that could be implemented with // XDP, even those that currently are not XDPEligiblePolicies map[proto.PolicyID]xdpRules } func newXDPSystemState() *xdpSystemState { return &xdpSystemState{ IfaceNameToData: make(map[string]xdpIfaceData), XDPEligiblePolicies: 
make(map[proto.PolicyID]xdpRules), } } func (s *xdpSystemState) Copy() *xdpSystemState { newState := xdpSystemState{ IfaceNameToData: make(map[string]xdpIfaceData), XDPEligiblePolicies: make(map[proto.PolicyID]xdpRules), } for k, v := range s.IfaceNameToData { newState.IfaceNameToData[k] = v.Copy() } for k, v := range s.XDPEligiblePolicies { newState.XDPEligiblePolicies[k] = v.Copy() } return &newState } type xdpPendingDiffState struct { NewIfaceNameToHostEpID map[string]proto.HostEndpointID IfaceNamesToDrop set.Set //<string> IfaceEpIDChange map[string]proto.HostEndpointID UpdatedHostEndpoints set.Set //<proto.HostEndpointID> RemovedHostEndpoints set.Set //<proto.HostEndpointID> PoliciesToRemove set.Set //<PolicyID> PoliciesToUpdate map[proto.PolicyID]*xdpRules } func newXDPPendingDiffState() *xdpPendingDiffState { return &xdpPendingDiffState{ NewIfaceNameToHostEpID: make(map[string]proto.HostEndpointID), IfaceNamesToDrop: set.New(), IfaceEpIDChange: make(map[string]proto.HostEndpointID), UpdatedHostEndpoints: set.New(), RemovedHostEndpoints: set.New(), PoliciesToRemove: set.New(), PoliciesToUpdate: make(map[proto.PolicyID]*xdpRules), } } type xdpBPFActions struct { // sets of interface names, for which a bpf map should be // created CreateMap set.Set //<string> // sets of interface names, for which a bpf map should be // dropped (or emptied in some cases) RemoveMap set.Set //<string> // The fields below are normalized, so for a given interface a // set ID will appear either in AddToMap or RemoveFromMap, // never in both at the same time // // Keys are interface names, values are maps where keys are // set IDs and values are ref counts to add/drop (positive // values) AddToMap map[string]map[string]uint32 RemoveFromMap map[string]map[string]uint32 // sets of interface names, where XDP program should be // loaded/attached InstallXDP set.Set //<string> // sets of interface names, where XDP program should be // unloaded/detached UninstallXDP set.Set //<string> // Resync fallout // keys are interface names, values are maps, where keys are // members and values are ref counts MembersToDrop map[string]map[string]uint32 MembersToAdd map[string]map[string]uint32 } func newXDPBPFActions() *xdpBPFActions { return &xdpBPFActions{ CreateMap: set.New(), RemoveMap: set.New(), AddToMap: make(map[string]map[string]uint32), RemoveFromMap: make(map[string]map[string]uint32), InstallXDP: set.New(), UninstallXDP: set.New(), MembersToDrop: make(map[string]map[string]uint32), MembersToAdd: make(map[string]map[string]uint32), } } // apply processes the contents of BPF actions - uninstalls and // installs XDP programs, creates and removes BPF maps, adds and // removes whole ipsets into/from the BPF maps, adds and removes // certain members to/from BPF maps. func (a *xdpBPFActions) apply(memberCache *xdpMemberCache, ipsetIDsToMembers *ipsetIDsToMembers, ipsSource ipsetsSource, xdpModes []bpf.XDPMode) error { var opErr error logCxt := log.WithField("family", memberCache.GetFamily().String()) // used for dropping programs, to handle the case when generic // xdp is currently disabled and we need to drop a program // installed in generic mode by previous felix instance which // had generic xdp enabled. 
allXDPModes := getXDPModes(true) logCxt.Debug("Processing BPF actions.") a.UninstallXDP.Iter(func(item interface{}) error { iface := item.(string) var removeErrs []error logCxt.WithField("iface", iface).Debug("Removing XDP programs.") for _, mode := range allXDPModes { if err := memberCache.bpfLib.RemoveXDP(iface, mode); err != nil { removeErrs = append(removeErrs, err) } // Note: keep trying to remove remaining possible modes, even if that one // appeared to succeed. With current kernel and iproute2, RemoveXDP reports // success if there _wasn't_ any XDP program attached in the specified mode. // So, if we stop after the first mode that reports success, we won't remove // the XDP program in the mode that is actually in use! } // Only report an error if _all_ of the mode-specific removals failed. if len(removeErrs) == len(allXDPModes) { opErr = fmt.Errorf("failed to remove XDP program from %s: %v", iface, removeErrs) return set.StopIteration } return nil }) if opErr != nil { return opErr } a.RemoveMap.Iter(func(item interface{}) error { iface := item.(string) logCxt.WithField("iface", iface).Debug("Removing BPF blacklist map.") if err := memberCache.bpfLib.RemoveCIDRMap(iface, memberCache.GetFamily()); err != nil { opErr = err return set.StopIteration } return nil }) if opErr != nil { return opErr } a.CreateMap.Iter(func(item interface{}) error { iface := item.(string) logCxt.WithField("iface", iface).Debug("Creating a BPF blacklist map.") if _, err := memberCache.bpfLib.NewCIDRMap(iface, memberCache.GetFamily()); err != nil { opErr = err return set.StopIteration } return nil }) if opErr != nil { return opErr } for iface, memberMap := range a.MembersToAdd { mi := &memberIterMap{ memberMap: memberMap, } if err := processMemberAdds(memberCache, iface, mi); err != nil { return err } } for iface, setIDMap := range a.AddToMap { for setID, refCount := range setIDMap { logCxt.WithFields(log.Fields{ "iface": iface, "setID": setID, "refCount": refCount, }).Debug("Adding members of ipset to BPF blacklist map.") members, err := getIPSetMembers(ipsetIDsToMembers, setID, ipsSource) if err != nil { return err } mi := &memberIterSet{ members: members, refCount: refCount, } if err := processMemberAdds(memberCache, iface, mi); err != nil { return err } } } // drop stuff from maps for iface, memberMap := range a.MembersToDrop { mi := &memberIterMap{ memberMap: memberMap, } if err := processMemberDeletions(memberCache, iface, mi); err != nil { return err } } for iface, setIDMap := range a.RemoveFromMap { for setID, refCount := range setIDMap { logCxt.WithFields(log.Fields{ "iface": iface, "setID": setID, "refCount": refCount, }).Debug("Dropping members of ipset from BPF blacklist map.") members, ok := ipsetIDsToMembers.GetCached(setID) if !ok { return fmt.Errorf("failed to remove members of %s program from %s: ipset not in cache", setID, iface) } mi := &memberIterSet{ members: members, refCount: refCount, } if err := processMemberDeletions(memberCache, iface, mi); err != nil { return err } } } a.InstallXDP.Iter(func(item interface{}) error { iface := item.(string) logCxt.WithField("iface", iface).Debug("Loading XDP program.") var loadErrs []error for _, mode := range xdpModes { if err := memberCache.bpfLib.LoadXDPAuto(iface, mode); err != nil { loadErrs = append(loadErrs, err) } else { logCxt.WithFields(log.Fields{ "iface": iface, "mode": mode, }).Debug("Loading XDP program succeeded.") loadErrs = nil break } } if loadErrs != nil { opErr = fmt.Errorf("failed to load XDP program from %s: %v", iface, loadErrs) 
return set.StopIteration } return nil }) if opErr != nil { return opErr } logCxt.Debug("Finished processing BPF actions.") return nil } func getXDPModes(allowGenericXDP bool) []bpf.XDPMode { modes := []bpf.XDPMode{ bpf.XDPOffload, bpf.XDPDriver, } if allowGenericXDP { modes = append(modes, bpf.XDPGeneric) } return modes } func getIPSetMembers(ipsetIDsToMembers *ipsetIDsToMembers, setID string, ipsSource ipsetsSource) (set.Set, error) { members, ok := ipsetIDsToMembers.GetCached(setID) if ok { return members, nil } members, err := ipsSource.GetIPSetMembers(setID) if err != nil { return nil, err } ipsetIDsToMembers.SetCache(setID, members) return members, nil } type convertingIPSetsSource struct { realSource ipsetsSource } func newConvertingIPSetsSource(realSource ipsetsSource) ipsetsSource { return &convertingIPSetsSource{ realSource: realSource, } } var _ ipsetsSource = &convertingIPSetsSource{} func (s *convertingIPSetsSource) GetIPSetType(setID string) (ipsets.IPSetType, error) { return s.realSource.GetIPSetType(setID) } func (s *convertingIPSetsSource) GetIPSetMembers(setID string) (set.Set /*<string>*/, error) { members, err := s.realSource.GetIPSetMembers(setID) if err != nil { return nil, err } return s.tryConvert(setID, members) } func (s *convertingIPSetsSource) tryConvert(setID string, members set.Set /*<string>*/) (set.Set /*<string>*/, error) { setType, err := s.GetIPSetType(setID) if err != nil { return nil, err } convertedMembers := convertMembersToMasked(members, setType) return convertedMembers, nil } func convertMembersToMasked(members set.Set, setType ipsets.IPSetType) set.Set { if members == nil { return nil } switch setType { case ipsets.IPSetTypeHashIP: newMembers := set.New() members.Iter(func(item interface{}) error { member := item.(string) newMembers.Add(member + "/32") return nil }) return newMembers case ipsets.IPSetTypeHashNet: return members default: return set.New() } } func processMemberAdds(memberCache *xdpMemberCache, iface string, mi memberIter) error { if mi.Len() == 0 { return nil } logCxt := log.WithField("family", memberCache.GetFamily().String()) bpfMembers, err := memberCache.GetMembers(iface) if err != nil { return err } return mi.Iter(func(member string, refCount uint32) error { ip, mask, err := bpf.MemberToIPMask(member) if err != nil { return err } mapKey, err := memberCache.GetCIDRMapKeyForMember(member) if err != nil { return err } if bpfRefCount, ok := bpfMembers[mapKey]; ok { logCxt.WithFields(log.Fields{ "iface": iface, "oldCount": bpfRefCount, "newCount": bpfRefCount + refCount, "member": member, }).Debug("Updating refcount in BPF blacklist map.") bpfMembers[mapKey] = bpfRefCount + refCount if err := memberCache.bpfLib.UpdateCIDRMap(iface, memberCache.GetFamily(), *ip, mask, bpfRefCount+refCount); err != nil { return err } } else { logCxt.WithFields(log.Fields{ "iface": iface, "refCount": refCount, "member": member, }).Debug("Adding a member in BPF blacklist map.") bpfMembers[mapKey] = refCount if err := memberCache.bpfLib.UpdateCIDRMap(iface, memberCache.GetFamily(), *ip, mask, refCount); err != nil { return err } } return nil }) } func processMemberDeletions(memberCache *xdpMemberCache, iface string, mi memberIter) error { if mi.Len() == 0 { return nil } logCxt := log.WithField("family", memberCache.GetFamily().String()) bpfMembers, err := memberCache.GetMembers(iface) if err != nil { return err } return mi.Iter(func(member string, refCount uint32) error { ip, mask, err := bpf.MemberToIPMask(member) if err != nil { return err } mapKey, err 
:= memberCache.GetCIDRMapKeyForMember(member) if err != nil { return err } if bpfRefCount, ok := bpfMembers[mapKey]; ok { if bpfRefCount < refCount { logCxt.WithFields(log.Fields{ "iface": iface, "oldCount": bpfRefCount, "newCount": bpfRefCount - refCount, "member": member, }).Debug("Can't update refcount in BPF blacklist map.") return fmt.Errorf("wanted to drop refcount of %s (%d) by %d, which would lead to negative refcount", member, bpfRefCount, refCount) } else if bpfRefCount == refCount { logCxt.WithFields(log.Fields{ "iface": iface, "member": member, }).Debug("Dropping a member from BPF blacklist map.") delete(bpfMembers, mapKey) if err := memberCache.bpfLib.RemoveItemCIDRMap(iface, memberCache.GetFamily(), *ip, mask); err != nil { return err } } else { logCxt.WithFields(log.Fields{ "iface": iface, "oldCount": bpfRefCount, "newCount": bpfRefCount - refCount, "member": member, }).Debug("Updating refcount of a member in BPF blacklist map.") bpfMembers[mapKey] = bpfRefCount - refCount if err := memberCache.bpfLib.UpdateCIDRMap(iface, memberCache.GetFamily(), *ip, mask, bpfRefCount-refCount); err != nil { return err } } } else { return fmt.Errorf("expected to have member %s in map for %s %s", member, iface, memberCache.GetFamily().String()) } return nil }) } type xdpIfaceData struct { EpID proto.HostEndpointID PoliciesToSetIDs map[proto.PolicyID]set.Set //<string> } func (data xdpIfaceData) Copy() xdpIfaceData { new := data new.PoliciesToSetIDs = make(map[proto.PolicyID]set.Set, len(data.PoliciesToSetIDs)) for k, v := range data.PoliciesToSetIDs { // this makes shallow copy, but fortunately these are // just strings new.PoliciesToSetIDs[k] = v.Copy() } return new } func (d *xdpIfaceData) NeedsXDP() bool { for _, setIDs := range d.PoliciesToSetIDs { if setIDs.Len() > 0 { return true } } return false } type xdpRules struct { Rules []xdpRule } func (rs xdpRules) Copy() xdpRules { var newRules []xdpRule for _, r := range rs.Rules { newSetIDs := make([]string, len(r.SetIDs)) copy(newSetIDs, r.SetIDs) newRules = append(newRules, xdpRule{SetIDs: newSetIDs}) } return xdpRules{Rules: newRules} } type xdpRule struct { SetIDs []string } type endpointsSource interface { GetRawHostEndpoints() map[proto.HostEndpointID]*proto.HostEndpoint } var _ endpointsSource = &endpointManager{} type nilIPSetsSource struct{} func (n *nilIPSetsSource) GetIPSetType(setID string) (ipsets.IPSetType, error) { return "", nil } func (n *nilIPSetsSource) GetIPSetMembers(setID string) (set.Set /*<string>*/, error) { return set.New(), nil } type ipsetsSource interface { GetIPSetType(setID string) (ipsets.IPSetType, error) GetIPSetMembers(setID string) (set.Set /*<string>*/, error) } var _ ipsetsSource = &ipSetsManager{} var _ ipsetsSource = &nilIPSetsSource{} type xdpMemberCache struct { family bpf.IPFamily cache map[string]map[bpf.CIDRMapKey]uint32 memberToCIDRMapKeyFunc func(member string) (bpf.CIDRMapKey, error) bpfLib bpf.BPFDataplane } func newXDPMemberCache(family bpf.IPFamily, bpfLib bpf.BPFDataplane) *xdpMemberCache { return &xdpMemberCache{ family: family, cache: make(map[string]map[bpf.CIDRMapKey]uint32), memberToCIDRMapKeyFunc: getMemberToCIDRMapKeyFunc(family), bpfLib: bpfLib, } } func getMemberToCIDRMapKeyFunc(family bpf.IPFamily) func(member string) (bpf.CIDRMapKey, error) { maskSizeInBits := func() int { if family == bpf.IPFamilyV4 { return 32 } return 128 }() return func(member string) (bpf.CIDRMapKey, error) { ip, maskLen, err := bpf.MemberToIPMask(member) if err != nil { return bpf.CIDRMapKey{}, err } mask 
:= net.CIDRMask(maskLen, maskSizeInBits) ipnet := &net.IPNet{ IP: *ip, Mask: mask, } return bpf.NewCIDRMapKey(ipnet), nil } } func (c *xdpMemberCache) GetMembers(iface string) (map[bpf.CIDRMapKey]uint32, error) { if members, ok := c.cache[iface]; ok { return members, nil } members, err := c.bpfLib.DumpCIDRMap(iface, c.family) if err != nil { return nil, err } c.cache[iface] = members return members, nil } func (c *xdpMemberCache) GetFamily() bpf.IPFamily { return c.family } func (c *xdpMemberCache) GetCIDRMapKeyForMember(member string) (bpf.CIDRMapKey, error) { return c.memberToCIDRMapKeyFunc(member) } type xdpResyncState struct { ifacesWithProgs map[string]progInfo ifacesWithMaps map[string]mapInfo ipsetMembers map[string]set.Set } type progInfo struct { bogus bool } type mapInfo struct { bogus bool mismatched bool contents map[bpf.CIDRMapKey]uint32 } type memberIterMap struct { memberMap map[string]uint32 } func (m *memberIterMap) Iter(f func(member string, refCount uint32) error) error { for member, refCount := range m.memberMap { if err := f(member, refCount); err != nil { return err } } return nil } func (m *memberIterMap) Len() int { return len(m.memberMap) } type memberIterSet struct { members set.Set refCount uint32 } func (m *memberIterSet) Iter(f func(member string, refCount uint32) error) error { var opErr error m.members.Iter(func(item interface{}) error { member := item.(string) if err := f(member, m.refCount); err != nil { opErr = err return set.StopIteration } return nil }) return opErr } func (m *memberIterSet) Len() int { return m.members.Len() } type memberIter interface { Iter(func(member string, refCount uint32) error) error Len() int } var _ memberIter = &memberIterSet{} var _ memberIter = &memberIterMap{}
1
19,135
Yep, looks right; your new selector _should_ disqualify the rule from being XDP-accelerated.
projectcalico-felix
go
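The refcount bookkeeping in the blacklist-map update above has three distinct outcomes: underflow is rejected, hitting exactly zero removes the member, and anything else just stores the decremented count. A minimal Go sketch of that pattern, detached from Felix's BPF types and using hypothetical names:

package main

import "fmt"

// decRef drops refCount references to key in counts, mirroring the
// three cases in the blacklist-map update above: underflow is an
// error, reaching exactly zero deletes the entry, and anything else
// stores the decremented count.
func decRef(counts map[string]uint32, key string, refCount uint32) error {
	current, ok := counts[key]
	if !ok {
		return fmt.Errorf("no entry for %s", key)
	}
	switch {
	case current < refCount:
		return fmt.Errorf("refcount of %s (%d) would go negative", key, current)
	case current == refCount:
		delete(counts, key)
	default:
		counts[key] = current - refCount
	}
	return nil
}

func main() {
	counts := map[string]uint32{"10.0.0.0/24": 2}
	fmt.Println(decRef(counts, "10.0.0.0/24", 1), counts) // <nil> map[10.0.0.0/24:1]
	fmt.Println(decRef(counts, "10.0.0.0/24", 1), counts) // <nil> map[]
}

The explicit underflow check matters because the map values are unsigned; a plain subtraction would silently wrap rather than fail.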
@@ -0,0 +1,12 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. + +using System; + +namespace Microsoft.AspNetCore.Connections.Features +{ + public interface IConnectionHeartbeatTickFeature + { + void OnHeartbeat(Action<DateTimeOffset, object> action, object state); + } +}
1
1
16,384
cc @anurse: This exists now. Kestrel currently passes the current time to all callbacks as an optimization. Just an FYI.
aspnet-KestrelHttpServer
.cs
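For illustration, a consumer of the new feature might register a callback like the sketch below; `connection` (a ConnectionContext) and the ConnectionTracker type are assumptions for the example, not part of the patch:

// Hypothetical usage sketch of IConnectionHeartbeatTickFeature.
// `connection` is assumed to be a ConnectionContext; ConnectionTracker
// is an invented type with a LastSeen property.
var tracker = new ConnectionTracker();
var heartbeat = connection.Features.Get<IConnectionHeartbeatTickFeature>();
heartbeat?.OnHeartbeat((now, state) =>
{
    // `now` is the single timestamp Kestrel computes per tick and
    // passes to every registered callback; `state` is the object
    // handed to OnHeartbeat, which avoids capturing locals in a closure.
    ((ConnectionTracker)state).LastSeen = now;
}, tracker);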
@@ -126,7 +126,7 @@ class Entry(MSONable, metaclass=ABCMeta): elif mode == "formula_unit": factor = self.composition.get_reduced_composition_and_factor()[1] else: - raise ValueError(f"{mode=} is not an allowed option for normalization") + raise ValueError(f"{mode} is not an allowed option for normalization") return factor
1
# coding: utf-8 # Copyright (c) Pymatgen Development Team. # Distributed under the terms of the MIT License. """ Entries are containers for calculated information, which is used in many analyses. This module contains entry related tools and implements the base Entry class, which is the basic entity that can be used to store calculated information. Other Entry classes such as ComputedEntry and PDEntry inherit from this class. """ import sys from abc import ABCMeta, abstractmethod from numbers import Number from typing import Dict, Union import numpy as np from monty.json import MSONable from pymatgen.core.composition import Composition if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal __author__ = "Shyue Ping Ong, Anubhav Jain, Ayush Gupta" __copyright__ = "Copyright 2020, The Materials Project" __version__ = "1.1" __maintainer__ = "Shyue Ping Ong" __email__ = "[email protected]" __status__ = "Production" __date__ = "Mar 03, 2020" class Entry(MSONable, metaclass=ABCMeta): """ A lightweight object containing the energy associated with a specific chemical composition. This base class is not intended to be instantiated directly. Note that classes which inherit from Entry must define a .energy property. """ def __init__( self, composition: Union[Composition, str, Dict[str, float]], energy: float, ): """ Initializes an Entry. Args: composition (Composition): Composition of the entry. For flexibility, this can take the form of all the typical input taken by a Composition, including a {symbol: amt} dict, a string formula, and others. energy (float): Energy of the entry. """ self._composition = Composition(composition) self._energy = energy @property def is_element(self) -> bool: """ :return: Whether composition of entry is an element. """ # NOTE _composition rather than composition as GrandPDEntry # edge case exists if we have a compound where chempots are # given for all bar one element type return self._composition.is_element @property def composition(self) -> Composition: """ :return: the composition of the entry. """ return self._composition @property @abstractmethod def energy(self) -> float: """ :return: the energy of the entry. """ @property def energy_per_atom(self) -> float: """ :return: the energy per atom of the entry. """ return self.energy / self.composition.num_atoms def __repr__(self): return "{} : {} with energy = {:.4f}".format(self.__class__.__name__, self.composition, self.energy) def __str__(self): return self.__repr__() def normalize(self, mode: Literal["formula_unit", "atom"] = "formula_unit") -> "Entry": """ Normalize the entry's composition and energy. Args: mode ("formula_unit" | "atom"): "formula_unit" (the default) normalizes to composition.reduced_formula. "atom" normalizes such that the composition amounts sum to 1. 
""" factor = self._normalization_factor(mode) new_composition = self._composition / factor new_energy = self._energy / factor new_entry_dict = self.as_dict() new_entry_dict["composition"] = new_composition.as_dict() new_entry_dict["energy"] = new_energy return self.from_dict(new_entry_dict) def _normalization_factor(self, mode: Literal["formula_unit", "atom"] = "formula_unit") -> float: # NOTE here we use composition rather than _composition in order to ensure # that we have the expected behavior downstream in cases where composition # is overwritten (GrandPotPDEntry, TransformedPDEntry) if mode == "atom": factor = self.composition.num_atoms elif mode == "formula_unit": factor = self.composition.get_reduced_composition_and_factor()[1] else: raise ValueError(f"{mode=} is not an allowed option for normalization") return factor def as_dict(self) -> dict: """ :return: MSONable dict. """ return { "@module": self.__class__.__module__, "@class": self.__class__.__name__, "energy": self._energy, "composition": self._composition.as_dict(), } def __eq__(self, other): # NOTE: Scaled duplicates i.e. physically equivalent materials # are not equal unless normalized separately. if self is other: return True # Equality is defined based on composition and energy # If structures are involved, it is assumed that a {composition, energy} is # vanishingly unlikely to be the same if the structures are different if not np.allclose(self.energy, other.energy): return False return self.composition == other.composition def _is_dict_eq(self, other): """ Check if entry dicts are equal using a robust check for numerical values. """ self_dict = self.as_dict() other_dict = other.as_dict() # NOTE use implicit generator to allow all() to short-circuit return all(_is_robust_eq(other_dict[k], v) for k, v in self_dict.items()) def __hash__(self): # NOTE truncate _energy to 8 dp to ensure same robustness # as np.allclose return hash(f"{self.__class__.__name__}" f"{self._composition.formula}" f"{self._energy:.8f}") def _is_robust_eq(v_self, v_other): """ Use np.allclose for numerical values for robustness otherwise use default __eq__. NOTE robustness doesn't reach to nested structures i.e. For a ComputedStructureEntry where parameters stores the Incar this would not be robust to fp changes in that Incar dictionary. For a GrandPotPDEntry it will not be robust to fp changes in the chempots """ if isinstance(v_self, Number) and isinstance(v_other, Number): return np.allclose(v_self, v_other, atol=1e-8) return v_self == v_other
1
20,121
Remove the 3.8+ f-string format to allow use in Google Colab.
materialsproject-pymatgen
py
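The `=` specifier inside an f-string (f"{mode=}") is self-documenting output introduced in Python 3.8, which is why the patch swaps it for the plain form. A quick comparison of the two:

mode = "bogus"

# Python 3.8+ self-documenting form (what the patch removes):
print(f"{mode=} is not an allowed option for normalization")
# -> mode='bogus' is not an allowed option for normalization

# Portable form (what the patch keeps), valid on older interpreters:
print(f"{mode} is not an allowed option for normalization")
# -> bogus is not an allowed option for normalization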
@@ -196,6 +196,10 @@ save_xmm(dcontext_t *dcontext, sigframe_rt_t *frame)
     int i;
     sigcontext_t *sc = get_sigcontext_from_rt_frame(frame);
     kernel_xstate_t *xstate = (kernel_xstate_t *)sc->fpstate;
+    /* For pending signals, the alignment of xstate ultimately depends on the alignment of
+     * the special allocator's blocks. This is fragile, and assertions here and at
+     * initialization provide some coverage for this.
+     */
     if (!preserve_xmm_caller_saved())
         return;
     if (xstate_has_extra_fields) {
1
/* ********************************************************** * Copyright (c) 2011-2019 Google, Inc. All rights reserved. * Copyright (c) 2000-2010 VMware, Inc. All rights reserved. * **********************************************************/ /* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of VMware, Inc. nor the names of its contributors may be * used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL VMWARE, INC. OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH * DAMAGE. */ /* Copyright (c) 2003-2007 Determina Corp. */ /* Copyright (c) 2001-2003 Massachusetts Institute of Technology */ /* Copyright (c) 2000-2001 Hewlett-Packard Company */ /* * signal_linux_x86.c - Linux and X86 specific signal code */ #include "signal_private.h" /* pulls in globals.h for us, in right order */ #ifndef LINUX # error Linux-only #endif #ifndef X86 # error X86-only #endif #include "arch.h" /* We have to dynamically size kernel_xstate_t to account for kernel changes * over time. 
*/ static size_t xstate_size; static bool xstate_has_extra_fields; #define XSTATE_QUERY_SIG SIGILL /**** floating point support ********************************************/ /* The following code is based on routines in * /usr/src/linux/arch/i386/kernel/i387.c * and definitions in * /usr/src/linux/include/asm-i386/processor.h * /usr/src/linux/include/asm-i386/i387.h */ struct i387_fsave_struct { long cwd; long swd; long twd; long fip; long fcs; long foo; long fos; long st_space[20]; /* 8*10 bytes for each FP-reg = 80 bytes */ long status; /* software status information */ }; /* note that fxsave requires that i387_fxsave_struct be aligned on * a 16-byte boundary */ struct i387_fxsave_struct { unsigned short cwd; unsigned short swd; unsigned short twd; unsigned short fop; #ifdef X64 long rip; long rdp; int mxcsr; int mxcsr_mask; int st_space[32]; /* 8*16 bytes for each FP-reg = 128 bytes */ int xmm_space[64]; /* 16*16 bytes for each XMM-reg = 256 bytes */ int padding[24]; #else long fip; long fcs; long foo; long fos; long mxcsr; long reserved; long st_space[32]; /* 8*16 bytes for each FP-reg = 128 bytes */ long xmm_space[32]; /* 8*16 bytes for each XMM-reg = 128 bytes */ long padding[56]; #endif } __attribute__((aligned(16))); union i387_union { struct i387_fsave_struct fsave; struct i387_fxsave_struct fxsave; }; #ifndef X64 /* For 32-bit if we use fxsave we need to convert it to the kernel's struct. * For 64-bit the kernel's struct is identical to the fxsave format. */ static uint twd_fxsr_to_i387(struct i387_fxsave_struct *fxsave) { kernel_fpxreg_t *st = NULL; uint twd = (uint)fxsave->twd; uint tag; uint ret = 0xffff0000; int i; for (i = 0; i < 8; i++) { if (TEST(0x1, twd)) { st = (kernel_fpxreg_t *)&fxsave->st_space[i * 4]; switch (st->exponent & 0x7fff) { case 0x7fff: tag = 2; /* Special */ break; case 0x0000: if (st->significand[0] == 0 && st->significand[1] == 0 && st->significand[2] == 0 && st->significand[3] == 0) { tag = 1; /* Zero */ } else { tag = 2; /* Special */ } break; default: if (TEST(0x8000, st->significand[3])) { tag = 0; /* Valid */ } else { tag = 2; /* Special */ } break; } } else { tag = 3; /* Empty */ } ret |= (tag << (2 * i)); twd = twd >> 1; } return ret; } static void convert_fxsave_to_fpstate(kernel_fpstate_t *fpstate, struct i387_fxsave_struct *fxsave) { int i; fpstate->cw = (uint)fxsave->cwd | 0xffff0000; fpstate->sw = (uint)fxsave->swd | 0xffff0000; fpstate->tag = twd_fxsr_to_i387(fxsave); fpstate->ipoff = fxsave->fip; fpstate->cssel = fxsave->fcs | ((uint)fxsave->fop << 16); fpstate->dataoff = fxsave->foo; fpstate->datasel = fxsave->fos; for (i = 0; i < 8; i++) { memcpy(&fpstate->_st[i], &fxsave->st_space[i * 4], sizeof(fpstate->_st[i])); } fpstate->status = fxsave->swd; fpstate->magic = X86_FXSR_MAGIC; memcpy(&fpstate->_fxsr_env[0], fxsave, sizeof(struct i387_fxsave_struct)); } #endif /* !X64 */ static void save_xmm(dcontext_t *dcontext, sigframe_rt_t *frame) { /* The app's xmm registers may be saved away in priv_mcontext_t, in which * case we need to copy those values instead of using what was in * the physical xmm registers. * Because of this, we can't just execute "xsave". We still need to * execute xgetbv though. Xsave is very expensive so not worth doing * when xgetbv is all we need, so we avoid it unless there are extra fields. 
*/ int i; sigcontext_t *sc = get_sigcontext_from_rt_frame(frame); kernel_xstate_t *xstate = (kernel_xstate_t *)sc->fpstate; if (!preserve_xmm_caller_saved()) return; if (xstate_has_extra_fields) { /* Fill in the extra fields first and then clobber xmm+ymm below. * We assume that DR's code does not touch this extra state. */ ASSERT(ALIGNED(xstate, AVX_ALIGNMENT)); /* A processor w/o xsave but w/ extra xstate fields should not exist. */ ASSERT(proc_has_feature(FEATURE_XSAVE)); /* XXX i#1312: use xsaveopt if available (need to add FEATURE_XSAVEOPT) */ #ifdef X64 /* Some assemblers, including on Travis, don't know "xsave64", so we * have to use raw bytes for: * 48 0f ae 20 xsave64 (%rax) */ asm volatile("mov %0, %%rax; .byte 0x48; .byte 0x0f; .byte 0xae; .byte 0x20" : "=m"(xstate) : : "rax"); #else asm volatile("xsave %0" : "=m"(*xstate)); #endif } if (YMM_ENABLED()) { /* all ymm regs are in our mcontext. the only other thing * in xstate is the xgetbv. */ uint bv_high, bv_low; dr_xgetbv(&bv_high, &bv_low); xstate->xstate_hdr.xstate_bv = (((uint64)bv_high) << 32) | bv_low; } for (i = 0; i < proc_num_simd_sse_avx_saved(); i++) { /* we assume no padding */ #ifdef X64 /* __u32 xmm_space[64] */ memcpy(&sc->fpstate->xmm_space[i * 4], &get_mcontext(dcontext)->simd[i], XMM_REG_SIZE); if (YMM_ENABLED()) { /* i#637: ymm top halves are inside kernel_xstate_t */ memcpy(&xstate->ymmh.ymmh_space[i * 4], ((void *)&get_mcontext(dcontext)->simd[i]) + XMM_REG_SIZE, YMMH_REG_SIZE); } #else memcpy(&sc->fpstate->_xmm[i], &get_mcontext(dcontext)->simd[i], XMM_REG_SIZE); if (YMM_ENABLED()) { /* i#637: ymm top halves are inside kernel_xstate_t */ memcpy(&xstate->ymmh.ymmh_space[i * 4], ((void *)&get_mcontext(dcontext)->simd[i]) + XMM_REG_SIZE, YMMH_REG_SIZE); } #endif } /* XXX i#1312: AVX-512 extended register copies missing yet. */ } /* We can't tell whether the app has used fpstate yet so we preserve every time * (i#641 covers optimizing that) */ void save_fpstate(dcontext_t *dcontext, sigframe_rt_t *frame) { /* FIXME: is there a better way to align this thing? * the __attribute__ on the struct above doesn't do it */ char align[sizeof(union i387_union) + 16]; union i387_union *temp = (union i387_union *)((((ptr_uint_t)align) + 16) & ((ptr_uint_t)-16)); sigcontext_t *sc = get_sigcontext_from_rt_frame(frame); LOG(THREAD, LOG_ASYNCH, 3, "save_fpstate\n"); if (sc->fpstate == NULL) { /* Nothing to do: there was no fpstate to save at the time the kernel * gave us this frame. * It's possible that by the time we deliver the signal * there is some state: but it's up to the caller to set up room * for fpstate and point at it in that case. 
*/ return; } else { LOG(THREAD, LOG_ASYNCH, 3, "ptr=" PFX "\n", sc->fpstate); } if (proc_has_feature(FEATURE_FXSR)) { LOG(THREAD, LOG_ASYNCH, 3, "\ttemp=" PFX "\n", temp); #ifdef X64 /* this is "unlazy_fpu" */ /* fxsaveq is only supported with gas >= 2.16 but we have that */ asm volatile("fxsaveq %0 ; fnclex" : "=m"(temp->fxsave)); /* now convert into kernel_fpstate_t form */ ASSERT(sizeof(kernel_fpstate_t) == sizeof(struct i387_fxsave_struct)); memcpy(sc->fpstate, &temp->fxsave, sizeof(struct i387_fxsave_struct)); #else /* this is "unlazy_fpu" */ asm volatile("fxsave %0 ; fnclex" : "=m"(temp->fxsave)); /* now convert into kernel_fpstate_t form */ convert_fxsave_to_fpstate(sc->fpstate, &temp->fxsave); #endif } else { /* FIXME NYI: need to convert to fxsave format for sc->fpstate */ IF_X64(ASSERT_NOT_IMPLEMENTED(false)); /* this is "unlazy_fpu" */ asm volatile("fnsave %0 ; fwait" : "=m"(temp->fsave)); /* now convert into kernel_fpstate_t form */ temp->fsave.status = temp->fsave.swd; memcpy(sc->fpstate, &temp->fsave, sizeof(struct i387_fsave_struct)); } save_xmm(dcontext, frame); } #ifdef DEBUG static void dump_fpstate(dcontext_t *dcontext, kernel_fpstate_t *fp) { int i, j; # ifdef X64 LOG(THREAD, LOG_ASYNCH, 1, "\tcwd=" PFX "\n", fp->cwd); LOG(THREAD, LOG_ASYNCH, 1, "\tswd=" PFX "\n", fp->swd); LOG(THREAD, LOG_ASYNCH, 1, "\ttwd=" PFX "\n", fp->twd); LOG(THREAD, LOG_ASYNCH, 1, "\tfop=" PFX "\n", fp->fop); LOG(THREAD, LOG_ASYNCH, 1, "\trip=" PFX "\n", fp->rip); LOG(THREAD, LOG_ASYNCH, 1, "\trdp=" PFX "\n", fp->rdp); LOG(THREAD, LOG_ASYNCH, 1, "\tmxcsr=" PFX "\n", fp->mxcsr); LOG(THREAD, LOG_ASYNCH, 1, "\tmxcsr_mask=" PFX "\n", fp->mxcsr_mask); for (i = 0; i < 8; i++) { LOG(THREAD, LOG_ASYNCH, 1, "\tst%d = 0x", i); for (j = 0; j < 4; j++) { LOG(THREAD, LOG_ASYNCH, 1, "%08x", fp->st_space[i * 4 + j]); } LOG(THREAD, LOG_ASYNCH, 1, "\n"); } for (i = 0; i < 16; i++) { LOG(THREAD, LOG_ASYNCH, 1, "\txmm%d = 0x", i); for (j = 0; j < 4; j++) { LOG(THREAD, LOG_ASYNCH, 1, "%08x", fp->xmm_space[i * 4 + j]); } LOG(THREAD, LOG_ASYNCH, 1, "\n"); } # else LOG(THREAD, LOG_ASYNCH, 1, "\tcw=" PFX "\n", fp->cw); LOG(THREAD, LOG_ASYNCH, 1, "\tsw=" PFX "\n", fp->sw); LOG(THREAD, LOG_ASYNCH, 1, "\ttag=" PFX "\n", fp->tag); LOG(THREAD, LOG_ASYNCH, 1, "\tipoff=" PFX "\n", fp->ipoff); LOG(THREAD, LOG_ASYNCH, 1, "\tcssel=" PFX "\n", fp->cssel); LOG(THREAD, LOG_ASYNCH, 1, "\tdataoff=" PFX "\n", fp->dataoff); LOG(THREAD, LOG_ASYNCH, 1, "\tdatasel=" PFX "\n", fp->datasel); for (i = 0; i < 8; i++) { LOG(THREAD, LOG_ASYNCH, 1, "\tst%d = ", i); for (j = 0; j < 4; j++) LOG(THREAD, LOG_ASYNCH, 1, "%04x ", fp->_st[i].significand[j]); LOG(THREAD, LOG_ASYNCH, 1, "^ %04x\n", fp->_st[i].exponent); } LOG(THREAD, LOG_ASYNCH, 1, "\tstatus=0x%04x\n", fp->status); LOG(THREAD, LOG_ASYNCH, 1, "\tmagic=0x%04x\n", fp->magic); /* FXSR FPU environment */ for (i = 0; i < 6; i++) LOG(THREAD, LOG_ASYNCH, 1, "\tfxsr_env[%d] = " PFX "\n", i, fp->_fxsr_env[i]); LOG(THREAD, LOG_ASYNCH, 1, "\tmxcsr=" PFX "\n", fp->mxcsr); LOG(THREAD, LOG_ASYNCH, 1, "\treserved=" PFX "\n", fp->reserved); for (i = 0; i < 8; i++) { LOG(THREAD, LOG_ASYNCH, 1, "\tfxsr_st%d = ", i); for (j = 0; j < 4; j++) LOG(THREAD, LOG_ASYNCH, 1, "%04x ", fp->_fxsr_st[i].significand[j]); LOG(THREAD, LOG_ASYNCH, 1, "^ %04x\n", fp->_fxsr_st[i].exponent); /* ignore padding */ } for (i = 0; i < 8; i++) { LOG(THREAD, LOG_ASYNCH, 1, "\txmm%d = ", i); for (j = 0; j < 4; j++) LOG(THREAD, LOG_ASYNCH, 1, "%04x ", fp->_xmm[i].element[j]); LOG(THREAD, LOG_ASYNCH, 1, "\n"); } # endif /* Ignore 
padding. */ if (YMM_ENABLED()) { kernel_xstate_t *xstate = (kernel_xstate_t *)fp; if (fp->sw_reserved.magic1 == FP_XSTATE_MAGIC1) { /* i#718: for 32-bit app on 64-bit OS, the xstate_size in sw_reserved * is obtained via cpuid, which is the xstate size of 64-bit arch. */ ASSERT(fp->sw_reserved.extended_size >= sizeof(*xstate)); ASSERT(TEST(XCR0_AVX, fp->sw_reserved.xstate_bv)); LOG(THREAD, LOG_ASYNCH, 1, "\txstate_bv = 0x" HEX64_FORMAT_STRING "\n", xstate->xstate_hdr.xstate_bv); for (i = 0; i < proc_num_simd_sse_avx_registers(); i++) { LOG(THREAD, LOG_ASYNCH, 1, "\tymmh%d = ", i); for (j = 0; j < 4; j++) { LOG(THREAD, LOG_ASYNCH, 1, "%04x ", xstate->ymmh.ymmh_space[i * 4 + j]); } LOG(THREAD, LOG_ASYNCH, 1, "\n"); } } } /* XXX i#1312: Dumping AVX-512 extended registers missing yet. */ } void dump_sigcontext(dcontext_t *dcontext, sigcontext_t *sc) { LOG(THREAD, LOG_ASYNCH, 1, "\tgs=0x%04x" IF_NOT_X64(", __gsh=0x%04x") "\n", sc->gs _IF_NOT_X64(sc->__gsh)); LOG(THREAD, LOG_ASYNCH, 1, "\tfs=0x%04x" IF_NOT_X64(", __fsh=0x%04x") "\n", sc->fs _IF_NOT_X64(sc->__fsh)); # ifndef X64 LOG(THREAD, LOG_ASYNCH, 1, "\tes=0x%04x, __esh=0x%04x\n", sc->es, sc->__esh); LOG(THREAD, LOG_ASYNCH, 1, "\tds=0x%04x, __dsh=0x%04x\n", sc->ds, sc->__dsh); # endif LOG(THREAD, LOG_ASYNCH, 1, "\txdi=" PFX "\n", sc->SC_XDI); LOG(THREAD, LOG_ASYNCH, 1, "\txsi=" PFX "\n", sc->SC_XSI); LOG(THREAD, LOG_ASYNCH, 1, "\txbp=" PFX "\n", sc->SC_XBP); LOG(THREAD, LOG_ASYNCH, 1, "\txsp=" PFX "\n", sc->SC_XSP); LOG(THREAD, LOG_ASYNCH, 1, "\txbx=" PFX "\n", sc->SC_XBX); LOG(THREAD, LOG_ASYNCH, 1, "\txdx=" PFX "\n", sc->SC_XDX); LOG(THREAD, LOG_ASYNCH, 1, "\txcx=" PFX "\n", sc->SC_XCX); LOG(THREAD, LOG_ASYNCH, 1, "\txax=" PFX "\n", sc->SC_XAX); # ifdef X64 LOG(THREAD, LOG_ASYNCH, 1, "\t r8=" PFX "\n", sc->r8); LOG(THREAD, LOG_ASYNCH, 1, "\t r9=" PFX "\n", sc->r8); LOG(THREAD, LOG_ASYNCH, 1, "\tr10=" PFX "\n", sc->r10); LOG(THREAD, LOG_ASYNCH, 1, "\tr11=" PFX "\n", sc->r11); LOG(THREAD, LOG_ASYNCH, 1, "\tr12=" PFX "\n", sc->r12); LOG(THREAD, LOG_ASYNCH, 1, "\tr13=" PFX "\n", sc->r13); LOG(THREAD, LOG_ASYNCH, 1, "\tr14=" PFX "\n", sc->r14); LOG(THREAD, LOG_ASYNCH, 1, "\tr15=" PFX "\n", sc->r15); # endif LOG(THREAD, LOG_ASYNCH, 1, "\ttrapno=" PFX "\n", sc->trapno); LOG(THREAD, LOG_ASYNCH, 1, "\terr=" PFX "\n", sc->err); LOG(THREAD, LOG_ASYNCH, 1, "\txip=" PFX "\n", sc->SC_XIP); LOG(THREAD, LOG_ASYNCH, 1, "\tcs=0x%04x" IF_NOT_X64(", __esh=0x%04x") "\n", sc->cs _IF_NOT_X64(sc->__csh)); LOG(THREAD, LOG_ASYNCH, 1, "\teflags=" PFX "\n", sc->SC_XFLAGS); # ifndef X64 LOG(THREAD, LOG_ASYNCH, 1, "\tesp_at_signal=" PFX "\n", sc->esp_at_signal); LOG(THREAD, LOG_ASYNCH, 1, "\tss=0x%04x, __ssh=0x%04x\n", sc->ss, sc->__ssh); # endif if (sc->fpstate == NULL) LOG(THREAD, LOG_ASYNCH, 1, "\tfpstate=<NULL>\n"); else dump_fpstate(dcontext, sc->fpstate); LOG(THREAD, LOG_ASYNCH, 1, "\toldmask=" PFX "\n", sc->oldmask); LOG(THREAD, LOG_ASYNCH, 1, "\tcr2=" PFX "\n", sc->cr2); } #endif /* DEBUG */ void sigcontext_to_mcontext_simd(priv_mcontext_t *mc, sig_full_cxt_t *sc_full) { sigcontext_t *sc = sc_full->sc; if (sc->fpstate != NULL) { int i; for (i = 0; i < proc_num_simd_sse_avx_registers(); i++) { memcpy(&mc->simd[i], &sc->fpstate->IF_X64_ELSE(xmm_space[i * 4], _xmm[i]), XMM_REG_SIZE); } if (YMM_ENABLED()) { kernel_xstate_t *xstate = (kernel_xstate_t *)sc->fpstate; if (sc->fpstate->sw_reserved.magic1 == FP_XSTATE_MAGIC1) { /* i#718: for 32-bit app on 64-bit OS, the xstate_size in sw_reserved * is obtained via cpuid, which is the xstate size of 64-bit arch. 
*/ ASSERT(sc->fpstate->sw_reserved.extended_size >= sizeof(*xstate)); ASSERT(TEST(XCR0_AVX, sc->fpstate->sw_reserved.xstate_bv)); for (i = 0; i < proc_num_simd_sse_avx_registers(); i++) { memcpy(&mc->simd[i].u32[4], &xstate->ymmh.ymmh_space[i * 4], YMMH_REG_SIZE); } } } } /* XXX i#1312: AVX-512 extended register copies missing yet. */ } void mcontext_to_sigcontext_simd(sig_full_cxt_t *sc_full, priv_mcontext_t *mc) { sigcontext_t *sc = sc_full->sc; if (sc->fpstate != NULL) { int i; for (i = 0; i < proc_num_simd_sse_avx_registers(); i++) { memcpy(&sc->fpstate->IF_X64_ELSE(xmm_space[i * 4], _xmm[i]), &mc->simd[i], XMM_REG_SIZE); } if (YMM_ENABLED()) { kernel_xstate_t *xstate = (kernel_xstate_t *)sc->fpstate; if (sc->fpstate->sw_reserved.magic1 == FP_XSTATE_MAGIC1) { /* i#718: for 32-bit app on 64-bit OS, the xstate_size in sw_reserved * is obtained via cpuid, which is the xstate size of 64-bit arch. */ ASSERT(sc->fpstate->sw_reserved.extended_size >= sizeof(*xstate)); ASSERT(TEST(XCR0_AVX, sc->fpstate->sw_reserved.xstate_bv)); for (i = 0; i < proc_num_simd_sse_avx_registers(); i++) { memcpy(&xstate->ymmh.ymmh_space[i * 4], &mc->simd[i].u32[4], YMMH_REG_SIZE); } } } } /* XXX i#1312: AVX-512 extended register copies missing yet. */ } size_t signal_frame_extra_size(bool include_alignment) { /* Extra space needed to put the signal frame on the app stack. We include the * size of the extra padding potentially needed to align these structs. We * assume the stack pointer is 4-aligned already, so we over estimate padding * size by the alignment minus 4. */ size_t size = YMM_ENABLED() ? xstate_size : sizeof(kernel_fpstate_t); if (include_alignment) size += (YMM_ENABLED() ? AVX_ALIGNMENT : FPSTATE_ALIGNMENT) - 4; return size; } /* To handle varying xstate sizes as kernels add more state over time, we query * the size by sending ourselves a signal at init time and reading what the * kernel saved. We assume that DR's own code does not touch this state, so * that we can update it to the app's latest at delivery time by executing * xsave in save_xmm(). * * XXX: If the kernel ever does lazy state saving for any part of the new state * and that affects the size, like it does with fpstate, this initial signal * state may not match later state. Currently it seems to be all-or-nothing. */ static void xstate_query_signal_handler(int sig, kernel_siginfo_t *siginfo, kernel_ucontext_t *ucxt) { ASSERT_CURIOSITY(sig == XSTATE_QUERY_SIG); if (sig == XSTATE_QUERY_SIG) { sigcontext_t *sc = SIGCXT_FROM_UCXT(ucxt); if (YMM_ENABLED() && sc->fpstate != NULL) { ASSERT_CURIOSITY(sc->fpstate->sw_reserved.magic1 == FP_XSTATE_MAGIC1); LOG(GLOBAL, LOG_ASYNCH, 1, "orig xstate size = " SZFMT "\n", xstate_size); if (sc->fpstate->sw_reserved.extended_size != xstate_size) { xstate_size = sc->fpstate->sw_reserved.extended_size; xstate_has_extra_fields = true; } LOG(GLOBAL, LOG_ASYNCH, 1, "new xstate size = " SZFMT "\n", xstate_size); } else { /* i#2438: we force-initialized xmm state in signal_arch_init(). * But, on WSL it's still NULL (i#1896) so we make this just a curiosity * until we've tackled signals on WSL. */ ASSERT_CURIOSITY(sc->fpstate != NULL); } } } void signal_arch_init(void) { xstate_size = sizeof(kernel_xstate_t) + 4 /* trailing FP_XSTATE_MAGIC2 */; if (YMM_ENABLED() && !standalone_library /* avoid SIGILL for standalone */) { kernel_sigaction_t act, oldact; int rc; /* i#2438: it's possible that our init code to this point has not yet executed * fpu or xmm operations and that thus fpstate will be NULL. 
We force it * with an explicit xmm ref here. We mark it "asm volatile" to prevent the * compiler from optimizing it away. * XXX i#641, i#639: this breaks transparency to some extent until the * app uses fpu/xmm but we live with it. */ __asm__ __volatile__("movd %%xmm0, %0" : "=g"(rc)); memset(&act, 0, sizeof(act)); set_handler_sigact(&act, XSTATE_QUERY_SIG, (handler_t)xstate_query_signal_handler); rc = sigaction_syscall(XSTATE_QUERY_SIG, &act, &oldact); ASSERT(rc == 0); thread_signal(get_process_id(), get_sys_thread_id(), XSTATE_QUERY_SIG); rc = sigaction_syscall(XSTATE_QUERY_SIG, &oldact, NULL); ASSERT(rc == 0); } }
1
17,995
Should we assert that xstate is aligned here?
DynamoRIO-dynamorio
c
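As a point of reference for the review question, save_xmm in the file above already asserts ALIGNED(xstate, AVX_ALIGNMENT) on the extra-fields path. A generic check of that shape, with a hypothetical macro standing in for DR's ALIGNED():

#include <assert.h>
#include <stdint.h>

/* Hypothetical stand-in for DR's ALIGNED() check: true when ptr is
 * aligned to `alignment`, which must be a power of two. */
#define IS_ALIGNED(ptr, alignment) \
    (((uintptr_t)(ptr) & ((uintptr_t)(alignment) - 1)) == 0)

/* The xsave instruction requires its save area to be 64-byte aligned,
 * so a debug check on the frame's xstate pointer could look like: */
static void
check_xstate_alignment(const void *xstate)
{
    assert(IS_ALIGNED(xstate, 64));
}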
@@ -49,8 +49,8 @@ import com.tdunning.math.stats.AVLTreeDigest; // TestCloudJSONFacetJoinDomain for random field faceting tests with domain modifications // TestJsonFacetRefinement for refinement tests [email protected]({"Lucene3x","Lucene40","Lucene41","Lucene42","Lucene45","Appending"}) -public class TestJsonFacets extends SolrTestCaseHS { [email protected]({"Lucene3x","Lucene40","Lucene41","FST50","Direct","Lucene42","Lucene45","Appending","BlockTreeOrds","FSTOrd50"}) +public class TestJsonFacets extends SolrTestCaseHS { private static SolrInstances servers; // for distributed testing private static int origTableSize;
1
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.search.facet; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Random; import java.util.concurrent.atomic.AtomicLong; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.JSONTestUtil; import org.apache.solr.SolrTestCaseHS; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.macro.MacroExpander; import org.apache.solr.util.hll.HLL; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.tdunning.math.stats.AVLTreeDigest; // Related tests: // TestCloudJSONFacetJoinDomain for random field faceting tests with domain modifications // TestJsonFacetRefinement for refinement tests @LuceneTestCase.SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Lucene45","Appending"}) public class TestJsonFacets extends SolrTestCaseHS { private static SolrInstances servers; // for distributed testing private static int origTableSize; private static FacetField.FacetMethod origDefaultFacetMethod; @BeforeClass public static void beforeTests() throws Exception { JSONTestUtil.failRepeatedKeys = true; origTableSize = FacetFieldProcessorByHashDV.MAXIMUM_STARTING_TABLE_SIZE; FacetFieldProcessorByHashDV.MAXIMUM_STARTING_TABLE_SIZE=2; // stress test resizing origDefaultFacetMethod = FacetField.FacetMethod.DEFAULT_METHOD; // instead of the following, see the constructor //FacetField.FacetMethod.DEFAULT_METHOD = rand(FacetField.FacetMethod.values()); // we need DVs on point fields to compute stats & facets if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); initCore("solrconfig-tlog.xml","schema_latest.xml"); } /** * Start all servers for cluster, initialize shards whitelist and then restart */ public static void initServers() throws Exception { if (servers == null) { servers = new SolrInstances(3, "solrconfig-tlog.xml", "schema_latest.xml"); // Set the shards whitelist to all shards plus the fake one used for tolerant test System.setProperty(SOLR_TESTS_SHARDS_WHITELIST, servers.getWhitelistString() + ",http://[ff01::114]:33332"); systemSetPropertySolrDisableShardsWhitelist("false"); restartServers(); } } /** * Restart all configured servers, i.e. 
configuration will be re-read */ public static void restartServers() { servers.slist.forEach(s -> { try { s.stop(); s.start(); } catch (Exception e) { fail("Exception during server restart: " + e.getMessage()); } }); } @AfterClass public static void afterTests() throws Exception { System.clearProperty(SOLR_TESTS_SHARDS_WHITELIST); systemClearPropertySolrDisableShardsWhitelist(); JSONTestUtil.failRepeatedKeys = false; FacetFieldProcessorByHashDV.MAXIMUM_STARTING_TABLE_SIZE=origTableSize; FacetField.FacetMethod.DEFAULT_METHOD = origDefaultFacetMethod; if (servers != null) { servers.stop(); servers = null; } } // tip: when debugging failures, change this variable to DEFAULT_METHOD // (or if only one method is problematic, set to that explicitly) private static final FacetField.FacetMethod TEST_ONLY_ONE_FACET_METHOD = null; // FacetField.FacetMethod.DEFAULT_METHOD; @ParametersFactory public static Iterable<Object[]> parameters() { if (null != TEST_ONLY_ONE_FACET_METHOD) { return Arrays.<Object[]>asList(new Object[] { TEST_ONLY_ONE_FACET_METHOD }); } // wrap each enum val in an Object[] and return as Iterable return () -> Arrays.stream(FacetField.FacetMethod.values()) .map(it -> new Object[]{it}).iterator(); } public TestJsonFacets(FacetField.FacetMethod defMethod) { FacetField.FacetMethod.DEFAULT_METHOD = defMethod; // note: the real default is restored in afterTests } // attempt to reproduce https://github.com/Heliosearch/heliosearch/issues/33 @Test public void testComplex() throws Exception { Random r = random(); Client client = Client.localClient; double price_low = 11000; double price_high = 100000; ModifiableSolrParams p = params("make_s","make_s", "model_s","model_s", "price_low",Double.toString(price_low), "price_high",Double.toString(price_high)); MacroExpander m = new MacroExpander( p.getMap() ); String make_s = m.expand("${make_s}"); String model_s = m.expand("${model_s}"); client.deleteByQuery("*:*", null); int nDocs = 99; String[] makes = {"honda", "toyota", "ford", null}; Double[] prices = {10000.0, 30000.0, 50000.0, 0.0, null}; String[] honda_models = {"accord", "civic", "fit", "pilot", null}; // make sure this is alphabetized to match tiebreaks in index String[] other_models = {"z1", "z2", "z3", "z4", "z5", "z6", null}; int nHonda = 0; final int[] honda_model_counts = new int[honda_models.length]; for (int i=0; i<nDocs; i++) { SolrInputDocument doc = sdoc("id", Integer.toString(i)); Double price = rand(prices); if (price != null) { doc.addField("cost_f", price); } boolean matches_price = price!=null && price >= price_low && price <= price_high; String make = rand(makes); if (make != null) { doc.addField(make_s, make); } if ("honda".equals(make)) { int modelNum = r.nextInt(honda_models.length); String model = honda_models[modelNum]; if (model != null) { doc.addField(model_s, model); } if (matches_price) { nHonda++; honda_model_counts[modelNum]++; } } else if (make == null) { doc.addField(model_s, rand(honda_models)); // add some docs w/ model but w/o make } else { // other makes doc.addField(model_s, rand(other_models)); // add some docs w/ model but w/o make } client.add(doc, null); if (r.nextInt(10) == 0) { client.add(doc, null); // dup, causing a delete } if (r.nextInt(20) == 0) { client.commit(); // force new seg } } client.commit(); // now figure out top counts List<Integer> idx = new ArrayList<>(); for (int i=0; i<honda_model_counts.length-1; i++) { idx.add(i); } Collections.sort(idx, (o1, o2) -> { int cmp = honda_model_counts[o2] - honda_model_counts[o1]; return cmp == 0 ? 
o1 - o2 : cmp; }); // straight query facets client.testJQ(params(p, "q", "*:*", "rows","0", "fq","+${make_s}:honda +cost_f:[${price_low} TO ${price_high}]" , "json.facet", "{makes:{terms:{field:${make_s}, facet:{models:{terms:{field:${model_s}, limit:2, mincount:0}}}}}}}" , "facet","true", "facet.pivot","make_s,model_s", "facet.limit", "2" ) , "facets=={count:" + nHonda + ", makes:{buckets:[{val:honda, count:" + nHonda + ", models:{buckets:[" + "{val:" + honda_models[idx.get(0)] + ", count:" + honda_model_counts[idx.get(0)] + "}," + "{val:" + honda_models[idx.get(1)] + ", count:" + honda_model_counts[idx.get(1)] + "}]}" + "}]}}" ); } public void indexSimple(Client client) throws Exception { client.deleteByQuery("*:*", null); client.add(sdoc("id", "1", "cat_s", "A", "where_s", "NY", "num_d", "4", "num_i", "2", "val_b", "true", "sparse_s", "one"), null); client.add(sdoc("id", "2", "cat_s", "B", "where_s", "NJ", "num_d", "-9", "num_i", "-5", "val_b", "false"), null); client.add(sdoc("id", "3"), null); client.commit(); client.add(sdoc("id", "4", "cat_s", "A", "where_s", "NJ", "num_d", "2", "num_i", "3"), null); client.add(sdoc("id", "5", "cat_s", "B", "where_s", "NJ", "num_d", "11", "num_i", "7", "sparse_s", "two"),null); client.commit(); client.add(sdoc("id", "6", "cat_s", "B", "where_s", "NY", "num_d", "-5", "num_i", "-5"),null); client.commit(); } public void testBehaviorEquivilenceOfUninvertibleFalse() throws Exception { Client client = Client.localClient(); indexSimple(client); // regardless of the facet method (parameterized via default at test class level) // faceting on an "uninvertible=false docValues=false" field is not supported. // // it should behave the same as any attempt (using any method) at faceting on // and "indexed=false docValues=false" field... for (String f : Arrays.asList("where_s_not_indexed_sS", "where_s_multi_not_uninvert", "where_s_single_not_uninvert")) { SolrQueryRequest request = req("rows", "0", "q", "num_i:[* TO 2]", "json.facet", "{x: {type:terms, field:'"+f+"'}}"); if (FacetField.FacetMethod.DEFAULT_METHOD == FacetField.FacetMethod.DVHASH && !f.contains("multi")) { // DVHASH is (currently) weird... // // it's ignored for multi valued fields -- but for single valued fields, it explicitly // checks the *FieldInfos* on the reader to see if the DocVals type is ok. // // Which means that unlike most other facet method:xxx options, it fails hard if you try to use it // on a field where no docs have been indexed (yet). expectThrows(SolrException.class, () ->{ assertJQ(request); }); } else { // In most cases, we should just get no buckets back... assertJQ(request , "response/numFound==3" , "facets/count==3" , "facets/x=={buckets:[]}" ); } } // regardless of the facet method (parameterized via default at test class level) // faceting on an "uninvertible=false docValues=true" field should work, // // it should behave equivilently to it's copyField source... for (String f : Arrays.asList("where_s", "where_s_multi_not_uninvert_dv", "where_s_single_not_uninvert_dv")) { assertJQ(req("rows", "0", "q", "num_i:[* TO 2]", "json.facet", "{x: {type:terms, field:'"+f+"'}}") , "response/numFound==3" , "facets/count==3" , "facets/x=={buckets:[ {val:NY, count:2} , {val:NJ, count:1} ]}" ); } // faceting on an "uninvertible=false docValues=false" field should be possible // when using method:enum w/sort:index // // it should behave equivilent to it's copyField source... 
for (String f : Arrays.asList("where_s", "where_s_multi_not_uninvert", "where_s_single_not_uninvert")) { assertJQ(req("rows", "0", "q", "num_i:[* TO 2]", "json.facet", "{x: {type:terms, sort:'index asc', method:enum, field:'"+f+"'}}") , "response/numFound==3" , "facets/count==3" , "facets/x=={buckets:[ {val:NJ, count:1} , {val:NY, count:2} ]}" ); } } /** * whitebox sanity checks that a shard request range facet that returns "between" or "after" * will cause the correct "actual_end" to be returned */ public void testRangeOtherWhitebox() throws Exception { Client client = Client.localClient(); indexSimple(client); // false is default, but randomly check explicit false as well final String nohardend = random().nextBoolean() ? "" : " hardend:false, "; { // first check some "phase #1" requests final SolrParams p = params("q", "*:*", "rows", "0", "isShard", "true", "distrib", "false", "_facet_", "{}", "shards.purpose", ""+FacetModule.PURPOSE_GET_JSON_FACETS); final String basic_opts = "type:range, field:num_d, start:-5, end:10, gap:7, "; final String buckets = "buckets:[ {val:-5.0,count:1}, {val:2.0,count:2}, {val:9.0,count:1} ], "; client.testJQ(params(p, "json.facet", "{f:{ " + basic_opts + nohardend + " other:before}}") , "facets=={count:6, f:{" + buckets // before doesn't need actual_end + " before:{count:1}" + "} }" ); client.testJQ(params(p, "json.facet", "{f:{" + basic_opts + nohardend + "other:after}}") , "facets=={count:6, f:{" + buckets + " after:{count:0}, _actual_end:'16.0'" + "} }" ); client.testJQ(params(p, "json.facet", "{f:{ " + basic_opts + nohardend + "other:between}}") , "facets=={count:6, f:{" + buckets + " between:{count:4}, _actual_end:'16.0'" + "} }" ); client.testJQ(params(p, "json.facet", "{f:{ " + basic_opts + nohardend + "other:all}}") , "facets=={count:6, f:{" + buckets + " before:{count:1}," + " after:{count:0}," + " between:{count:4}," + " _actual_end:'16.0'" + "} }" ); // with hardend:true, not only do the buckets change, but actual_end should not need to be returned client.testJQ(params(p, "json.facet", "{f:{ " + basic_opts + " hardend:true, other:after}}") , "facets=={count:6, f:{" + " buckets:[ {val:-5.0,count:1}, {val:2.0,count:2}, {val:9.0,count:0} ], " + " after:{count:1}" + "} }" ); } { // now check some "phase #2" requests with refinement buckets already specified final String facet = "{ top:{ type:range, field:num_i, start:-5, end:5, gap:7," + nohardend + " other:all, facet:{ x:{ type:terms, field:cat_s, limit:1, refine:true } } } }"; // the behavior should be the same, regardless of wether we pass actual_end to the shards // because in a "mixed mode" rolling update, the shards should be smart enough to re-compute if // the merging node is running an older version that doesn't send it for (String actual_end : Arrays.asList(", _actual_end:'9'", "")) { client.testJQ(params("q", "*:*", "rows", "0", "isShard", "true", "distrib", "false", "shards.purpose", ""+FacetModule.PURPOSE_REFINE_JSON_FACETS, "json.facet", facet, "_facet_", "{ refine: { top: { between:{ x:{ _l:[B] } }" + actual_end + "} } }") , "facets=={top:{ buckets:[], between:{x:{buckets:[{val:B,count:3}] }} } }"); } } } @Test public void testExplicitQueryDomain() throws Exception { Client client = Client.localClient(); indexSimple(client); { // simple 'query' domain // the facet buckets for all of the requests below should be identical // only the numFound & top level facet count should differ final String expectedFacets = "facets/w=={ buckets:[" + " { val:'NJ', count:2}, " + " { val:'NY', count:1} ] 
}"; assertJQ(req("rows", "0", "q", "cat_s:B", "json.facet", "{w: {type:terms, field:'where_s'}}"), "response/numFound==3", "facets/count==3", expectedFacets); assertJQ(req("rows", "0", "q", "id:3", "json.facet", "{w: {type:terms, field:'where_s', domain: { query:'cat_s:B' }}}"), "response/numFound==1", "facets/count==1", expectedFacets); assertJQ(req("rows", "0", "q", "*:*", "fq", "-*:*", "json.facet", "{w: {type:terms, field:'where_s', domain: { query:'cat_s:B' }}}"), "response/numFound==0", "facets/count==0", expectedFacets); assertJQ(req("rows", "0", "q", "*:*", "fq", "-*:*", "domain_q", "cat_s:B", "json.facet", "{w: {type:terms, field:'where_s', domain: { query:{param:domain_q} }}}"), "response/numFound==0", "facets/count==0", expectedFacets); } { // a nested explicit query domain // for all of the "top" buckets, the subfacet should have identical sub-buckets final String expectedSubBuckets = "{ buckets:[ { val:'B', count:3}, { val:'A', count:2} ] }"; assertJQ(req("rows", "0", "q", "num_i:[0 TO *]", "json.facet", "{w: {type:terms, field:'where_s', " + " facet: { c: { type:terms, field:'cat_s', domain: { query:'*:*' }}}}}") , "facets/w=={ buckets:[" + " { val:'NJ', count:2, c: " + expectedSubBuckets + "}, " + " { val:'NY', count:1, c: " + expectedSubBuckets + "} " + "] }" ); } { // an (effectively) empty query should produce an error ignoreException("'query' domain can not be null"); ignoreException("'query' domain must not evaluate to an empty list"); for (String raw : Arrays.asList("null", "[ ]", "{param:bogus}")) { expectThrows(SolrException.class, () -> { assertJQ(req("rows", "0", "q", "num_i:[0 TO *]", "json.facet", "{w: {type:terms, field:'where_s', " + " facet: { c: { type:terms, field:'cat_s', domain: { query: "+raw+" }}}}}")); }); } } } @Test public void testSimpleSKG() throws Exception { Client client = Client.localClient(); indexSimple(client); // using relatedness() as a top level stat, not nested under any facet // (not particularly useful, but shouldn't error either) assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", "fore", "where_s:NY", "back", "*:*", "json.facet", " { skg: 'relatedness($fore,$back)' }") , "facets=={" + " count:5, " + " skg : { relatedness: 0.00699," + " foreground_popularity: 0.33333," + " background_popularity: 0.83333," + " } }" ); // simple single level facet w/skg stat & (re)sorting for (String sort : Arrays.asList("sort:'index asc'", "sort:'y desc'", "sort:'z desc'", "sort:'skg desc'", "prelim_sort:'count desc', sort:'index asc'", "prelim_sort:'count desc', sort:'y desc'", "prelim_sort:'count desc', sort:'z desc'", "prelim_sort:'count desc', sort:'skg desc'")) { // the relatedness score of each of our cat_s values is (conviniently) also alphabetical order, // (and the same order as 'sum(num_i) desc' & 'min(num_i) desc') // // So all of these re/sort options should produce identical output (since the num buckets is < limit) // - Testing "index" sort allows the randomized use of "stream" processor as default to be tested. 
// - Testing (re)sorts on other stats sanity checks code paths where relatedness() is a "defered" Agg assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", "fore", "where_s:NY", "back", "*:*", "json.facet", "" + "{x: { type: terms, field: 'cat_s', "+sort+", " + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }") , "facets=={count:5, x:{ buckets:[" + " { val:'A', count:2, y:5.0, z:2, " + " skg : { relatedness: 0.00554, " //+ " foreground_count: 1, " //+ " foreground_size: 2, " //+ " background_count: 2, " //+ " background_size: 6," + " foreground_popularity: 0.16667," + " background_popularity: 0.33333, }," + " }, " + " { val:'B', count:3, y:-3.0, z:-5, " + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated //+ " foreground_count: 1, " //+ " foreground_size: 2, " //+ " background_count: 3, " //+ " background_size: 6," + " foreground_popularity: 0.16667," + " background_popularity: 0.5 }," + " } ] } } " ); } // trivial sanity check that we can (re)sort on SKG after pre-sorting on count... // ...and it's only computed for the top N buckets (based on our pre-sort) for (int overrequest : Arrays.asList(0, 1, 42)) { // based on our counts & relatedness values, the blackbox output should be the same for both // overrequest values ... only DebugAgg stats should change... DebugAgg.Acc.collectDocs.set(0); DebugAgg.Acc.collectDocSets.set(0); assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", "fore", "where_s:NJ", "back", "*:*", "json.facet", "" + "{x: { type: terms, field: 'cat_s', prelim_sort: 'count desc', sort:'skg desc', " + " limit: 1, overrequest: " + overrequest + ", " + " facet: { skg: 'debug(wrap,relatedness($fore,$back))' } } }") , "facets=={count:5, x:{ buckets:[" + " { val:'B', count:3, " + " skg : { relatedness: 0.00638, " //+ " foreground_count: 2, " //+ " foreground_size: 3, " //+ " background_count: 3, " //+ " background_size: 6," + " foreground_popularity: 0.33333," + " background_popularity: 0.5 }," + " }, " + " ] } } " ); // at most 2 buckets, regardless of overrequest... assertEqualsAndReset(0 < overrequest ? 
2 : 1, DebugAgg.Acc.collectDocSets); assertEqualsAndReset(0, DebugAgg.Acc.collectDocs); } // SKG used in multiple nested facets // // we'll re-use these params in 2 requests, one will simulate a shard request final SolrParams nestedSKG = params ("q", "cat_s:[* TO *]", "rows", "0", "fore", "num_i:[-1000 TO 0]", "back", "*:*", "json.facet" , "{x: { type: terms, field: 'cat_s', sort: 'skg desc', " + " facet: { skg: 'relatedness($fore,$back)', " + " y: { type: terms, field: 'where_s', sort: 'skg desc', " + " facet: { skg: 'relatedness($fore,$back)' } } } } }"); // plain old request assertJQ(req(nestedSKG) , "facets=={count:5, x:{ buckets:[" + " { val:'B', count:3, " + " skg : { relatedness: 0.01539, " //+ " foreground_count: 2, " //+ " foreground_size: 2, " //+ " background_count: 3, " //+ " background_size: 6, " + " foreground_popularity: 0.33333," + " background_popularity: 0.5 }," + " y : { buckets:[" + " { val:'NY', count: 1, " + " skg : { relatedness: 0.00554, " //+ " foreground_count: 1, " //+ " foreground_size: 2, " //+ " background_count: 2, " //+ " background_size: 6, " + " foreground_popularity: 0.16667, " + " background_popularity: 0.33333, " + " } }, " + " { val:'NJ', count: 2, " + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated //+ " foreground_count: 1, " //+ " foreground_size: 2, " //+ " background_count: 3, " //+ " background_size: 6, " + " foreground_popularity: 0.16667, " + " background_popularity: 0.5, " + " } }, " + " ] } " + " }, " + " { val:'A', count:2, " + " skg : { relatedness:-0.01097, " //+ " foreground_count: 0, " //+ " foreground_size: 2, " //+ " background_count: 2, " //+ " background_size: 6," + " foreground_popularity: 0.0," + " background_popularity: 0.33333 }," + " y : { buckets:[" + " { val:'NJ', count: 1, " + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated //+ " foreground_count: 0, " //+ " foreground_size: 0, " //+ " background_count: 3, " //+ " background_size: 6, " + " foreground_popularity: 0.0, " + " background_popularity: 0.5, " + " } }, " + " { val:'NY', count: 1, " + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated //+ " foreground_count: 0, " //+ " foreground_size: 0, " //+ " background_count: 2, " //+ " background_size: 6, " + " foreground_popularity: 0.0, " + " background_popularity: 0.33333, " + " } }, " + " ] } } ] } } "); // same request, but with whitebox params testing isShard // to verify the raw counts/sizes assertJQ(req(nestedSKG, // fake an initial shard request "distrib", "false", "isShard", "true", "_facet_", "{}", "shards.purpose", ""+FacetModule.PURPOSE_GET_JSON_FACETS) , "facets=={count:5, x:{ buckets:[" + " { val:'B', count:3, " + " skg : { " + " foreground_count: 2, " + " foreground_size: 2, " + " background_count: 3, " + " background_size: 6 }, " + " y : { buckets:[" + " { val:'NY', count: 1, " + " skg : { " + " foreground_count: 1, " + " foreground_size: 2, " + " background_count: 2, " + " background_size: 6, " + " } }, " + " { val:'NJ', count: 2, " + " skg : { " + " foreground_count: 1, " + " foreground_size: 2, " + " background_count: 3, " + " background_size: 6, " + " } }, " + " ] } " + " }, " + " { val:'A', count:2, " + " skg : { " + " foreground_count: 0, " + " foreground_size: 2, " + " background_count: 2, " + " background_size: 6 }," + " y : { buckets:[" + " { val:'NJ', count: 1, " + " skg : { " + " foreground_count: 0, " + " foreground_size: 0, " + " background_count: 3, " + " background_size: 6, " + " } }, " + " { val:'NY', count: 1, " + " skg : { " + " 
foreground_count: 0, " + " foreground_size: 0, " + " background_count: 2, " + " background_size: 6, " + " } }, " + " ] } } ] } } "); // SKG w/min_pop (NOTE: incredibly contrived and not-useful fore/back for testing min_pop w/shard sorting) // // we'll re-use these params in 2 requests, one will simulate a shard request final SolrParams minPopSKG = params ("q", "cat_s:[* TO *]", "rows", "0", "fore", "num_i:[0 TO 1000]", "back", "cat_s:B", "json.facet" , "{x: { type: terms, field: 'cat_s', sort: 'skg desc', " + " facet: { skg: { type:func, func:'relatedness($fore,$back)', " + " min_popularity: 0.001 }" + " } } }"); // plain old request assertJQ(req(minPopSKG) , "facets=={count:5, x:{ buckets:[" + " { val:'B', count:3, " + " skg : { relatedness: -1.0, " //+ " foreground_count: 1, " //+ " foreground_size: 3, " //+ " background_count: 3, " //+ " background_size: 3, " + " foreground_popularity: 0.33333," + " background_popularity: 1.0," + " } }, " + " { val:'A', count:2, " + " skg : { relatedness:'-Infinity', " // bg_pop is below min_pop (otherwise 1.0) //+ " foreground_count: 2, " //+ " foreground_size: 3, " //+ " background_count: 0, " //+ " background_size: 3, " + " foreground_popularity: 0.66667," + " background_popularity: 0.0," + " } } ] } } "); // same request, but with whitebox params testing isShard // to verify the raw counts/sizes and that per-shard sorting doesn't pre-emptively sort "A" to the bottom assertJQ(req(minPopSKG, // fake an initial shard request "distrib", "false", "isShard", "true", "_facet_", "{}", "shards.purpose", ""+FacetModule.PURPOSE_GET_JSON_FACETS) , "facets=={count:5, x:{ buckets:[" + " { val:'A', count:2, " + " skg : { " + " foreground_count: 2, " + " foreground_size: 3, " + " background_count: 0, " + " background_size: 3, " + " } }, " + " { val:'B', count:3, " + " skg : { " + " foreground_count: 1, " + " foreground_size: 3, " + " background_count: 3, " + " background_size: 3, " + " } } ] } }"); } @Test public void testRepeatedNumerics() throws Exception { Client client = Client.localClient(); String field = "num_is"; // docValues of multi-valued points field can contain duplicate values... make sure they don't mess up our counts. 
client.add(sdoc("id", "1", "cat_s", "A", "where_s", "NY", "num_d", "4", "num_i", "2", "val_b", "true", "sparse_s", "one", field,"0", field,"0"), null); client.commit(); client.testJQ(params("q", "id:1", "field", field , "json.facet", "{" + "f1:{terms:${field}}" + ",f2:'hll(${field})'" + ",f3:{type:range, field:${field}, start:0, end:1, gap:1}" + "}" ) , "facets=={count:1, " + "f1:{buckets:[{val:0, count:1}]}" + ",f2:1" + ",f3:{buckets:[{val:0, count:1}]}" + "}" ); } public void testDomainJoinSelf() throws Exception { Client client = Client.localClient(); indexSimple(client); // self join domain switch at the second level of faceting assertJQ(req("q", "*:*", "rows", "0", "json.facet", "" + "{x: { type: terms, field: 'num_i', " + " facet: { y: { domain: { join: { from: 'cat_s', to: 'cat_s' } }, " + " type: terms, field: 'where_s' " + " } } } }") , "facets=={count:6, x:{ buckets:[" + " { val:-5, count:2, " + " y : { buckets:[{ val:'NJ', count:2 }, { val:'NY', count:1 } ] } }, " + " { val:2, count:1, " + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } }, " + " { val:3, count:1, " + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } }, " + " { val:7, count:1, " + " y : { buckets:[{ val:'NJ', count:2 }, { val:'NY', count:1 } ] } } ] } }" ); } public void testDomainGraph() throws Exception { Client client = Client.localClient(); indexSimple(client); // should be the same as join self assertJQ(req("q", "*:*", "rows", "0", "json.facet", "" + "{x: { type: terms, field: 'num_i', " + " facet: { y: { domain: { graph: { from: 'cat_s', to: 'cat_s' } }, " + " type: terms, field: 'where_s' " + " } } } }") , "facets=={count:6, x:{ buckets:[" + " { val:-5, count:2, " + " y : { buckets:[{ val:'NJ', count:2 }, { val:'NY', count:1 } ] } }, " + " { val:2, count:1, " + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } }, " + " { val:3, count:1, " + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } }, " + " { val:7, count:1, " + " y : { buckets:[{ val:'NJ', count:2 }, { val:'NY', count:1 } ] } } ] } }" ); // This time, test with a traversalFilter // should be the same as join self assertJQ(req("q", "*:*", "rows", "0", "json.facet", "" + "{x: { type: terms, field: 'num_i', " + " facet: { y: { domain: { graph: { from: 'cat_s', to: 'cat_s', traversalFilter: 'where_s:NY' } }, " + " type: terms, field: 'where_s' " + " } } } }") , "facets=={count:6, x:{ buckets:[" + " { val:-5, count:2, " + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } }, " + " { val:2, count:1, " + " y : { buckets:[{ val:'NY', count:1 } ] } }, " + " { val:3, count:1, " + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } }, " + " { val:7, count:1, " + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } } ] } }" ); } public void testNestedJoinDomain() throws Exception { Client client = Client.localClient(); client.deleteByQuery("*:*", null); client.add(sdoc("id", "1", "1_s", "A", "2_s", "A", "3_s", "C", "y_s", "B", "x_t", "x z", "z_t", " 2 3"), null); client.add(sdoc("id", "2", "1_s", "B", "2_s", "A", "3_s", "B", "y_s", "B", "x_t", "x y ", "z_t", "1 3"), null); client.add(sdoc("id", "3", "1_s", "C", "2_s", "A", "3_s", "#", "y_s", "A", "x_t", " y z", "z_t", "1 2 "), null); client.add(sdoc("id", "4", "1_s", "A", "2_s", "B", "3_s", "C", "y_s", "A", "x_t", " z", "z_t", " 3"), null); client.add(sdoc("id", "5", "1_s", "B", "2_s", "_", "3_s", "B", "y_s", "C", "x_t", "x ", "z_t", "1 3"), null); client.add(sdoc("id", "6", "1_s", "C", "2_s", "B", 
"3_s", "A", "y_s", "C", "x_t", "x y z", "z_t", "1 "), null); client.commit(); assertJQ(req("q", "x_t:x", "rows", "0", // NOTE q - only x=x in base set (1,2,5,6) "json.facet", "" + "{x: { type: terms, field: 'x_t', " + " domain: { join: { from:'1_s', to:'2_s' } }," // y1 & y2 are the same facet, with *similar* child facet z1/z2 ... + " facet: { y1: { type: terms, field: 'y_s', " // z1 & z2 are same field, diff join... + " facet: { z1: { type: terms, field: 'z_t', " + " domain: { join: { from:'2_s', to:'3_s' } } } } }," + " y2: { type: terms, field: 'y_s', " // z1 & z2 are same field, diff join... + " facet: { z2: { type: terms, field: 'z_t', " + " domain: { join: { from:'3_s', to:'1_s' } } } } } } } }") , "facets=={count:4, " + "x:{ buckets:[" // joined 1->2: doc5 drops out, counts: z=4, x=3, y=3 + " { val:z, count:4, " // x=z (docs 1,3,4,6) y terms: A=2, B=1, C=1 + " y1 : { buckets:[ " // z1 joins 2->3... + " { val:A, count:2, " // A in docs(3,4), joins (A,B) -> docs(2,5,6) + " z1: { buckets:[{ val:'1', count:3 }, { val:'3', count:2 }] } }, " + " { val:B, count:1, " // B in doc1, joins A -> doc6 + " z1: { buckets:[{ val:'1', count:1 }] } }, " + " { val:C, count:1, " // C in doc6, joins B -> docs(2,5) + " z1: { buckets:[{ val:'1', count:2 }, { val:'3', count:2 }] } } " + " ] }, " + " y2 : { buckets:[ " // z2 joins 3->1... + " { val:A, count:2, " // A in docs(3,4), joins C -> docs(3,6) + " z2: { buckets:[{ val:'1', count:2 }, { val:'2', count:1 }] } }, " + " { val:B, count:1, " // B in doc1, joins C -> docs(3,6) + " z2: { buckets:[{ val:'1', count:2 }, { val:'2', count:1 }] } }, " + " { val:C, count:1, " // C in doc6, joins A -> docs(1,4) + " z2: { buckets:[{ val:'3', count:2 }, { val:'2', count:1 }] } } " + " ] } }, " + " { val:x, count:3, " // x=x (docs 1,2,!5,6) y terms: B=2, C=1 + " y1 : { buckets:[ " // z1 joins 2->3... + " { val:B, count:2, " // B in docs(1,2), joins A -> doc6 + " z1: { buckets:[{ val:'1', count:1 }] } }, " + " { val:C, count:1, " // C in doc6, joins B -> docs(2,5) + " z1: { buckets:[{ val:'1', count:2 }, { val:'3', count:2 }] } } " + " ] }, " + " y2 : { buckets:[ " // z2 joins 3->1... + " { val:B, count:2, " // B in docs(1,2), joins C,B -> docs(2,3,5,6) + " z2: { buckets:[{ val:'1', count:4 }, { val:'3', count:2 }, { val:'2', count:1 }] } }, " + " { val:C, count:1, " // C in doc6, joins A -> docs(1,4) + " z2: { buckets:[{ val:'3', count:2 }, { val:'2', count:1 }] } } " + " ] } }, " + " { val:y, count:3, " // x=y (docs 2,3,6) y terms: A=1, B=1, C=1 + " y1 : { buckets:[ " // z1 joins 2->3... + " { val:A, count:1, " // A in doc3, joins A -> doc6 + " z1: { buckets:[{ val:'1', count:1 }] } }, " + " { val:B, count:1, " // B in doc2, joins A -> doc6 + " z1: { buckets:[{ val:'1', count:1 }] } }, " + " { val:C, count:1, " // C in doc6, joins B -> docs(2,5) + " z1: { buckets:[{ val:'1', count:2 }, { val:'3', count:2 }] } } " + " ] }, " + " y2 : { buckets:[ " // z2 joins 3->1... 
+ " { val:A, count:1, " // A in doc3, joins # -> empty set + " z2: { buckets:[ ] } }, " + " { val:B, count:1, " // B in doc2, joins B -> docs(2,5) + " z2: { buckets:[{ val:'1', count:2 }, { val:'3', count:2 }] } }, " + " { val:C, count:1, " // C in doc6, joins A -> docs(1,4) + " z2: { buckets:[{ val:'3', count:2 }, { val:'2', count:1 }] } } " + " ]} }" + " ]}}" ); } @Test public void testMethodStream() throws Exception { Client client = Client.localClient(); indexSimple(client); // test multiple json.facet commands assertJQ(req("q", "*:*", "rows", "0" , "json.facet", "{x:'sum(num_d)'}" , "json.facet", "{y:'min(num_d)'}" ) , "facets=={count:6 , x:3.0, y:-9.0 }" ); // test streaming assertJQ(req("q", "*:*", "rows", "0" , "json.facet", "{ cat:{terms:{field:'cat_s', method:stream }}" + // won't stream; need sort:index asc ", cat2:{terms:{field:'cat_s', method:stream, sort:'index asc' }}" + ", cat3:{terms:{field:'cat_s', method:stream, sort:'index asc', mincount:3 }}" + // mincount ", cat4:{terms:{field:'cat_s', method:stream, sort:'index asc', prefix:B }}" + // prefix ", cat5:{terms:{field:'cat_s', method:stream, sort:'index asc', offset:1 }}" + // offset " }" ) , "facets=={count:6 " + ", cat :{buckets:[{val:B, count:3},{val:A, count:2}]}" + ", cat2:{buckets:[{val:A, count:2},{val:B, count:3}]}" + ", cat3:{buckets:[{val:B, count:3}]}" + ", cat4:{buckets:[{val:B, count:3}]}" + ", cat5:{buckets:[{val:B, count:3}]}" + " }" ); // test nested streaming under non-streaming assertJQ(req("q", "*:*", "rows", "0" , "json.facet", "{ cat:{terms:{field:'cat_s', sort:'index asc', facet:{where:{terms:{field:where_s,method:stream,sort:'index asc'}}} }}}" ) , "facets=={count:6 " + ", cat :{buckets:[{val:A, count:2, where:{buckets:[{val:NJ,count:1},{val:NY,count:1}]} },{val:B, count:3, where:{buckets:[{val:NJ,count:2},{val:NY,count:1}]} }]}" + "}" ); // test nested streaming under streaming assertJQ(req("q", "*:*", "rows", "0" , "json.facet", "{ cat:{terms:{field:'cat_s', method:stream,sort:'index asc', facet:{where:{terms:{field:where_s,method:stream,sort:'index asc'}}} }}}" ) , "facets=={count:6 " + ", cat :{buckets:[{val:A, count:2, where:{buckets:[{val:NJ,count:1},{val:NY,count:1}]} },{val:B, count:3, where:{buckets:[{val:NJ,count:2},{val:NY,count:1}]} }]}" + "}" ); // test nested streaming with stats under streaming assertJQ(req("q", "*:*", "rows", "0" , "json.facet", "{ cat:{terms:{field:'cat_s', method:stream,sort:'index asc', facet:{ where:{terms:{field:where_s,method:stream,sort:'index asc',sort:'index asc', facet:{x:'max(num_d)'} }}} }}}" ) , "facets=={count:6 " + ", cat :{buckets:[{val:A, count:2, where:{buckets:[{val:NJ,count:1,x:2.0},{val:NY,count:1,x:4.0}]} },{val:B, count:3, where:{buckets:[{val:NJ,count:2,x:11.0},{val:NY,count:1,x:-5.0}]} }]}" + "}" ); // test nested streaming with stats under streaming with stats assertJQ(req("q", "*:*", "rows", "0", "facet","true" , "json.facet", "{ cat:{terms:{field:'cat_s', method:stream,sort:'index asc', facet:{ y:'min(num_d)', where:{terms:{field:where_s,method:stream,sort:'index asc', facet:{x:'max(num_d)'} }}} }}}" ) , "facets=={count:6 " + ", cat :{buckets:[{val:A, count:2, y:2.0, where:{buckets:[{val:NJ,count:1,x:2.0},{val:NY,count:1,x:4.0}]} },{val:B, count:3, y:-9.0, where:{buckets:[{val:NJ,count:2,x:11.0},{val:NY,count:1,x:-5.0}]} }]}" + "}" ); assertJQ(req("q", "*:*", "fq","cat_s:A") , "response/numFound==2" ); } Map<String,String[]> suffixMap = new HashMap<>(); { suffixMap.put("_s", new String[]{"_s","_ss","_sd","_sds"} ); suffixMap.put("_ss", new 
String[]{"_ss","_sds"} ); suffixMap.put("_l", new String[]{"_l","_ls","_ld","_lds"} ); suffixMap.put("_ls", new String[]{"_ls","_lds"} ); suffixMap.put("_i", new String[]{"_i","_is","_id","_ids", "_l","_ls","_ld","_lds"} ); suffixMap.put("_is", new String[]{"_is","_ids", "_ls","_lds"} ); suffixMap.put("_d", new String[]{"_d","_ds","_dd","_dds"} ); suffixMap.put("_ds", new String[]{"_ds","_dds"} ); suffixMap.put("_f", new String[]{"_f","_fs","_fd","_fds", "_d","_ds","_dd","_dds"} ); suffixMap.put("_fs", new String[]{"_fs","_fds","_ds","_dds"} ); suffixMap.put("_dt", new String[]{"_dt","_dts","_dtd","_dtds"} ); suffixMap.put("_dts", new String[]{"_dts","_dtds"} ); suffixMap.put("_b", new String[]{"_b"} ); } List<String> getAlternatives(String field) { int idx = field.lastIndexOf("_"); if (idx<=0 || idx>=field.length()) return Collections.singletonList(field); String suffix = field.substring(idx); String[] alternativeSuffixes = suffixMap.get(suffix); if (alternativeSuffixes == null) return Collections.singletonList(field); String base = field.substring(0, idx); List<String> out = new ArrayList<>(alternativeSuffixes.length); for (String altS : alternativeSuffixes) { out.add( base + altS ); } Collections.shuffle(out, random()); return out; } @Test public void testStats() throws Exception { doStats(Client.localClient, params("debugQuery", Boolean.toString(random().nextBoolean()) )); } @Test public void testStatsDistrib() throws Exception { initServers(); Client client = servers.getClient(random().nextInt()); client.queryDefaults().set( "shards", servers.getShards(), "debugQuery", Boolean.toString(random().nextBoolean()) ); doStats( client, params() ); } public void doStats(Client client, ModifiableSolrParams p) throws Exception { Map<String, List<String>> fieldLists = new HashMap<>(); fieldLists.put("noexist", getAlternatives("noexist_s")); fieldLists.put("cat_s", getAlternatives("cat_s")); fieldLists.put("where_s", getAlternatives("where_s")); fieldLists.put("num_d", getAlternatives("num_f")); // num_d name is historical, which is why we map it to num_f alternatives so we can include floats as well fieldLists.put("num_i", getAlternatives("num_i")); fieldLists.put("super_s", getAlternatives("super_s")); fieldLists.put("val_b", getAlternatives("val_b")); fieldLists.put("date", getAlternatives("date_dt")); fieldLists.put("sparse_s", getAlternatives("sparse_s")); fieldLists.put("multi_ss", getAlternatives("multi_ss")); // TODO: if a field will be used as a function source, we can't use multi-valued types for it (currently) int maxAlt = 0; for (List<String> fieldList : fieldLists.values()) { maxAlt = Math.max(fieldList.size(), maxAlt); } // take the field with the maximum number of alternative types and loop through our variants that many times for (int i=0; i<maxAlt; i++) { ModifiableSolrParams args = params(p); for (String field : fieldLists.keySet()) { List<String> alts = fieldLists.get(field); String alt = alts.get( i % alts.size() ); args.add(field, alt); } args.set("rows","0"); // doStatsTemplated(client, args); } // single valued strings doStatsTemplated(client, params(p, "rows","0", "noexist","noexist_s", "cat_s","cat_s", "where_s","where_s", "num_d","num_d", "num_i","num_i", "num_l","long_l", "super_s","super_s", "val_b","val_b", "date","date_dt", "sparse_s","sparse_s" ,"multi_ss","multi_ss") ); // multi-valued strings, long/float substitute for int/double doStatsTemplated(client, params(p, "facet","true", "rows","0", "noexist","noexist_ss", "cat_s","cat_ss", "where_s","where_ss", 
"num_d","num_f", "num_i","num_l", "num_l","long_l", "num_is","num_ls", "num_fs", "num_ds", "super_s","super_ss", "val_b","val_b", "date","date_dt", "sparse_s","sparse_ss", "multi_ss","multi_ss") ); // multi-valued strings, method=dv for terms facets doStatsTemplated(client, params(p, "terms_method", "method:dv,", "rows", "0", "noexist", "noexist_ss", "cat_s", "cat_ss", "where_s", "where_ss", "num_d", "num_f", "num_i", "num_l", "num_l","long_l","super_s", "super_ss", "val_b", "val_b", "date", "date_dt", "sparse_s", "sparse_ss", "multi_ss", "multi_ss")); // single valued docvalues for strings, and single valued numeric doc values for numeric fields doStatsTemplated(client, params(p, "rows","0", "noexist","noexist_sd", "cat_s","cat_sd", "where_s","where_sd", "num_d","num_dd", "num_i","num_id", "num_is","num_lds", "num_l","long_ld", "num_fs","num_dds", "super_s","super_sd", "val_b","val_b", "date","date_dtd", "sparse_s","sparse_sd" ,"multi_ss","multi_sds") ); // multi-valued docvalues FacetFieldProcessorByArrayDV.unwrap_singleValued_multiDv = false; // better multi-valued coverage doStatsTemplated(client, params(p, "rows","0", "noexist","noexist_sds", "cat_s","cat_sds", "where_s","where_sds", "num_d","num_d", "num_i","num_i", "num_is","num_ids", "num_l","long_ld", "num_fs","num_fds", "super_s","super_sds", "val_b","val_b", "date","date_dtds", "sparse_s","sparse_sds" ,"multi_ss","multi_sds") ); // multi-valued docvalues FacetFieldProcessorByArrayDV.unwrap_singleValued_multiDv = true; doStatsTemplated(client, params(p, "rows","0", "noexist","noexist_sds", "cat_s","cat_sds", "where_s","where_sds", "num_d","num_d", "num_i","num_i", "num_is","num_ids", "num_l","long_ld", "num_fs","num_fds", "super_s","super_sds", "val_b","val_b", "date","date_dtds", "sparse_s","sparse_sds" ,"multi_ss","multi_sds") ); } public static void doStatsTemplated(Client client, ModifiableSolrParams p) throws Exception { int numShards = client.local() ? 1 : client.getClientProvider().all().size(); p.set("Z_num_i", "Z_" + p.get("num_i") ); p.set("Z_num_l", "Z_" + p.get("num_l") ); p.set("sparse_num_d", "sparse_" + p.get("num_d") ); if (p.get("num_is") == null) p.add("num_is","num_is"); if (p.get("num_fs") == null) p.add("num_fs","num_fs"); String terms = p.get("terms"); if (terms == null) terms=""; int limit=0; switch (random().nextInt(4)) { case 0: limit=-1; break; case 1: limit=1000000; break; case 2: // fallthrough case 3: // fallthrough } if (limit != 0) { terms=terms+"limit:"+limit+","; } String terms_method = p.get("terms_method"); if (terms_method != null) { terms=terms+terms_method; } String refine_method = p.get("refine_method"); if (refine_method == null && random().nextBoolean()) { refine_method = "refine:true,"; } if (refine_method != null) terms = terms + refine_method; p.set("terms", terms); // "${terms}" should be put at the beginning of generic terms facets. // It may specify "method=..." or "limit:-1", so should not be used if the facet explicitly specifies. 
MacroExpander m = new MacroExpander( p.getMap() ); String cat_s = m.expand("${cat_s}"); String where_s = m.expand("${where_s}"); String num_d = m.expand("${num_d}"); String num_i = m.expand("${num_i}"); String num_is = m.expand("${num_is}"); String num_fs = m.expand("${num_fs}"); String Z_num_i = m.expand("${Z_num_i}"); String Z_num_l = m.expand("${Z_num_l}"); String val_b = m.expand("${val_b}"); String date = m.expand("${date}"); String super_s = m.expand("${super_s}"); String sparse_s = m.expand("${sparse_s}"); String multi_ss = m.expand("${multi_ss}"); String sparse_num_d = m.expand("${sparse_num_d}"); client.deleteByQuery("*:*", null); Client iclient = client; /*** This code was not needed yet, but may be needed if we want to force empty shard results more often. // create a new indexing client that doesn't use one shard to better test for empty or non-existent results if (!client.local()) { List<SolrClient> shards = client.getClientProvider().all(); iclient = new Client(shards.subList(0, shards.size()-1), client.getClientProvider().getSeed()); } ***/ SolrInputDocument doc = sdoc("id", "1", cat_s, "A", where_s, "NY", num_d, "4", sparse_num_d, "6", num_i, "2", num_is,"2",num_is,"-5", num_fs,"2",num_fs,"-5", super_s, "zodiac", date, "2001-01-01T01:01:01Z", val_b, "true", sparse_s, "one"); iclient.add(doc, null); iclient.add(doc, null); iclient.add(doc, null); // a couple of deleted docs iclient.add(sdoc("id", "2", cat_s, "B", where_s, "NJ", num_d, "-9", num_i, "-5", num_is,"3",num_is,"-1", num_fs,"3",num_fs,"-1.5", super_s,"superman", date,"2002-02-02T02:02:02Z", val_b, "false" , multi_ss,"a", multi_ss,"b" , Z_num_i, "0", Z_num_l,"0"), null); iclient.add(sdoc("id", "3"), null); iclient.commit(); iclient.add(sdoc("id", "4", cat_s, "A", where_s, "NJ", num_d, "2", sparse_num_d,"-4",num_i, "3", num_is,"0",num_is,"3", num_fs,"0", num_fs,"3", super_s,"spiderman", date,"2003-03-03T03:03:03Z" , multi_ss, "b", Z_num_i, ""+Integer.MIN_VALUE, Z_num_l,Long.MIN_VALUE), null); iclient.add(sdoc("id", "5", cat_s, "B", where_s, "NJ", num_d, "11", num_i, "7", num_is,"0", num_fs,"0", super_s,"batman" , date,"2001-02-03T01:02:03Z" ,sparse_s,"two", multi_ss, "a"), null); iclient.commit(); iclient.add(sdoc("id", "6", cat_s, "B", where_s, "NY", num_d, "-5", num_i, "-5", num_is,"-1", num_fs,"-1.5", super_s,"hulk" , date,"2002-03-01T03:02:01Z" , multi_ss, "b", multi_ss, "a", Z_num_i, ""+Integer.MAX_VALUE, Z_num_l,Long.MAX_VALUE), null); iclient.commit(); client.commit(); // test for presence of debugging info ModifiableSolrParams debugP = params(p); debugP.set("debugQuery","true"); client.testJQ(params(debugP, "q", "*:*" , "json.facet", "{catA:{query:{q:'${cat_s}:A'}}, catA2:{query:{query:'${cat_s}:A'}}, catA3:{query:'${cat_s}:A'} }" ) , "facets=={ 'count':6, 'catA':{ 'count':2}, 'catA2':{ 'count':2}, 'catA3':{ 'count':2}}" , "debug/facet-trace==" // just test for presence, not exact structure / values ); // straight query facets client.testJQ(params(p, "q", "*:*" , "json.facet", "{catA:{query:{q:'${cat_s}:A'}}, catA2:{query:{query:'${cat_s}:A'}}, catA3:{query:'${cat_s}:A'} }" ) , "facets=={ 'count':6, 'catA':{ 'count':2}, 'catA2':{ 'count':2}, 'catA3':{ 'count':2}}" ); // nested query facets client.testJQ(params(p, "q", "*:*" , "json.facet", "{ catB:{type:query, q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} }}" ) , "facets=={ 'count':6, 'catB':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}}" ); // nested query facets on subset client.testJQ(params(p, "q", "id:(2 3)" , 
"json.facet", "{ catB:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} }}}" ) , "facets=={ 'count':2, 'catB':{'count':1, 'nj':{'count':1}, 'ny':{'count':0}}}" ); // nested query facets with stats client.testJQ(params(p, "q", "*:*" , "json.facet", "{ catB:{query:{q:'${cat_s}:B', facet:{nj:{query:{q:'${where_s}:NJ'}}, ny:{query:'${where_s}:NY'}} }}}" ) , "facets=={ 'count':6, 'catB':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}}" ); // field/terms facet client.testJQ(params(p, "q", "*:*" , "json.facet", "{c1:{field:'${cat_s}'}, c2:{field:{field:'${cat_s}'}}, c3:{${terms} type:terms, field:'${cat_s}'} }" ) , "facets=={ 'count':6, " + "'c1':{ 'buckets':[{ 'val':'B', 'count':3}, { 'val':'A', 'count':2}]}, " + "'c2':{ 'buckets':[{ 'val':'B', 'count':3}, { 'val':'A', 'count':2}]}, " + "'c3':{ 'buckets':[{ 'val':'B', 'count':3}, { 'val':'A', 'count':2}]}} " ); // test mincount client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', mincount:3}}}" ) , "facets=={ 'count':6, " + "'f1':{ 'buckets':[{ 'val':'B', 'count':3}]} } " ); // test default mincount of 1 client.testJQ(params(p, "q", "id:1" , "json.facet", "{f1:{terms:'${cat_s}'}}" ) , "facets=={ 'count':1, " + "'f1':{ 'buckets':[{ 'val':'A', 'count':1}]} } " ); // test mincount of 0 - need processEmpty for distrib to match up client.testJQ(params(p, "q", "id:1" , "json.facet", "{processEmpty:true, f1:{terms:{${terms} field:'${cat_s}', mincount:0}}}" ) , "facets=={ 'count':1, " + "'f1':{ 'buckets':[{ 'val':'A', 'count':1}, { 'val':'B', 'count':0}]} } " ); // test mincount of 0 with stats, need processEmpty for distrib to match up client.testJQ(params(p, "q", "id:1" , "json.facet", "{processEmpty:true, f1:{terms:{${terms} field:'${cat_s}', mincount:0, allBuckets:true, facet:{n1:'sum(${num_d})'} }}}" ) , "facets=={ 'count':1, " + "'f1':{ allBuckets:{ 'count':1, n1:4.0}, 'buckets':[{ 'val':'A', 'count':1, n1:4.0}, { 'val':'B', 'count':0 /*, n1:0.0 */ }]} } " ); // test sorting by other stats client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'sum(${num_d})'} }}" + " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'sum(${num_d})'} }} }" ) , "facets=={ 'count':6, " + " f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 }, { val:'B', count:3, n1:-3.0}]}" + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0}, { val:'A', count:2, n1:6.0 }]} }" ); // test trivial re-sorting by stats // (there are other more indepth tests of this in doTestPrelimSorting, but this let's us sanity check // small responses with multiple templatized params of diff real types) client.testJQ(params(p, "q", "*:*", "json.facet" // num_d , "{f1:{terms:{${terms} field:'${cat_s}', " + " prelim_sort:'count desc', sort:'n1 desc', facet:{n1:'sum(${num_d})'} }}," + " f2:{terms:{${terms} field:'${cat_s}', " + " prelim_sort:'count asc', sort:'n1 asc', facet:{n1:'sum(${num_d})'} }} }" ) , "facets=={ 'count':6 " + ", f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 }, { val:'B', count:3, n1:-3.0}]}" + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0}, { val:'A', count:2, n1:6.0 }]} }" ); client.testJQ(params(p, "q", "*:*", "json.facet" // num_i , "{f1:{terms:{${terms} field:'${cat_s}', " + " prelim_sort:'count desc', sort:'n1 desc', facet:{n1:'sum(${num_i})'} }}," + " f2:{terms:{${terms} field:'${cat_s}', " + " prelim_sort:'count asc', sort:'n1 asc', facet:{n1:'sum(${num_i})'} }} }" ) , "facets=={ 'count':6 " + ", f1:{ 'buckets':[{ val:'A', count:2, n1:5.0 
}, { val:'B', count:3, n1:-3.0}]}" + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0}, { val:'A', count:2, n1:5.0 }]} }" ); // test sorting by other stats and more than one facet client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'sum(${num_d})', n2:'avg(${num_d})'} }}" + " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc' , facet:{n1:'sum(${num_d})', n2:'avg(${num_d})'} }} }" ) , "facets=={ 'count':6, " + " f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 , n2:3.0 }, { val:'B', count:3, n1:-3.0, n2:-1.0}]}" + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0, n2:-1.0}, { val:'A', count:2, n1:6.0 , n2:3.0 }]} }" ); // test sorting by other stats client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'min(${num_d})'} }" + " , f2:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'max(${num_d})'} } " + " , f3:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'unique(${where_s})'} } " + " , f4:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'hll(${where_s})'} } " + " , f5:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'variance(${num_d})'} } " + " , f6:{type:terms, field:${num_d}, limit:1, sort:'x desc', facet:{x:'hll(${num_i})'} } " + // facet on a field that will cause hashing and exercise hll.resize on numeric field "}" ) , "facets=={ 'count':6, " + " f1:{ 'buckets':[{ val:'A', count:2, x:2.0 }, { val:'B', count:3, x:-9.0}]}" + ", f2:{ 'buckets':[{ val:'B', count:3, x:11.0 }, { val:'A', count:2, x:4.0 }]} " + ", f3:{ 'buckets':[{ val:'A', count:2, x:2 }, { val:'B', count:3, x:2 }]} " + ", f4:{ 'buckets':[{ val:'A', count:2, x:2 }, { val:'B', count:3, x:2 }]} " + ", f5:{ 'buckets':[{ val:'B', count:3, x:74.6666666666666 }, { val:'A', count:2, x:1.0 }]} " + ", f6:{ buckets:[{ val:-9.0, count:1, x:1 }]} " + "}" ); // test sorting by stat with function client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'avg(add(${num_d},${num_d}))'} }}" + " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'avg(add(${num_d},${num_d}))'} }} }" ) , "facets=={ 'count':6, " + " f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 }, { val:'B', count:3, n1:-2.0}]}" + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-2.0}, { val:'A', count:2, n1:6.0 }]} }" ); // facet on numbers to test resize from hashing (may need to be sorting by the metric to test that) client.testJQ(params(p, "q", "*:*" , "json.facet", "{" + " f1:{${terms} type:field, field:${num_is}, facet:{a:'min(${num_i})'}, sort:'a asc' }" + ",f2:{${terms} type:field, field:${num_is}, facet:{a:'max(${num_i})'}, sort:'a desc' }" + "}" ) , "facets=={count:6 " + ",f1:{ buckets:[{val:-1,count:2,a:-5},{val:3,count:2,a:-5},{val:-5,count:1,a:2},{val:2,count:1,a:2},{val:0,count:2,a:3} ] } " + ",f2:{ buckets:[{val:0,count:2,a:7},{val:3,count:2,a:3},{val:-5,count:1,a:2},{val:2,count:1,a:2},{val:-1,count:2,a:-5} ] } " + "}" ); // Same thing for dates // test min/max of string field if (date.equals("date_dt") || date.equals("date_dtd")) { // supports only single valued currently... 
see SOLR-11706 client.testJQ(params(p, "q", "*:*" , "json.facet", "{" + " f3:{${terms} type:field, field:${num_is}, facet:{a:'min(${date})'}, sort:'a desc' }" + ",f4:{${terms} type:field, field:${num_is}, facet:{a:'max(${date})'}, sort:'a asc' }" + "}" ) , "facets=={count:6 " + ",f3:{ buckets:[{val:-1,count:2,a:'2002-02-02T02:02:02Z'},{val:3,count:2,a:'2002-02-02T02:02:02Z'},{val:0,count:2,a:'2001-02-03T01:02:03Z'},{val:-5,count:1,a:'2001-01-01T01:01:01Z'},{val:2,count:1,a:'2001-01-01T01:01:01Z'} ] } " + ",f4:{ buckets:[{val:-5,count:1,a:'2001-01-01T01:01:01Z'},{val:2,count:1,a:'2001-01-01T01:01:01Z'},{val:-1,count:2,a:'2002-03-01T03:02:01Z'},{val:0,count:2,a:'2003-03-03T03:03:03Z'},{val:3,count:2,a:'2003-03-03T03:03:03Z'} ] } " + "}" ); } // test field faceting on date field client.testJQ(params(p, "q", "*:*" , "json.facet", "{" + " f1:{${terms} type:field, field:${date}}" + ",f2:{${terms} type:field, field:${date} sort:'index asc'}" + ",f3:{${terms} type:field, field:${date} sort:'index desc'}" + // ",f4:{${terms} type:field, field:${date}, facet:{x:{type:field,field:${num_is},limit:1}} }" + "}" ) , "facets=={count:6 " + ",f1:{ buckets:[ {val:'2001-01-01T01:01:01Z', count:1},{val:'2001-02-03T01:02:03Z', count:1},{val:'2002-02-02T02:02:02Z', count:1},{val:'2002-03-01T03:02:01Z', count:1},{val:'2003-03-03T03:03:03Z', count:1} ] }" + ",f2:{ buckets:[ {val:'2001-01-01T01:01:01Z', count:1},{val:'2001-02-03T01:02:03Z', count:1},{val:'2002-02-02T02:02:02Z', count:1},{val:'2002-03-01T03:02:01Z', count:1},{val:'2003-03-03T03:03:03Z', count:1} ] }" + ",f3:{ buckets:[ {val:'2003-03-03T03:03:03Z', count:1},{val:'2002-03-01T03:02:01Z', count:1},{val:'2002-02-02T02:02:02Z', count:1},{val:'2001-02-03T01:02:03Z', count:1},{val:'2001-01-01T01:01:01Z', count:1} ] }" + "}" ); // percentiles 0,10,50,90,100 // catA: 2.0 2.2 3.0 3.8 4.0 // catB: -9.0 -8.2 -5.0 7.800000000000001 11.0 // all: -9.0 -7.3999999999999995 2.0 8.200000000000001 11.0 // test sorting by single percentile client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'percentile(${num_d},50)'} }}" + " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'percentile(${num_d},50)'} }} " + " , f3:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'percentile(${sparse_num_d},50)'} }} " + "}" ) , "facets=={ 'count':6, " + " f1:{ 'buckets':[{ val:'A', count:2, n1:3.0 }, { val:'B', count:3, n1:-5.0}]}" + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-5.0}, { val:'A', count:2, n1:3.0 }]}" + ", f3:{ 'buckets':[{ val:'A', count:2, n1:1.0}, { val:'B', count:3}]}" + "}" ); // test sorting by multiple percentiles (sort is by first) client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:${cat_s}, sort:'n1 desc', facet:{n1:'percentile(${num_d},50,0,100)'} }}" + " , f2:{terms:{${terms} field:${cat_s}, sort:'n1 asc', facet:{n1:'percentile(${num_d},50,0,100)'} }} }" ) , "facets=={ 'count':6, " + " f1:{ 'buckets':[{ val:'A', count:2, n1:[3.0,2.0,4.0] }, { val:'B', count:3, n1:[-5.0,-9.0,11.0] }]}" + ", f2:{ 'buckets':[{ val:'B', count:3, n1:[-5.0,-9.0,11.0]}, { val:'A', count:2, n1:[3.0,2.0,4.0] }]} }" ); // test sorting by count/index order client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'count desc' } }" + " , f2:{terms:{${terms} field:'${cat_s}', sort:'count asc' } }" + " , f3:{terms:{${terms} field:'${cat_s}', sort:'index asc' } }" + " , f4:{terms:{${terms} field:'${cat_s}', sort:'index desc' } }" + "}" ) , 
"facets=={ count:6 " + " ,f1:{buckets:[ {val:B,count:3}, {val:A,count:2} ] }" + " ,f2:{buckets:[ {val:A,count:2}, {val:B,count:3} ] }" + " ,f3:{buckets:[ {val:A,count:2}, {val:B,count:3} ] }" + " ,f4:{buckets:[ {val:B,count:3}, {val:A,count:2} ] }" + "}" ); // test sorting by default count/index order client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'count' } }" + " , f2:{terms:{${terms} field:'${cat_s}', sort:'count asc' } }" + " , f3:{terms:{${terms} field:'${cat_s}', sort:'index' } }" + " , f4:{terms:{${terms} field:'${cat_s}', sort:'index desc' } }" + "}" ) , "facets=={ count:6 " + " ,f1:{buckets:[ {val:B,count:3}, {val:A,count:2} ] }" + " ,f2:{buckets:[ {val:A,count:2}, {val:B,count:3} ] }" + " ,f3:{buckets:[ {val:A,count:2}, {val:B,count:3} ] }" + " ,f4:{buckets:[ {val:B,count:3}, {val:A,count:2} ] }" + "}" ); // test tiebreaks when sorting by count client.testJQ(params(p, "q", "id:1 id:6" , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'count desc' } }" + " , f2:{terms:{${terms} field:'${cat_s}', sort:'count asc' } }" + "}" ) , "facets=={ count:2 " + " ,f1:{buckets:[ {val:A,count:1}, {val:B,count:1} ] }" + " ,f2:{buckets:[ {val:A,count:1}, {val:B,count:1} ] }" + "}" ); // terms facet with nested query facet client.testJQ(params(p, "q", "*:*" , "json.facet", "{cat:{terms:{${terms} field:'${cat_s}', facet:{nj:{query:'${where_s}:NJ'}} } }} }" ) , "facets=={ 'count':6, " + "'cat':{ 'buckets':[{ 'val':'B', 'count':3, 'nj':{ 'count':2}}, { 'val':'A', 'count':2, 'nj':{ 'count':1}}]} }" ); // terms facet with nested query facet on subset client.testJQ(params(p, "q", "id:(2 5 4)" , "json.facet", "{cat:{terms:{${terms} field:'${cat_s}', facet:{nj:{query:'${where_s}:NJ'}} } }} }" ) , "facets=={ 'count':3, " + "'cat':{ 'buckets':[{ 'val':'B', 'count':2, 'nj':{ 'count':2}}, { 'val':'A', 'count':1, 'nj':{ 'count':1}}]} }" ); // test prefix client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:${super_s}, prefix:s, mincount:0 }}}" // even with mincount=0, we should only see buckets with the prefix ) , "facets=={ 'count':6, " + "'f1':{ 'buckets':[{val:spiderman, count:1}, {val:superman, count:1}]} } " ); // test prefix that doesn't exist client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:${super_s}, prefix:ttt, mincount:0 }}}" ) , "facets=={ 'count':6, " + "'f1':{ 'buckets':[]} } " ); // test prefix that doesn't exist at start client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:${super_s}, prefix:aaaaaa, mincount:0 }}}" ) , "facets=={ 'count':6, " + "'f1':{ 'buckets':[]} } " ); // test prefix that doesn't exist at end client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:${super_s}, prefix:zzzzzz, mincount:0 }}}" ) , "facets=={ 'count':6, " + "'f1':{ 'buckets':[]} } " ); // test prefix on where field client.testJQ(params(p, "q", "*:*" , "json.facet", "{" + " f1:{${terms} type:terms, field:${where_s}, prefix:N }" + ",f2:{${terms} type:terms, field:${where_s}, prefix:NY }" + ",f3:{${terms} type:terms, field:${where_s}, prefix:NJ }" + "}" ) , "facets=={ 'count':6 " + ",f1:{ 'buckets':[ {val:NJ,count:3}, {val:NY,count:2} ]}" + ",f2:{ 'buckets':[ {val:NY,count:2} ]}" + ",f3:{ 'buckets':[ {val:NJ,count:3} ]}" + " } " ); // test prefix on real multi-valued field client.testJQ(params(p, "q", "*:*" , "json.facet", "{" + " f1:{${terms} type:terms, field:${multi_ss}, prefix:A }" + ",f2:{${terms} type:terms, field:${multi_ss}, prefix:z }" + 
",f3:{${terms} type:terms, field:${multi_ss}, prefix:aa }" + ",f4:{${terms} type:terms, field:${multi_ss}, prefix:bb }" + ",f5:{${terms} type:terms, field:${multi_ss}, prefix:a }" + ",f6:{${terms} type:terms, field:${multi_ss}, prefix:b }" + "}" ) , "facets=={ 'count':6 " + ",f1:{buckets:[]}" + ",f2:{buckets:[]}" + ",f3:{buckets:[]}" + ",f4:{buckets:[]}" + ",f5:{buckets:[ {val:a,count:3} ]}" + ",f6:{buckets:[ {val:b,count:3} ]}" + " } " ); // // missing // // test missing w/ non-existent field client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:${noexist}, missing:true}}}" ) , "facets=={ 'count':6, " + "'f1':{ 'buckets':[], missing:{count:6} } } " ); // test missing client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:${sparse_s}, missing:true }}}" ) , "facets=={ 'count':6, " + "'f1':{ 'buckets':[{val:one, count:1}, {val:two, count:1}], missing:{count:4} } } " ); // test missing with stats client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:${sparse_s}, missing:true, facet:{x:'sum(${num_d})'} }}}" ) , "facets=={ 'count':6, " + "'f1':{ 'buckets':[{val:one, count:1, x:4.0}, {val:two, count:1, x:11.0}], missing:{count:4, x:-12.0} } } " ); // test that the missing bucket is not affected by any prefix client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:${sparse_s}, missing:true, prefix:on, facet:{x:'sum(${num_d})'} }}}" ) , "facets=={ 'count':6, " + "'f1':{ 'buckets':[{val:one, count:1, x:4.0}], missing:{count:4, x:-12.0} } } " ); // test missing with prefix that doesn't exist client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms} field:${sparse_s}, missing:true, prefix:ppp, facet:{x:'sum(${num_d})'} }}}" ) , "facets=={ 'count':6, " + "'f1':{ 'buckets':[], missing:{count:4, x:-12.0} } } " ); // test numBuckets client.testJQ(params(p, "q", "*:*", "rows", "0", "facet", "true" , "json.facet", "{f1:{terms:{${terms_method} field:${cat_s}, numBuckets:true, limit:1}}}" // TODO: limit:0 produced an error ) , "facets=={ 'count':6, " + "'f1':{ numBuckets:2, buckets:[{val:B, count:3}]} } " ); // prefix should lower numBuckets client.testJQ(params(p, "q", "*:*", "rows", "0", "facet", "true" , "json.facet", "{f1:{terms:{${terms} field:${cat_s}, numBuckets:true, prefix:B}}}" ) , "facets=={ 'count':6, " + "'f1':{ numBuckets:1, buckets:[{val:B, count:3}]} } " ); // mincount should not lower numBuckets (since SOLR-10552) client.testJQ(params(p, "q", "*:*", "rows", "0", "facet", "true" , "json.facet", "{f1:{terms:{${terms} field:${cat_s}, numBuckets:true, mincount:3}}}" ) , "facets=={ 'count':6, " + "'f1':{ numBuckets:2, buckets:[{val:B, count:3}]} } " ); // basic range facet client.testJQ(params(p, "q", "*:*" , "json.facet", "{f:{type:range, field:${num_d}, start:-5, end:10, gap:5}}" ) , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1}, {val:0.0,count:2}, {val:5.0,count:0} ] } }" ); // basic range facet on dates client.testJQ(params(p, "q", "*:*" , "json.facet", "{f:{type:range, field:${date}, start:'2001-01-01T00:00:00Z', end:'2003-01-01T00:00:00Z', gap:'+1YEAR'}}" ) , "facets=={count:6, f:{buckets:[ {val:'2001-01-01T00:00:00Z',count:2}, {val:'2002-01-01T00:00:00Z',count:2}] } }" ); // range facet on dates w/ stats client.testJQ(params(p, "q", "*:*" , "json.facet", "{f:{type:range, field:${date}, start:'2002-01-01T00:00:00Z', end:'2005-01-01T00:00:00Z', gap:'+1YEAR', other:all, facet:{ x:'avg(${num_d})' } } }" ) , "facets=={count:6, f:{buckets:[ 
{val:'2002-01-01T00:00:00Z',count:2,x:-7.0}, {val:'2003-01-01T00:00:00Z',count:1,x:2.0}, {val:'2004-01-01T00:00:00Z',count:0}], before:{count:2,x:7.5}, after:{count:0}, between:{count:3,x:-4.0} } }" ); // basic range facet with "include" params client.testJQ(params(p, "q", "*:*" , "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, include:upper}}}" ) , "facets=={count:6, f:{buckets:[ {val:-5.0,count:0}, {val:0.0,count:2}, {val:5.0,count:0} ] } }" ); // range facet with sub facets and stats client.testJQ(params(p, "q", "*:*" , "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}" ) , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */ } ] } }" ); // range facet with sub facets and stats, with "other:all" client.testJQ(params(p, "q", "*:*" , "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}" ) , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */} ]" + ",before: {count:1,x:-5.0,ny:{count:0}}" + ",after: {count:1,x:7.0, ny:{count:0}}" + ",between:{count:3,x:0.0, ny:{count:2}}" + " } }" ); // range facet with mincount client.testJQ(params(p, "q", "*:*" , "json.facet", "{f:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, mincount:2, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}" ) , "facets=={count:6, f:{buckets:[ {val:0.0,count:2,x:5.0,ny:{count:1}} ]" + ",before: {count:1,x:-5.0,ny:{count:0}}" + ",after: {count:1,x:7.0, ny:{count:0}}" + ",between:{count:3,x:0.0, ny:{count:2}}" + " } }" ); // sparse range facet (with sub facets and stats), with "other:all" client.testJQ(params(p, "q", "*:*", "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:1, other:all, "+ " facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}" ) , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1, x:-5.0,ny:{count:1}}, "+ " {val:-4.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ " {val:-3.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ " {val:-2.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ " {val:-1.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ " {val: 0.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ " {val: 1.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ " {val: 2.0,count:1, x:3.0,ny:{count:0}} , "+ " {val: 3.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ " {val: 4.0,count:1, x:2.0,ny:{count:1}} , "+ " {val: 5.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ " {val: 6.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ " {val: 7.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ " {val: 8.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ " {val: 9.0,count:0 /* ,x:0.0,ny:{count:0} */}"+ " ]" + " ,before: {count:1,x:-5.0,ny:{count:0}}" + " ,after: {count:1,x:7.0, ny:{count:0}}" + " ,between:{count:3,x:0.0, ny:{count:2}}" + " } }" ); // sparse range facet (with sub facets and stats), with "other:all" & mincount==1 client.testJQ(params(p, "q", "*:*", "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:1, other:all, mincount:1, "+ " facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}" ) , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1, x:-5.0,ny:{count:1}}, "+ " {val: 2.0,count:1, x:3.0,ny:{count:0}} , "+ " {val: 4.0,count:1, x:2.0,ny:{count:1}} "+ " ]" + " ,before: {count:1,x:-5.0,ny:{count:0}}" + " ,after: {count:1,x:7.0, ny:{count:0}}" + " 
,between:{count:3,x:0.0, ny:{count:2}}" +
            " } }"
    );

    // range facet with sub facets and stats, with "other:all", on subset
    client.testJQ(params(p, "q", "id:(3 4 6)"
        , "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}"
        )
        , "facets=={count:3, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:1,x:3.0,ny:{count:0}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */} ]" +
            ",before: {count:0 /* ,x:0.0,ny:{count:0} */ }" +
            ",after: {count:0 /* ,x:0.0,ny:{count:0} */}" +
            ",between:{count:2,x:-2.0, ny:{count:1}}" +
            " } }"
    );

    // range facet with stats on string fields
    client.testJQ(params(p, "q", "*:*"
        , "json.facet", "{f:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ wn:'unique(${where_s})',wh:'hll(${where_s})' } }}"
        )
        , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,wn:1,wh:1}, {val:0.0,count:2,wn:2,wh:2}, {val:5.0,count:0}]" +
            " ,before:{count:1,wn:1,wh:1}" +
            " ,after:{count:1,wn:1,wh:1} " +
            " ,between:{count:3,wn:2,wh:2} " +
            " } }"
    );

    if (where_s.equals("where_s") || where_s.equals("where_sd")) { // min/max supports only single valued currently... see SOLR-11706
      client.testJQ(params(p, "q", "*:*"
          , "json.facet", "{f:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ wmin:'min(${where_s})', wmax:'max(${where_s})' } }}"
          )
          , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,wmin:NY,wmax:NY}, {val:0.0,count:2,wmin:NJ,wmax:NY}, {val:5.0,count:0}]" +
              " ,before:{count:1,wmin:NJ,wmax:NJ}" +
              " ,after:{count:1,wmin:NJ,wmax:NJ} " +
              " ,between:{count:3,wmin:NJ,wmax:NY} " +
              " } }"
      );
    }

    // stats at top level
    client.testJQ(params(p, "q", "*:*"
        , "json.facet", "{ sum1:'sum(${num_d})', sumsq1:'sumsq(${num_d})', avg1:'avg(${num_d})', avg2:'avg(def(${num_d},0))', mind:'min(${num_d})', maxd:'max(${num_d})'" +
            ", numwhere:'unique(${where_s})', unique_num_i:'unique(${num_i})', unique_num_d:'unique(${num_d})', unique_date:'unique(${date})'" +
            ", where_hll:'hll(${where_s})', hll_num_i:'hll(${num_i})', hll_num_d:'hll(${num_d})', hll_date:'hll(${date})'" +
            ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)', variance:'variance(${num_d})', stddev:'stddev(${num_d})'" +
            ", mini:'min(${num_i})', maxi:'max(${num_i})'" +
            " }"
        )
        , "facets=={ 'count':6, " +
            "sum1:3.0, sumsq1:247.0, avg1:0.6, avg2:0.5, mind:-9.0, maxd:11.0" +
            ", numwhere:2, unique_num_i:4, unique_num_d:5, unique_date:5" +
            ", where_hll:2, hll_num_i:4, hll_num_d:5, hll_date:5" +
            ", med:2.0, perc:[-9.0,2.0,11.0], variance:49.04, stddev:7.002856560004639" +
            ", mini:-5, maxi:7" +
            "}"
    );

    // stats at top level, no matches
    client.testJQ(params(p, "q", "id:DOESNOTEXIST"
        , "json.facet", "{ sum1:'sum(${num_d})', sumsq1:'sumsq(${num_d})', avg1:'avg(${num_d})', min1:'min(${num_d})', max1:'max(${num_d})'" +
            ", numwhere:'unique(${where_s})', unique_num_i:'unique(${num_i})', unique_num_d:'unique(${num_d})', unique_date:'unique(${date})'" +
            ", where_hll:'hll(${where_s})', hll_num_i:'hll(${num_i})', hll_num_d:'hll(${num_d})', hll_date:'hll(${date})'" +
            ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)', variance:'variance(${num_d})', stddev:'stddev(${num_d})' }"
        )
        , "facets=={count:0 " +
            "\n// ,sum1:0.0, sumsq1:0.0, avg1:0.0, min1:'NaN', max1:'NaN', numwhere:0 \n" +
            " }"
    );

    // stats at top level, matching documents, but no values in the field
    // NOTE: this represents the current state of what is returned, not the ultimate desired state.
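// (i.e. for a bucket of matching docs with no actual field values, several aggs
//  currently report 0 / 0.0 rather than omitting themselves entirely)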
client.testJQ(params(p, "q", "id:3" , "json.facet", "{ sum1:'sum(${num_d})', sumsq1:'sumsq(${num_d})', avg1:'avg(${num_d})', min1:'min(${num_d})', max1:'max(${num_d})'" + ", numwhere:'unique(${where_s})', unique_num_i:'unique(${num_i})', unique_num_d:'unique(${num_d})', unique_date:'unique(${date})'" + ", where_hll:'hll(${where_s})', hll_num_i:'hll(${num_i})', hll_num_d:'hll(${num_d})', hll_date:'hll(${date})'" + ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)', variance:'variance(${num_d})', stddev:'stddev(${num_d})' }" ) , "facets=={count:1 " + ",sum1:0.0," + " sumsq1:0.0," + " avg1:0.0," + // TODO: undesirable. omit? // " min1:'NaN'," + // " max1:'NaN'," + " numwhere:0," + " unique_num_i:0," + " unique_num_d:0," + " unique_date:0," + " where_hll:0," + " hll_num_i:0," + " hll_num_d:0," + " hll_date:0," + " variance:0.0," + " stddev:0.0" + " }" ); // // tests on a multi-valued field with actual multiple values, just to ensure that we are // using a multi-valued method for the rest of the tests when appropriate. // client.testJQ(params(p, "q", "*:*" , "json.facet", "{cat:{terms:{${terms} field:'${multi_ss}', facet:{nj:{query:'${where_s}:NJ'}} } }} }" ) , "facets=={ 'count':6, " + "'cat':{ 'buckets':[{ 'val':'a', 'count':3, 'nj':{ 'count':2}}, { 'val':'b', 'count':3, 'nj':{ 'count':2}}]} }" ); // test unique on multi-valued field client.testJQ(params(p, "q", "*:*" , "json.facet", "{" + "x:'unique(${multi_ss})'" + ",y:{query:{q:'id:2', facet:{x:'unique(${multi_ss})'} }} " + ",x2:'hll(${multi_ss})'" + ",y2:{query:{q:'id:2', facet:{x:'hll(${multi_ss})'} }} " + " }" ) , "facets=={count:6 " + ",x:2" + ",y:{count:1, x:2}" + // single document should yield 2 unique values ",x2:2" + ",y2:{count:1, x:2}" + // single document should yield 2 unique values " }" ); // test allBucket multi-valued client.testJQ(params(p, "q", "*:*" , "json.facet", "{x:{terms:{${terms} field:'${multi_ss}',allBuckets:true}}}" ) , "facets=={ count:6, " + "x:{ buckets:[{val:a, count:3}, {val:b, count:3}] , allBuckets:{count:6} } }" ); // allBuckets for multi-valued field with stats. This can sometimes take a different path of adding complete DocSets to the Acc // also test limit:0 client.testJQ(params(p, "q", "*:*" , "json.facet", "{" + " f0:{${terms_method} type:terms, field:${multi_ss}, allBuckets:true, limit:0} " + ",f1:{${terms_method} type:terms, field:${multi_ss}, allBuckets:true, limit:0, offset:1} " + // offset with 0 limit ",f2:{${terms_method} type:terms, field:${multi_ss}, allBuckets:true, limit:0, facet:{x:'sum(${num_d})'}, sort:'x desc' } " + ",f3:{${terms_method} type:terms, field:${multi_ss}, allBuckets:true, limit:0, missing:true, facet:{x:'sum(${num_d})', y:'avg(${num_d})'}, sort:'x desc' } " + "}" ) , "facets=={ 'count':6, " + " f0:{allBuckets:{count:6}, buckets:[]}" + ",f1:{allBuckets:{count:6}, buckets:[]}" + ",f2:{allBuckets:{count:6, x:-15.0}, buckets:[]} " + ",f3:{allBuckets:{count:6, x:-15.0, y:-2.5}, buckets:[], missing:{count:2, x:4.0, y:4.0} }} " + "}" ); // allBuckets with numeric field with stats. 
// also test limit:0
    client.testJQ(params(p, "q", "*:*"
        , "json.facet", "{" +
            " f0:{${terms_method} type:terms, field:${num_i}, allBuckets:true, limit:0} " +
            ",f1:{${terms_method} type:terms, field:${num_i}, allBuckets:true, limit:0, offset:1} " + // offset with 0 limit
            ",f2:{${terms_method} type:terms, field:${num_i}, allBuckets:true, limit:0, facet:{x:'sum(${num_d})'}, sort:'x desc' } " +
            "}"
        )
        , "facets=={ 'count':6, " +
            " f0:{allBuckets:{count:5}, buckets:[]}" +
            ",f1:{allBuckets:{count:5}, buckets:[]}" +
            ",f2:{allBuckets:{count:5, x:3.0}, buckets:[]} " +
            "}"
    );

    //////////////////////////////////////////////////////////////////////////////////////////////////////////
    // test converting legacy facets

    // test mincount
    client.testJQ(params(p, "q", "*:*"
        // , "json.facet", "{f1:{terms:{field:'${cat_s}', mincount:3}}}"
        , "facet","true", "facet.version", "2", "facet.field","{!key=f1}${cat_s}", "facet.mincount","3"
        )
        , "facets=={ 'count':6, " +
            "'f1':{ 'buckets':[{ 'val':'B', 'count':3}]} } "
    );

    // test prefix
    client.testJQ(params(p, "q", "*:*"
        // , "json.facet", "{f1:{terms:{field:${super_s}, prefix:s, mincount:0 }}}" // even with mincount=0, we should only see buckets with the prefix
        , "facet","true", "facet.version", "2", "facet.field","{!key=f1}${super_s}", "facet.prefix","s", "facet.mincount","0"
        )
        , "facets=={ 'count':6, " +
            "'f1':{ 'buckets':[{val:spiderman, count:1}, {val:superman, count:1}]} } "
    );

    // range facet with sub facets and stats
    client.testJQ(params(p, "q", "*:*"
        // , "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}"
        , "facet","true", "facet.version", "2", "facet.range","{!key=f}${num_d}", "facet.range.start","-5", "facet.range.end","10", "facet.range.gap","5"
        , "f.f.facet.stat","x:sum(${num_i})", "subfacet.f.query","{!key=ny}${where_s}:NY"
        )
        , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */ } ] } }"
    );

    // test sorting by stat
    client.testJQ(params(p, "q", "*:*"
        // , "json.facet", "{f1:{terms:{field:'${cat_s}', sort:'n1 desc', facet:{n1:'sum(${num_d})'} }}" +
        // " , f2:{terms:{field:'${cat_s}', sort:'n1 asc', facet:{n1:'sum(${num_d})'} }} }"
        , "facet","true", "facet.version", "2", "facet.field","{!key=f1}${cat_s}", "f.f1.facet.sort","n1 desc", "facet.stat","n1:sum(${num_d})"
        , "facet.field","{!key=f2}${cat_s}", "f.f2.facet.sort","n1 asc"
        )
        , "facets=={ 'count':6, " +
            " f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 }, { val:'B', count:3, n1:-3.0}]}" +
            ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0}, { val:'A', count:2, n1:6.0 }]} }"
    );

    // range facet with sub facets and stats, with "other:all", on subset
    client.testJQ(params(p, "q", "id:(3 4 6)"
        //, "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}"
        , "facet","true", "facet.version", "2", "facet.range","{!key=f}${num_d}", "facet.range.start","-5", "facet.range.end","10", "facet.range.gap","5"
        , "f.f.facet.stat","x:sum(${num_i})", "subfacet.f.query","{!key=ny}${where_s}:NY", "facet.range.other","all"
        )
        , "facets=={count:3, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:1,x:3.0,ny:{count:0}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */} ]" +
            ",before: {count:0 /* ,x:0.0,ny:{count:0} */ }" +
            ",after: {count:0 /* ,x:0.0,ny:{count:0} */}" +
            ",between:{count:2,x:-2.0, ny:{count:1}}" +
            " } }"
    );
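// NOTE: each legacy-style request above keeps its equivalent json.facet request
// in the commented-out line(s) preceding it, so the two syntaxes can be compared
// directly.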
//////////////////////////////////////////////////////////////////////////////////////////// // multi-select / exclude tagged filters via excludeTags //////////////////////////////////////////////////////////////////////////////////////////// // test uncached multi-select (see SOLR-8496) client.testJQ(params(p, "q", "{!cache=false}*:*", "fq","{!tag=doc3,allfilt}-id:3" , "json.facet", "{" + "f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } " + "}" ) , "facets=={ count:5, " + " f1:{ buckets:[ {val:B, count:3}, {val:A, count:2} ] }" + "}" ); // test sub-facets of empty buckets with domain filter exclusions (canProduceFromEmpty) (see SOLR-9519) client.testJQ(params(p, "q", "*:*", "fq","{!tag=doc3}id:non-exist", "fq","{!tag=CATA}${cat_s}:A" , "json.facet", "{" + "f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } " + ",q1 :{type:query, q:'*:*', facet:{ f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } } } " + // nested under query ",q1a:{type:query, q:'id:4', facet:{ f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } } } " + // nested under query, make sure id:4 filter still applies ",r1 :{type:range, field:${num_d}, start:0, gap:3, end:5, facet:{ f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } } } " + // nested under range, make sure range constraints still apply ",f2:{${terms} type:terms, field:${cat_s}, domain:{filter:'*:*'} } " + // domain filter doesn't widen, so f2 should not appear. "}" ) , "facets=={ count:0, " + " f1:{ buckets:[ {val:A, count:2} ] }" + ",q1:{ count:0, f1:{buckets:[{val:A, count:2}]} }" + ",q1a:{ count:0, f1:{buckets:[{val:A, count:1}]} }" + ",r1:{ buckets:[ {val:0.0,count:0,f1:{buckets:[{val:A, count:1}]}}, {val:3.0,count:0,f1:{buckets:[{val:A, count:1}]}} ] }" + "}" ); // nested query facets on subset (with excludeTags) client.testJQ(params(p, "q", "*:*", "fq","{!tag=abc}id:(2 3)" , "json.facet", "{ processEmpty:true," + " f1:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:[xyz,qaz]}}" + ",f2:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:abc }}" + ",f3:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:'xyz,abc,qaz' }}" + ",f4:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:[xyz , abc , qaz] }}" + ",f5:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:[xyz,qaz]}}" + // this is repeated, but it did fail when a single context was shared among sub-facets ",f6:{query:{q:'${cat_s}:B', facet:{processEmpty:true, nj:{query:'${where_s}:NJ'}, ny:{ type:query, q:'${where_s}:NY', excludeTags:abc}} }}" + // exclude in a sub-facet ",f7:{query:{q:'${cat_s}:B', facet:{processEmpty:true, nj:{query:'${where_s}:NJ'}, ny:{ type:query, q:'${where_s}:NY', excludeTags:xyz}} }}" + // exclude in a sub-facet that doesn't match "}" ) , "facets=={ 'count':2, " + " 'f1':{'count':1, 'nj':{'count':1}, 'ny':{'count':0}}" + ",'f2':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}" + ",'f3':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}" + ",'f4':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}" + ",'f5':{'count':1, 'nj':{'count':1}, 'ny':{'count':0}}" + ",'f6':{'count':1, 'nj':{'count':1}, 'ny':{'count':1}}" + ",'f7':{'count':1, 'nj':{'count':1}, 'ny':{'count':0}}" + "}" ); // terms facet with nested query facet (with excludeTags, using new format 
inside domain:{}) client.testJQ(params(p, "q", "{!cache=false}*:*", "fq", "{!tag=doc6,allfilt}-id:6", "fq","{!tag=doc3,allfilt}-id:3" , "json.facet", "{processEmpty:true, " + " f0:{${terms} type:terms, field:${cat_s}, facet:{nj:{query:'${where_s}:NJ'}} } " + ",f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3}, missing:true, facet:{nj:{query:'${where_s}:NJ'}} } " + ",f2:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:allfilt},missing:true, facet:{nj:{query:'${where_s}:NJ'}} } " + ",f3:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc6}, missing:true, facet:{nj:{query:'${where_s}:NJ'}} } " + "}" ) , "facets=={ count:4, " + " f0:{ buckets:[ {val:A, count:2, nj:{ count:1}}, {val:B, count:2, nj:{count:2}} ] }" + ",f1:{ buckets:[ {val:A, count:2, nj:{ count:1}}, {val:B, count:2, nj:{count:2}} ] , missing:{count:1,nj:{count:0}} }" + ",f2:{ buckets:[ {val:B, count:3, nj:{ count:2}}, {val:A, count:2, nj:{count:1}} ] , missing:{count:1,nj:{count:0}} }" + ",f3:{ buckets:[ {val:B, count:3, nj:{ count:2}}, {val:A, count:2, nj:{count:1}} ] , missing:{count:0} }" + "}" ); // range facet with sub facets and stats, with "other:all" (with excludeTags) client.testJQ(params(p, "q", "*:*", "fq", "{!tag=doc6,allfilt}-id:6", "fq","{!tag=doc3,allfilt}-id:3" , "json.facet", "{processEmpty:true " + ", f1:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} , domain:{excludeTags:allfilt} }" + ", f2:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }" + "}" ) , "facets=={count:4" + ",f1:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0} ]" + ",before: {count:1,x:-5.0,ny:{count:0}}" + ",after: {count:1,x:7.0, ny:{count:0}}" + ",between:{count:3,x:0.0, ny:{count:2}} }" + ",f2:{buckets:[ {val:-5.0,count:0}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0} ]" + ",before: {count:1,x:-5.0,ny:{count:0}}" + ",after: {count:1,x:7.0, ny:{count:0}}" + ",between:{count:2,x:5.0, ny:{count:1}} }" + "}" ); // // facet on numbers // client.testJQ(params(p, "q", "*:*" , "json.facet", "{" + " f1:{${terms} type:field, field:${num_i} }" + ",f2:{${terms} type:field, field:${num_i}, sort:'count asc' }" + ",f3:{${terms} type:field, field:${num_i}, sort:'index asc' }" + ",f4:{${terms} type:field, field:${num_i}, sort:'index desc' }" + ",f5:{${terms} type:field, field:${num_i}, sort:'index desc', limit:1, missing:true, allBuckets:true, numBuckets:true }" + ",f6:{${terms} type:field, field:${num_i}, sort:'index desc', mincount:2, numBuckets:true }" + // mincount should not lower numbuckets (since SOLR-10552) ",f7:{${terms} type:field, field:${num_i}, sort:'index desc', offset:2, numBuckets:true }" + // test offset ",f8:{${terms} type:field, field:${num_i}, sort:'index desc', offset:100, numBuckets:true }" + // test high offset ",f9:{${terms} type:field, field:${num_i}, sort:'x desc', facet:{x:'avg(${num_d})'}, missing:true, allBuckets:true, numBuckets:true }" + // test stats ",f10:{${terms} type:field, field:${num_i}, facet:{a:{query:'${cat_s}:A'}}, missing:true, allBuckets:true, numBuckets:true }" + // test subfacets ",f11:{${terms} type:field, field:${num_i}, facet:{a:'unique(${num_d})'} ,missing:true, allBuckets:true, sort:'a desc' }" + // test subfacet using unique on numeric field (this previously triggered a resizing bug) "}" ) , "facets=={count:6 " + ",f1:{ 
buckets:[{val:-5,count:2},{val:2,count:1},{val:3,count:1},{val:7,count:1} ] } " + ",f2:{ buckets:[{val:2,count:1},{val:3,count:1},{val:7,count:1},{val:-5,count:2} ] } " + ",f3:{ buckets:[{val:-5,count:2},{val:2,count:1},{val:3,count:1},{val:7,count:1} ] } " + ",f4:{ buckets:[{val:7,count:1},{val:3,count:1},{val:2,count:1},{val:-5,count:2} ] } " + ",f5:{ buckets:[{val:7,count:1}] , numBuckets:4, allBuckets:{count:5}, missing:{count:1} } " + ",f6:{ buckets:[{val:-5,count:2}] , numBuckets:4 } " + ",f7:{ buckets:[{val:2,count:1},{val:-5,count:2}] , numBuckets:4 } " + ",f8:{ buckets:[] , numBuckets:4 } " + ",f9:{ buckets:[{val:7,count:1,x:11.0},{val:2,count:1,x:4.0},{val:3,count:1,x:2.0},{val:-5,count:2,x:-7.0} ], numBuckets:4, allBuckets:{count:5,x:0.6},missing:{count:1,x:0.0} } " + // TODO: should missing exclude "x" because no values were collected? ",f10:{ buckets:[{val:-5,count:2,a:{count:0}},{val:2,count:1,a:{count:1}},{val:3,count:1,a:{count:1}},{val:7,count:1,a:{count:0}} ], numBuckets:4, allBuckets:{count:5},missing:{count:1,a:{count:0}} } " + ",f11:{ buckets:[{val:-5,count:2,a:2},{val:2,count:1,a:1},{val:3,count:1,a:1},{val:7,count:1,a:1} ] , missing:{count:1,a:0} , allBuckets:{count:5,a:5} } " + "}" ); // facet on a float field - shares same code with integers/longs currently, so we only need to test labels/sorting client.testJQ(params(p, "q", "*:*" , "json.facet", "{" + " f1:{${terms} type:field, field:${num_d} }" + ",f2:{${terms} type:field, field:${num_d}, sort:'index desc' }" + "}" ) , "facets=={count:6 " + ",f1:{ buckets:[{val:-9.0,count:1},{val:-5.0,count:1},{val:2.0,count:1},{val:4.0,count:1},{val:11.0,count:1} ] } " + ",f2:{ buckets:[{val:11.0,count:1},{val:4.0,count:1},{val:2.0,count:1},{val:-5.0,count:1},{val:-9.0,count:1} ] } " + "}" ); // test 0, min/max int/long client.testJQ(params(p, "q", "*:*" , "json.facet", "{" + " u : 'unique(${Z_num_i})'" + ", u2 : 'unique(${Z_num_l})'" + ", min1 : 'min(${Z_num_i})', max1 : 'max(${Z_num_i})'" + ", min2 : 'min(${Z_num_l})', max2 : 'max(${Z_num_l})'" + ", f1:{${terms} type:field, field:${Z_num_i} }" + ", f2:{${terms} type:field, field:${Z_num_l} }" + "}" ) , "facets=={count:6 " + ",u:3" + ",u2:3" + ",min1:" + Integer.MIN_VALUE + ",max1:" + Integer.MAX_VALUE + ",min2:" + Long.MIN_VALUE + ",max2:" + Long.MAX_VALUE + ",f1:{ buckets:[{val:" + Integer.MIN_VALUE + ",count:1},{val:0,count:1},{val:" + Integer.MAX_VALUE+",count:1}]} " + ",f2:{ buckets:[{val:" + Long.MIN_VALUE + ",count:1},{val:0,count:1},{val:" + Long.MAX_VALUE+",count:1}]} " + "}" ); // multi-valued integer client.testJQ(params(p, "q", "*:*" , "json.facet", "{ " + " c1:'unique(${num_is})', c2:'hll(${num_is})'" + ",f1:{${terms} type:terms, field:${num_is} } " + "}" ) , "facets=={ count:6 " + ", c1:5, c2:5" + ", f1:{ buckets:[ {val:-1,count:2},{val:0,count:2},{val:3,count:2},{val:-5,count:1},{val:2,count:1} ] } " + "} " ); // multi-valued float client.testJQ(params(p, "q", "*:*" , "json.facet", "{ " + " c1:'unique(${num_fs})', c2:'hll(${num_fs})'" + ",f1:{${terms} type:terms, field:${num_fs} } " + "}" ) , "facets=={ count:6 " + ", c1:5, c2:5" + ", f1:{ buckets:[ {val:-1.5,count:2},{val:0.0,count:2},{val:3.0,count:2},{val:-5.0,count:1},{val:2.0,count:1} ] } " + "} " ); client.testJQ(params(p, "q", "*:*" , "json.facet", "{" + // "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0}" + // overrequest=0 test needs predictable layout "cat1:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:1}" + ",catDef:{type:terms, field:${cat_s}, 
sort:'count desc', limit:1, overrequest:-1}" + // -1 is default overrequest ",catBig:{type:terms, field:${cat_s}, sort:'count desc', offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up "}" ) , "facets=={ count:6" + // ", cat0:{ buckets:[ {val:B,count:3} ] }" ", cat1:{ buckets:[ {val:B,count:3} ] }" + ", catDef:{ buckets:[ {val:B,count:3} ] }" + ", catBig:{ buckets:[ {val:A,count:2} ] }" + "}" ); // test filter client.testJQ(params(p, "q", "*:*", "myfilt","${cat_s}:A", "ff","-id:1", "ff","-id:2" , "json.facet", "{" + "t:{${terms} type:terms, field:${cat_s}, domain:{filter:[]} }" + // empty filter list ",t_filt:{${terms} type:terms, field:${cat_s}, domain:{filter:'${cat_s}:B'} }" + ",t_filt2 :{${terms} type:terms, field:${cat_s}, domain:{filter:'{!query v=$myfilt}'} }" + // test access to qparser and other query parameters ",t_filt2a:{${terms} type:terms, field:${cat_s}, domain:{filter:{param:myfilt} } }" + // test filter via "param" type ",t_filt3: {${terms} type:terms, field:${cat_s}, domain:{filter:['-id:1','-id:2']} }" + ",t_filt3a:{${terms} type:terms, field:${cat_s}, domain:{filter:{param:ff}} }" + // test multi-valued query parameter ",q:{type:query, q:'${cat_s}:B', domain:{filter:['-id:5']} }" + // also tests a top-level negative filter ",r:{type:range, field:${num_d}, start:-5, end:10, gap:5, domain:{filter:'-id:4'} }" + "}" ) , "facets=={ count:6, " + "t :{ buckets:[ {val:B, count:3}, {val:A, count:2} ] }" + ",t_filt :{ buckets:[ {val:B, count:3}] } " + ",t_filt2 :{ buckets:[ {val:A, count:2}] } " + ",t_filt2a:{ buckets:[ {val:A, count:2}] } " + ",t_filt3 :{ buckets:[ {val:B, count:2}, {val:A, count:1}] } " + ",t_filt3a:{ buckets:[ {val:B, count:2}, {val:A, count:1}] } " + ",q:{count:2}" + ",r:{buckets:[ {val:-5.0,count:1}, {val:0.0,count:1}, {val:5.0,count:0} ] }" + "}" ); // test acc reuse (i.e. reset() method). This is normally used for stats that are not calculated in the first phase, // currently non-sorting stats. client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{type:terms, field:'${cat_s}', facet:{h:'hll(${where_s})' , u:'unique(${where_s})', mind:'min(${num_d})', maxd:'max(${num_d})', mini:'min(${num_i})', maxi:'max(${num_i})'" + ", sumd:'sum(${num_d})', avgd:'avg(${num_d})', variance:'variance(${num_d})', stddev:'stddev(${num_d})' } }}" ) , "facets=={ 'count':6, " + "'f1':{ buckets:[{val:B, count:3, h:2, u:2, mind:-9.0, maxd:11.0, mini:-5, maxi:7, sumd:-3.0, avgd:-1.0, variance:74.66666666666667, stddev:8.640987597877148}," + " {val:A, count:2, h:2, u:2, mind:2.0, maxd:4.0, mini:2, maxi:3, sumd:6.0, avgd:3.0, variance:1.0, stddev:1.0}] } } " ); // test min/max of string field if (where_s.equals("where_s") || where_s.equals("where_sd")) { // supports only single valued currently... 
client.testJQ(params(p, "q", "*:* -(+${cat_s}:A +${where_s}:NJ)" // make NY the only value in bucket A , "json.facet", "{" + " f1:{type:terms, field:'${cat_s}', facet:{min:'min(${where_s})', max:'max(${where_s})'} }" + ", f2:{type:terms, field:'${cat_s}', facet:{min:'min(${where_s})', max:'max(${where_s})'} , sort:'min desc'}" + ", f3:{type:terms, field:'${cat_s}', facet:{min:'min(${where_s})', max:'max(${where_s})'} , sort:'min asc'}" + ", f4:{type:terms, field:'${cat_s}', facet:{min:'min(${super_s})', max:'max(${super_s})'} , sort:'max asc'}" + ", f5:{type:terms, field:'${cat_s}', facet:{min:'min(${super_s})', max:'max(${super_s})'} , sort:'max desc'}" + "}" ) , "facets=={ count:5, " + " f1:{ buckets:[{val:B, count:3, min:NJ, max:NY}, {val:A, count:1, min:NY, max:NY}]}" + ",f2:{ buckets:[{val:A, count:1, min:NY, max:NY}, {val:B, count:3, min:NJ, max:NY}]}" + ",f3:{ buckets:[{val:B, count:3, min:NJ, max:NY}, {val:A, count:1, min:NY, max:NY}]}" + ",f4:{ buckets:[{val:B, count:3, min:batman, max:superman}, {val:A, count:1, min:zodiac, max:zodiac}]}" + ",f5:{ buckets:[{val:A, count:1, min:zodiac, max:zodiac}, {val:B, count:3, min:batman, max:superman}]}" + " } " ); } //////////////////////////////////////////////////////////////// // test which phase stats are calculated in //////////////////////////////////////////////////////////////// if (client.local()) { long creates, resets; // NOTE: these test the current implementation and may need to be adjusted to match future optimizations (such as calculating N buckets in parallel in the second phase) creates = DebugAgg.Acc.creates.get(); resets = DebugAgg.Acc.resets.get(); client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms_method} field:${super_s}, limit:1, facet:{x:'debug()'} }}}" // x should be deferred to 2nd phase ) , "facets=={ 'count':6, " + "f1:{ buckets:[{ val:batman, count:1, x:1}]} } " ); assertEquals(1, DebugAgg.Acc.creates.get() - creates); assertTrue( DebugAgg.Acc.resets.get() - resets <= 1); assertTrue( DebugAgg.Acc.last.numSlots <= 2 ); // probably "1", but may be special slot for something. As long as it's not cardinality of the field creates = DebugAgg.Acc.creates.get(); resets = DebugAgg.Acc.resets.get(); client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms_method} field:${super_s}, limit:1, facet:{ x:'debug()'} , sort:'x asc' }}}" // sorting by x... must be done all at once in first phase ) , "facets=={ 'count':6, " + "f1:{ buckets:[{ val:batman, count:1, x:1}]}" + " } " ); assertEquals(1, DebugAgg.Acc.creates.get() - creates); assertTrue( DebugAgg.Acc.resets.get() - resets == 0); assertTrue( DebugAgg.Acc.last.numSlots >= 5 ); // all slots should be done in a single shot. there may be more than 5 due to special slots or hashing. // When limit:-1, we should do most stats in first phase (SOLR-10634) creates = DebugAgg.Acc.creates.get(); resets = DebugAgg.Acc.resets.get(); client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms_method} field:${super_s}, limit:-1, facet:{x:'debug()'} }}}" ) , "facets==" ); assertEquals(1, DebugAgg.Acc.creates.get() - creates); assertTrue( DebugAgg.Acc.resets.get() - resets == 0); assertTrue( DebugAgg.Acc.last.numSlots >= 5 ); // all slots should be done in a single shot. there may be more than 5 due to special slots or hashing. 
// Now for a numeric field // When limit:-1, we should do most stats in first phase (SOLR-10634) creates = DebugAgg.Acc.creates.get(); resets = DebugAgg.Acc.resets.get(); client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms_method} field:${num_d}, limit:-1, facet:{x:'debug()'} }}}" ) , "facets==" ); assertEquals(1, DebugAgg.Acc.creates.get() - creates); assertTrue( DebugAgg.Acc.resets.get() - resets == 0); assertTrue( DebugAgg.Acc.last.numSlots >= 5 ); // all slots should be done in a single shot. there may be more than 5 due to special slots or hashing. // But if we need to calculate domains anyway, it probably makes sense to calculate most stats in the 2nd phase (along with sub-facets) creates = DebugAgg.Acc.creates.get(); resets = DebugAgg.Acc.resets.get(); client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms_method} field:${super_s}, limit:-1, facet:{ x:'debug()' , y:{terms:${where_s}} } }}}" ) , "facets==" ); assertEquals(1, DebugAgg.Acc.creates.get() - creates); assertTrue( DebugAgg.Acc.resets.get() - resets >=4); assertTrue( DebugAgg.Acc.last.numSlots <= 2 ); // probably 1, but could be higher // Now with a numeric field // But if we need to calculate domains anyway, it probably makes sense to calculate most stats in the 2nd phase (along with sub-facets) creates = DebugAgg.Acc.creates.get(); resets = DebugAgg.Acc.resets.get(); client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{terms:{${terms_method} field:${num_d}, limit:-1, facet:{ x:'debug()' , y:{terms:${where_s}} } }}}" ) , "facets==" ); assertEquals(1, DebugAgg.Acc.creates.get() - creates); assertTrue( DebugAgg.Acc.resets.get() - resets >=4); assertTrue( DebugAgg.Acc.last.numSlots <= 2 ); // probably 1, but could be higher } //////////////////////////////////////////////////////////////// end phase testing // // Refinement should not be needed to get exact results here, so this tests that // extra refinement requests are not sent out. This currently relies on counting the number of times // debug() aggregation is parsed... which is somewhat fragile. Please replace this with something // better in the future - perhaps debug level info about number of refinements or additional facet phases. 
// for (String facet_field : new String[]{cat_s,where_s,num_d,num_i,num_is,num_fs,super_s,date,val_b,multi_ss}) { ModifiableSolrParams test = params(p, "q", "id:(1 2)", "facet_field",facet_field, "debug", "true" , "json.facet", "{ " + " f1:{type:terms, field:'${facet_field}', refine:${refine}, facet:{x:'debug()'} }" + ",f2:{type:terms, method:dvhash, field:'${facet_field}', refine:${refine}, facet:{x:'debug()'} }" + ",f3:{type:terms, field:'${facet_field}', refine:${refine}, facet:{x:'debug()', y:{type:terms,field:'${facet_field}',refine:${refine}}} }" + // facet within facet " }" ); long startParses = DebugAgg.parses.get(); client.testJQ(params(test, "refine", "false") , "facets==" + "" ); long noRefineParses = DebugAgg.parses.get() - startParses; startParses = DebugAgg.parses.get(); client.testJQ(params(test, "refine", "true") , "facets==" + "" ); long refineParses = DebugAgg.parses.get() - startParses; assertEquals(noRefineParses, refineParses); } } public void testPrelimSortingSingleNode() throws Exception { doTestPrelimSortingSingleNode(false, false); } public void testPrelimSortingSingleNodeExtraStat() throws Exception { doTestPrelimSortingSingleNode(true, false); } public void testPrelimSortingSingleNodeExtraFacet() throws Exception { doTestPrelimSortingSingleNode(false, true); } public void testPrelimSortingSingleNodeExtraStatAndFacet() throws Exception { doTestPrelimSortingSingleNode(true, true); } /** @see #doTestPrelimSorting */ public void doTestPrelimSortingSingleNode(final boolean extraAgg, final boolean extraSubFacet) throws Exception { // we're not using Client.localClient because it doesn't provide a SolrClient to // use in doTestPrelimSorting -- so instead we make a single node, and don't use any shards param... final SolrInstances nodes = new SolrInstances(1, "solrconfig-tlog.xml", "schema_latest.xml"); try { final Client client = nodes.getClient(random().nextInt()); client.queryDefaults().set("debugQuery", Boolean.toString(random().nextBoolean()) ); doTestPrelimSorting(client, extraAgg, extraSubFacet); } finally { nodes.stop(); } } public void testPrelimSortingDistrib() throws Exception { doTestPrelimSortingDistrib(false, false); } public void testPrelimSortingDistribExtraStat() throws Exception { doTestPrelimSortingDistrib(true, false); } public void testPrelimSortingDistribExtraFacet() throws Exception { doTestPrelimSortingDistrib(false, true); } public void testPrelimSortingDistribExtraStatAndFacet() throws Exception { doTestPrelimSortingDistrib(true, true); } /** @see #doTestPrelimSorting */ public void doTestPrelimSortingDistrib(final boolean extraAgg, final boolean extraSubFacet) throws Exception { // we only use 2 shards, but we also want to to sanity check code paths if one (additional) shard is empty final int totalShards = random().nextBoolean() ? 
2 : 3; final SolrInstances nodes = new SolrInstances(totalShards, "solrconfig-tlog.xml", "schema_latest.xml"); try { final Client client = nodes.getClient(random().nextInt()); client.queryDefaults().set( "shards", nodes.getShards(), "debugQuery", Boolean.toString(random().nextBoolean()) ); doTestPrelimSorting(client, extraAgg, extraSubFacet); } finally { nodes.stop(); } } /** * Helper method that indexes a fixed set of docs to exactly <em>two</em> of the SolrClients * involved in the current Client such that each shard is identical for the purposes of simplified * doc/facet counting/assertions -- if there is only one SolrClient (Client.local) then it sends that * single shard twice as many docs so the counts/assertions will be consistent. * * Note: this test doesn't demonstrate practical uses of prelim_sort. * The scenerios it tests are actualy fairly absurd, but help to ensure that edge cases are covered. * * @param client client to use -- may be local or multishard * @param extraAgg if an extra aggregation function should be included, this hits slightly diff code paths * @param extraSubFacet if an extra sub facet should be included, this hits slightly diff code paths */ public void doTestPrelimSorting(final Client client, final boolean extraAgg, final boolean extraSubFacet) throws Exception { client.deleteByQuery("*:*", null); List<SolrClient> clients = client.getClientProvider().all(); // carefully craft two balanced shards (assuming we have at least two) and leave any other shards // empty to help check the code paths of some shards returning no buckets. // // if we are in a single node sitaution, these clients will be the same, and we'll have the same // total docs in our collection, but the numShardsWithData will be diff // (which will affect some assertions) final SolrClient shardA = clients.get(0); final SolrClient shardB = clients.get(clients.size()-1); final int numShardsWithData = (shardA == shardB) ? 1 : 2; // for simplicity, each foo_s "term" exists on each shard in the same number of docs as it's numeric // value (so count should be double the term) and bar_i is always 1 per doc (so sum(bar_i) // should always be the same as count) int id = 0; for (int i = 1; i <= 20; i++) { for (int j = 1; j <= i; j++) { shardA.add(new SolrInputDocument("id", ""+(++id), "foo_s", "foo_" + i, "bar_i", "1")); shardB.add(new SolrInputDocument("id", ""+(++id), "foo_s", "foo_" + i, "bar_i", "1")); } } assertEquals(420, id); // sanity check client.commit(); DebugAgg.Acc.collectDocs.set(0); DebugAgg.Acc.collectDocSets.set(0); // NOTE: sorting by index can cause some optimizations when using type=enum|stream // that cause our stat to be collected differently, so we have to account for that when // looking at DebugAdd collect stats if/when the test framework picks those // ...BUT... this only affects cloud, for single node prelim_sort overrides streaming final boolean indexSortDebugAggFudge = ( 1 < numShardsWithData ) && (FacetField.FacetMethod.DEFAULT_METHOD.equals(FacetField.FacetMethod.STREAM) || FacetField.FacetMethod.DEFAULT_METHOD.equals(FacetField.FacetMethod.ENUM)); final String common = "refine:true, type:field, field:'foo_s', facet: { " + "x: 'debug(wrap,sum(bar_i))' " + (extraAgg ? ", y:'min(bar_i)'" : "") + (extraSubFacet ? ", z:{type:query, q:'bar_i:0'}" : "") + "}"; final String yz = (extraAgg ? "y:1, " : "") + (extraSubFacet ? 
"z:{count:0}, " : ""); // really basic: top 5 by (prelim_sort) count, (re)sorted by a stat client.testJQ(params("q", "*:*", "rows", "0", "json.facet" , "{ foo_a:{ "+ common+", limit:5, overrequest:0, " + " prelim_sort:'count desc', sort:'x asc' }" + " foo_b:{ "+ common+", limit:5, overrequest:0, " + " prelim_sort:'count asc', sort:'x desc' } }") , "facets=={ 'count':420, " + " 'foo_a':{ 'buckets':[" + " { val:foo_16, count:32, " + yz + "x:32.0}," + " { val:foo_17, count:34, " + yz + "x:34.0}," + " { val:foo_18, count:36, " + yz + "x:36.0}," + " { val:foo_19, count:38, " + yz + "x:38.0}," + " { val:foo_20, count:40, " + yz + "x:40.0}," + "] }," + " 'foo_b':{ 'buckets':[" + " { val:foo_5, count:10, " + yz + "x:10.0}," + " { val:foo_4, count:8, " + yz + "x:8.0}," + " { val:foo_3, count:6, " + yz + "x:6.0}," + " { val:foo_2, count:4, " + yz + "x:4.0}," + " { val:foo_1, count:2, " + yz + "x:2.0}," + "] }," + "}" ); // (re)sorting should prevent 'sum(bar_i)' from being computed for every doc // only the choosen buckets should be collected (as a set) once per node... assertEqualsAndReset(0, DebugAgg.Acc.collectDocs); // 2 facets, 5 bucket, on each shard assertEqualsAndReset(numShardsWithData * 2 * 5, DebugAgg.Acc.collectDocSets); { // same really basic top 5 by (prelim_sort) count, (re)sorted by a stat -- w/allBuckets:true // check code paths with and w/o allBuckets // NOTE: allBuckets includes stats, but not other sub-facets... final String aout = "allBuckets:{ count:420, "+ (extraAgg ? "y:1, " : "") + "x:420.0 }"; client.testJQ(params("q", "*:*", "rows", "0", "json.facet" , "{ foo_a:{ " + common+", allBuckets:true, limit:5, overrequest:0, " + " prelim_sort:'count desc', sort:'x asc' }" + " foo_b:{ " + common+", allBuckets:true, limit:5, overrequest:0, " + " prelim_sort:'count asc', sort:'x desc' } }") , "facets=={ 'count':420, " + " 'foo_a':{ " + aout + " 'buckets':[" + " { val:foo_16, count:32, " + yz + "x:32.0}," + " { val:foo_17, count:34, " + yz + "x:34.0}," + " { val:foo_18, count:36, " + yz + "x:36.0}," + " { val:foo_19, count:38, " + yz + "x:38.0}," + " { val:foo_20, count:40, " + yz + "x:40.0}," + "] }," + " 'foo_b':{ " + aout + " 'buckets':[" + " { val:foo_5, count:10, " + yz + "x:10.0}," + " { val:foo_4, count:8, " + yz + "x:8.0}," + " { val:foo_3, count:6, " + yz + "x:6.0}," + " { val:foo_2, count:4, " + yz + "x:4.0}," + " { val:foo_1, count:2, " + yz + "x:2.0}," + "] }," + "}" ); // because of allBuckets, we collect every doc on everyshard (x2 facets) in a single "all" slot... assertEqualsAndReset(2 * 420, DebugAgg.Acc.collectDocs); // ... in addition to collecting each of the choosen buckets (as sets) once per node... 
// 2 facets, 5 bucket, on each shard assertEqualsAndReset(numShardsWithData * 2 * 5, DebugAgg.Acc.collectDocSets); } // pagination (with offset) should happen against the re-sorted list (up to the effective limit) client.testJQ(params("q", "*:*", "rows", "0", "json.facet" , "{ foo_a:{ "+common+", offset:2, limit:3, overrequest:0, " + " prelim_sort:'count desc', sort:'x asc' }" + " foo_b:{ "+common+", offset:2, limit:3, overrequest:0, " + " prelim_sort:'count asc', sort:'x desc' } }") , "facets=={ 'count':420, " + " 'foo_a':{ 'buckets':[" + " { val:foo_18, count:36, " + yz + "x:36.0}," + " { val:foo_19, count:38, " + yz + "x:38.0}," + " { val:foo_20, count:40, " + yz + "x:40.0}," + "] }," + " 'foo_b':{ 'buckets':[" + " { val:foo_3, count:6, " + yz + "x:6.0}," + " { val:foo_2, count:4, " + yz + "x:4.0}," + " { val:foo_1, count:2, " + yz + "x:2.0}," + "] }," + "}" ); assertEqualsAndReset(0, DebugAgg.Acc.collectDocs); // 2 facets, 5 buckets (including offset), on each shard assertEqualsAndReset(numShardsWithData * 2 * 5, DebugAgg.Acc.collectDocSets); // when overrequesting is used, the full list of candidate buckets should be considered client.testJQ(params("q", "*:*", "rows", "0", "json.facet" , "{ foo_a:{ "+common+", limit:5, overrequest:5, " + " prelim_sort:'count desc', sort:'x asc' }" + " foo_b:{ "+common+", limit:5, overrequest:5, " + " prelim_sort:'count asc', sort:'x desc' } }") , "facets=={ 'count':420, " + " 'foo_a':{ 'buckets':[" + " { val:foo_11, count:22, " + yz + "x:22.0}," + " { val:foo_12, count:24, " + yz + "x:24.0}," + " { val:foo_13, count:26, " + yz + "x:26.0}," + " { val:foo_14, count:28, " + yz + "x:28.0}," + " { val:foo_15, count:30, " + yz + "x:30.0}," + "] }," + " 'foo_b':{ 'buckets':[" + " { val:foo_10, count:20, " + yz + "x:20.0}," + " { val:foo_9, count:18, " + yz + "x:18.0}," + " { val:foo_8, count:16, " + yz + "x:16.0}," + " { val:foo_7, count:14, " + yz + "x:14.0}," + " { val:foo_6, count:12, " + yz + "x:12.0}," + "] }," + "}" ); assertEqualsAndReset(0, DebugAgg.Acc.collectDocs); // 2 facets, 10 buckets (including overrequest), on each shard assertEqualsAndReset(numShardsWithData * 2 * 10, DebugAgg.Acc.collectDocSets); { // for an (effectively) unlimited facet, then from the black box perspective of the client, // preliminary sorting should be completely ignored... final StringBuilder expected = new StringBuilder("facets=={ 'count':420, 'foo_a':{ 'buckets':[\n"); for (int i = 20; 0 < i; i--) { final int x = i * 2; expected.append("{ val:foo_"+i+", count:"+x+", " + yz + "x:"+x+".0},\n"); } expected.append("] } }"); for (int limit : Arrays.asList(-1, 100000)) { for (String sortOpts : Arrays.asList("sort:'x desc'", "prelim_sort:'count asc', sort:'x desc'", "prelim_sort:'index asc', sort:'x desc'")) { final String snippet = "limit: " + limit + ", " + sortOpts; client.testJQ(params("q", "*:*", "rows", "0", "json.facet" , "{ foo_a:{ "+common+", " + snippet + "}}") , expected.toString()); // the only difference from a white box perspective, is when/if we are // optimized to use the sort SlotAcc during collection instead of the prelim_sort SlotAcc.. // (ie: sub facet preventing single pass (re)sort in single node mode) if (((0 < limit || extraSubFacet) && snippet.contains("prelim_sort")) && ! (indexSortDebugAggFudge && snippet.contains("index asc"))) { // by-pass single pass collection, do everything as sets... 
assertEqualsAndReset(snippet, numShardsWithData * 20, DebugAgg.Acc.collectDocSets); assertEqualsAndReset(snippet, 0, DebugAgg.Acc.collectDocs); } else { // simple sort on x, or optimized single pass (re)sort, or indexSortDebugAggFudge // no sets should have been (post) collected for our stat assertEqualsAndReset(snippet, 0, DebugAgg.Acc.collectDocSets); // every doc should be collected... assertEqualsAndReset(snippet, 420, DebugAgg.Acc.collectDocs); } } } } // test all permutations of (prelim_sort | sort) on (index | count | stat) since there are // custom sort codepaths for index & count that work differnetly then general stats // // NOTE: there's very little value in re-sort by count/index after prelim_sort on something more complex, // typically better to just ignore the prelim_sort, but we're testing it for completeness // (and because you *might* want to prelim_sort by some function, for the purpose of "sampling" the // top results and then (re)sorting by count/index) for (String numSort : Arrays.asList("count", "x")) { // equivilent ordering client.testJQ(params("q", "*:*", "rows", "0", "json.facet" , "{ foo_a:{ "+common+", limit:10, overrequest:0, " + " prelim_sort:'"+numSort+" asc', sort:'index desc' }" + " foo_b:{ "+common+", limit:10, overrequest:0, " + " prelim_sort:'index asc', sort:'"+numSort+" desc' } }") , "facets=={ 'count':420, " + " 'foo_a':{ 'buckets':[" + " { val:foo_9, count:18, " + yz + "x:18.0}," + " { val:foo_8, count:16, " + yz + "x:16.0}," + " { val:foo_7, count:14, " + yz + "x:14.0}," + " { val:foo_6, count:12, " + yz + "x:12.0}," + " { val:foo_5, count:10, " + yz + "x:10.0}," + " { val:foo_4, count:8, " + yz + "x:8.0}," + " { val:foo_3, count:6, " + yz + "x:6.0}," + " { val:foo_2, count:4, " + yz + "x:4.0}," + " { val:foo_10, count:20, " + yz + "x:20.0}," + " { val:foo_1, count:2, " + yz + "x:2.0}," + "] }," + " 'foo_b':{ 'buckets':[" + " { val:foo_18, count:36, " + yz + "x:36.0}," + " { val:foo_17, count:34, " + yz + "x:34.0}," + " { val:foo_16, count:32, " + yz + "x:32.0}," + " { val:foo_15, count:30, " + yz + "x:30.0}," + " { val:foo_14, count:28, " + yz + "x:28.0}," + " { val:foo_13, count:26, " + yz + "x:26.0}," + " { val:foo_12, count:24, " + yz + "x:24.0}," + " { val:foo_11, count:22, " + yz + "x:22.0}," + " { val:foo_10, count:20, " + yz + "x:20.0}," + " { val:foo_1, count:2, " + yz + "x:2.0}," + "] }," + "}" ); // since these behave differently, defer DebugAgg counter checks until all are done... } // These 3 permutations defer the compuation of x as docsets, // so it's 3 x (10 buckets on each shard) (but 0 direct docs) // prelim_sort:count, sort:index // prelim_sort:index, sort:x // prelim_sort:index, sort:count // ...except when streaming, prelim_sort:index does no docsets. assertEqualsAndReset((indexSortDebugAggFudge ? 1 : 3) * numShardsWithData * 10, DebugAgg.Acc.collectDocSets); // This is the only situation that should (always) result in every doc being collected (but 0 docsets)... // prelim_sort:x, sort:index // ...but the (2) prelim_sort:index streaming situations above will also cause all the docs in the first // 10+1 buckets to be collected (enum checks limit+1 to know if there are "more"... assertEqualsAndReset(420 + (indexSortDebugAggFudge ? 
2 * numShardsWithData * (1+10+11+12+13+14+15+16+17+18+19) : 0), DebugAgg.Acc.collectDocs); // sanity check of prelim_sorting in a sub facet client.testJQ(params("q", "*:*", "rows", "0", "json.facet" , "{ bar:{ type:query, query:'foo_s:[foo_10 TO foo_19]', facet: {" + " foo:{ "+ common+", limit:5, overrequest:0, " + " prelim_sort:'count desc', sort:'x asc' } } } }") , "facets=={ 'count':420, " + " 'bar':{ 'count':290, " + " 'foo':{ 'buckets':[" + " { val:foo_15, count:30, " + yz + "x:30.0}," + " { val:foo_16, count:32, " + yz + "x:32.0}," + " { val:foo_17, count:34, " + yz + "x:34.0}," + " { val:foo_18, count:36, " + yz + "x:36.0}," + " { val:foo_19, count:38, " + yz + "x:38.0}," + " ] }," + " }," + "}" ); // the prelim_sort should prevent 'sum(bar_i)' from being computed for every doc // only the choosen buckets should be collected (as a set) once per node... assertEqualsAndReset(0, DebugAgg.Acc.collectDocs); // 5 bucket, on each shard assertEqualsAndReset(numShardsWithData * 5, DebugAgg.Acc.collectDocSets); { // sanity check how defered stats are handled // here we'll prelim_sort & sort on things that are both "not x" and using the debug() counters // (wrapping x) to assert that 'x' is correctly defered and only collected for the final top buckets final List<String> sorts = new ArrayList<String>(Arrays.asList("index asc", "count asc")); if (extraAgg) { sorts.add("y asc"); // same for every bucket, but index order tie breaker should kick in } for (String s : sorts) { client.testJQ(params("q", "*:*", "rows", "0", "json.facet" , "{ foo:{ "+ common+", limit:5, overrequest:0, " + " prelim_sort:'count desc', sort:'"+s+"' } }") , "facets=={ 'count':420, " + " 'foo':{ 'buckets':[" + " { val:foo_16, count:32, " + yz + "x:32.0}," + " { val:foo_17, count:34, " + yz + "x:34.0}," + " { val:foo_18, count:36, " + yz + "x:36.0}," + " { val:foo_19, count:38, " + yz + "x:38.0}," + " { val:foo_20, count:40, " + yz + "x:40.0}," + "] } }" ); // Neither prelim_sort nor sort should need 'sum(bar_i)' to be computed for every doc // only the choosen buckets should be collected (as a set) once per node... 
assertEqualsAndReset(0, DebugAgg.Acc.collectDocs); // 5 bucket, on each shard assertEqualsAndReset(numShardsWithData * 5, DebugAgg.Acc.collectDocSets); } } } @Test public void testOverrequest() throws Exception { initServers(); Client client = servers.getClient(random().nextInt()); client.queryDefaults().set( "shards", servers.getShards(), "debugQuery", Boolean.toString(random().nextBoolean()) ); List<SolrClient> clients = client.getClientProvider().all(); assertTrue(clients.size() >= 3); client.deleteByQuery("*:*", null); ModifiableSolrParams p = params("cat_s", "cat_s"); String cat_s = p.get("cat_s"); clients.get(0).add( sdoc("id", "1", cat_s, "A") ); // A will win tiebreak clients.get(0).add( sdoc("id", "2", cat_s, "B") ); clients.get(1).add( sdoc("id", "3", cat_s, "B") ); clients.get(1).add( sdoc("id", "4", cat_s, "A") ); // A will win tiebreak clients.get(2).add( sdoc("id", "5", cat_s, "B") ); clients.get(2).add( sdoc("id", "6", cat_s, "B") ); client.commit(); // Shard responses should be A=1, A=1, B=2, merged should be "A=2, B=2" hence A wins tiebreak client.testJQ(params(p, "q", "*:*", "json.facet", "{" + "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0}" + ",cat1:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:1}" + ",catDef:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:-1}" + // -1 is default overrequest ",catBig:{type:terms, field:${cat_s}, sort:'count desc', offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up "}" ) , "facets=={ count:6" + ", cat0:{ buckets:[ {val:A,count:2} ] }" + // with no overrequest, we incorrectly conclude that A is the top bucket ", cat1:{ buckets:[ {val:B,count:4} ] }" + ", catDef:{ buckets:[ {val:B,count:4} ] }" + ", catBig:{ buckets:[ {val:A,count:2} ] }" + "}" ); } @Test public void testBigger() throws Exception { ModifiableSolrParams p = params("rows", "0", "cat_s", "cat_ss", "where_s", "where_ss"); // doBigger(Client.localClient, p); initServers(); Client client = servers.getClient(random().nextInt()); client.queryDefaults().set( "shards", servers.getShards() ); doBigger( client, p ); } private String getId(int id) { return String.format(Locale.US, "%05d", id); } public void doBigger(Client client, ModifiableSolrParams p) throws Exception { MacroExpander m = new MacroExpander(p.getMap()); String cat_s = m.expand("${cat_s}"); String where_s = m.expand("${where_s}"); client.deleteByQuery("*:*", null); Random r = new Random(0); // make deterministic int numCat = 1; int numWhere = 2000000000; int commitPercent = 10; int ndocs=1000; Map<Integer, Map<Integer, List<Integer>>> model = new HashMap(); // cat->where->list<ids> for (int i=0; i<ndocs; i++) { Integer cat = r.nextInt(numCat); Integer where = r.nextInt(numWhere); client.add( sdoc("id", getId(i), cat_s,cat, where_s, where) , null ); Map<Integer,List<Integer>> sub = model.get(cat); if (sub == null) { sub = new HashMap<>(); model.put(cat, sub); } List<Integer> ids = sub.get(where); if (ids == null) { ids = new ArrayList<>(); sub.put(where, ids); } ids.add(i); if (r.nextInt(100) < commitPercent) { client.commit(); } } client.commit(); int sz = model.get(0).size(); client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{type:terms, field:${cat_s}, limit:2, facet:{x:'unique($where_s)'} }}" ) , "facets=={ 'count':" + ndocs + "," + "'f1':{ 'buckets':[{ 'val':'0', 'count':" + ndocs + ", x:" + sz + " }]} } " ); if (client.local()) { // distrib estimation prob won't match client.testJQ(params(p, 
"q", "*:*" , "json.facet", "{f1:{type:terms, field:${cat_s}, limit:2, facet:{x:'hll($where_s)'} }}" ) , "facets=={ 'count':" + ndocs + "," + "'f1':{ 'buckets':[{ 'val':'0', 'count':" + ndocs + ", x:" + sz + " }]} } " ); } client.testJQ(params(p, "q", "*:*" , "json.facet", "{f1:{type:terms, field:id, limit:1, offset:990}}" ) , "facets=={ 'count':" + ndocs + "," + "'f1':{buckets:[{val:'00990',count:1}]}} " ); for (int i=0; i<20; i++) { int off = random().nextInt(ndocs); client.testJQ(params(p, "q", "*:*", "off",Integer.toString(off) , "json.facet", "{f1:{type:terms, field:id, limit:1, offset:${off}}}" ) , "facets=={ 'count':" + ndocs + "," + "'f1':{buckets:[{val:'" + getId(off) + "',count:1}]}} " ); } } public void testTolerant() throws Exception { initServers(); Client client = servers.getClient(random().nextInt()); client.queryDefaults().set("shards", servers.getShards() + ",[ff01::114]:33332/ignore_exception"); indexSimple(client); try { client.testJQ(params("ignore_exception", "true", "shards.tolerant", "false", "q", "*:*" , "json.facet", "{f:{type:terms, field:cat_s}}" ) , "facets=={ count:6," + "f:{ buckets:[{val:B,count:3},{val:A,count:2}] }" + "}" ); fail("we should have failed"); } catch (Exception e) { // ok } client.testJQ(params("ignore_exception", "true", "shards.tolerant", "true", "q", "*:*" , "json.facet", "{f:{type:terms, field:cat_s}}" ) , "facets=={ count:6," + "f:{ buckets:[{val:B,count:3},{val:A,count:2}] }" + "}" ); } @Test public void testBlockJoin() throws Exception { doBlockJoin(Client.localClient()); } public void doBlockJoin(Client client) throws Exception { ModifiableSolrParams p = params("rows","0"); client.deleteByQuery("*:*", null); SolrInputDocument parent; parent = sdoc("id", "1", "type_s","book", "book_s","A", "v_t","q"); client.add(parent, null); parent = sdoc("id", "2", "type_s","book", "book_s","B", "v_t","q w"); parent.addChildDocument( sdoc("id","2.1", "type_s","page", "page_s","a", "v_t","x y z") ); parent.addChildDocument( sdoc("id","2.2", "type_s","page", "page_s","b", "v_t","x y ") ); parent.addChildDocument( sdoc("id","2.3", "type_s","page", "page_s","c", "v_t"," y z" ) ); client.add(parent, null); parent = sdoc("id", "3", "type_s","book", "book_s","C", "v_t","q w e"); parent.addChildDocument( sdoc("id","3.1", "type_s","page", "page_s","d", "v_t","x ") ); parent.addChildDocument( sdoc("id","3.2", "type_s","page", "page_s","e", "v_t"," y ") ); parent.addChildDocument( sdoc("id","3.3", "type_s","page", "page_s","f", "v_t"," z") ); client.add(parent, null); parent = sdoc("id", "4", "type_s","book", "book_s","D", "v_t","e"); client.add(parent, null); client.commit(); client.testJQ(params(p, "q", "*:*" , "json.facet", "{ " + "pages:{ type:query, domain:{blockChildren:'type_s:book'} , facet:{ x:{field:v_t} } }" + ",pages2:{type:terms, field:v_t, domain:{blockChildren:'type_s:book'} }" + ",books:{ type:query, domain:{blockParent:'type_s:book'} , facet:{ x:{field:v_t} } }" + ",books2:{type:terms, field:v_t, domain:{blockParent:'type_s:book'} }" + ",pageof3:{ type:query, q:'id:3', facet : { x : { type:terms, field:page_s, domain:{blockChildren:'type_s:book'}}} }" + ",bookof22:{ type:query, q:'id:2.2', facet : { x : { type:terms, field:book_s, domain:{blockParent:'type_s:book'}}} }" + ",missing_blockParent:{ type:query, domain:{blockParent:'type_s:does_not_exist'} }" + ",missing_blockChildren:{ type:query, domain:{blockChildren:'type_s:does_not_exist'} }" + "}" ) , "facets=={ count:10" + ", pages:{count:6 , x:{buckets:[ 
{val:y,count:4},{val:x,count:3},{val:z,count:3} ]} }" + ", pages2:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" + ", books:{count:4 , x:{buckets:[ {val:q,count:3},{val:e,count:2},{val:w,count:2} ]} }" + ", books2:{ buckets:[ {val:q,count:3},{val:e,count:2},{val:w,count:2} ] }" + ", pageof3:{count:1 , x:{buckets:[ {val:d,count:1},{val:e,count:1},{val:f,count:1} ]} }" + ", bookof22:{count:1 , x:{buckets:[ {val:B,count:1} ]} }" + ", missing_blockParent:{count:0}" + ", missing_blockChildren:{count:0}" + "}" ); // no matches in base query client.testJQ(params("q", "no_match_s:NO_MATCHES" , "json.facet", "{ processEmpty:true," + "pages:{ type:query, domain:{blockChildren:'type_s:book'} }" + ",books:{ type:query, domain:{blockParent:'type_s:book'} }" + "}" ) , "facets=={ count:0" + ", pages:{count:0}" + ", books:{count:0}" + "}" ); // test facet on children nested under terms facet on parents client.testJQ(params("q", "*:*" , "json.facet", "{" + "books:{ type:terms, field:book_s, facet:{ pages:{type:terms, field:v_t, domain:{blockChildren:'type_s:book'}} } }" + "}" ) , "facets=={ count:10" + ", books:{buckets:[{val:A,count:1,pages:{buckets:[]}}" + " ,{val:B,count:1,pages:{buckets:[{val:y,count:3},{val:x,count:2},{val:z,count:2}]}}" + " ,{val:C,count:1,pages:{buckets:[{val:x,count:1},{val:y,count:1},{val:z,count:1}]}}" + " ,{val:D,count:1,pages:{buckets:[]}}"+ "] }" + "}" ); // test filter after block join client.testJQ(params(p, "q", "*:*" , "json.facet", "{ " + "pages1:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:'*:*'} }" + ",pages2:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:'-id:3.1'} }" + ",books:{type:terms, field:v_t, domain:{blockParent:'type_s:book', filter:'*:*'} }" + ",books2:{type:terms, field:v_t, domain:{blockParent:'type_s:book', filter:'id:1'} }" + "}" ) , "facets=={ count:10" + ", pages1:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" + ", pages2:{ buckets:[ {val:y,count:4},{val:z,count:3},{val:x,count:2} ] }" + ", books:{ buckets:[ {val:q,count:3},{val:e,count:2},{val:w,count:2} ] }" + ", books2:{ buckets:[ {val:q,count:1} ] }" + "}" ); // test other various ways to get filters client.testJQ(params(p, "q", "*:*", "f1","-id:3.1", "f2","id:1" , "json.facet", "{ " + "pages1:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:[]} }" + ",pages2:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:{param:f1} } }" + ",books:{type:terms, field:v_t, domain:{blockParent:'type_s:book', filter:[{param:q},{param:missing_param}]} }" + ",books2:{type:terms, field:v_t, domain:{blockParent:'type_s:book', filter:[{param:f2}] } }" + "}" ) , "facets=={ count:10" + ", pages1:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" + ", pages2:{ buckets:[ {val:y,count:4},{val:z,count:3},{val:x,count:2} ] }" + ", books:{ buckets:[ {val:q,count:3},{val:e,count:2},{val:w,count:2} ] }" + ", books2:{ buckets:[ {val:q,count:1} ] }" + "}" ); } /** * An explicit test for unique*(_root_) across all methods */ public void testUniquesForMethod() throws Exception { final Client client = Client.localClient(); final SolrParams p = params("rows","0"); client.deleteByQuery("*:*", null); SolrInputDocument parent; parent = sdoc("id", "1", "type_s","book", "book_s","A", "v_t","q"); client.add(parent, null); parent = sdoc("id", "2", "type_s","book", "book_s","B", "v_t","q w"); parent.addChildDocument( sdoc("id","2.1", "type_s","page", "page_s","a", "v_t","x y z") ); parent.addChildDocument( 
sdoc("id","2.2", "type_s","page", "page_s","b", "v_t","x y ") ); parent.addChildDocument( sdoc("id","2.3", "type_s","page", "page_s","c", "v_t"," y z" ) ); client.add(parent, null); parent = sdoc("id", "3", "type_s","book", "book_s","C", "v_t","q w e"); parent.addChildDocument( sdoc("id","3.1", "type_s","page", "page_s","d", "v_t","x ") ); parent.addChildDocument( sdoc("id","3.2", "type_s","page", "page_s","e", "v_t"," y ") ); parent.addChildDocument( sdoc("id","3.3", "type_s","page", "page_s","f", "v_t"," z") ); client.add(parent, null); parent = sdoc("id", "4", "type_s","book", "book_s","D", "v_t","e"); client.add(parent, null); client.commit(); client.testJQ(params(p, "q", "type_s:page" , "json.facet", "{" + " types: {" + " type:terms," + " field:type_s," + " limit:-1," + " facet: {" + " in_books: \"unique(_root_)\" }"+ " }," + " pages: {" + " type:terms," + " field:page_s," + " limit:-1," + " facet: {" + " in_books: \"uniqueBlock(_root_)\" }"+ " }" + "}" ) , "response=={numFound:6,start:0,docs:[]}" , "facets=={ count:6," + "types:{" + " buckets:[ {val:page, count:6, in_books:2} ]}" + "pages:{" + " buckets:[ " + " {val:a, count:1, in_books:1}," + " {val:b, count:1, in_books:1}," + " {val:c, count:1, in_books:1}," + " {val:d, count:1, in_books:1}," + " {val:e, count:1, in_books:1}," + " {val:f, count:1, in_books:1}" + " ]}" + "}" ); } /** * Similar to {@link #testBlockJoin} but uses query time joining. * <p> * (asserts are slightly diff because if a query matches multiple types of documents, blockJoin domain switches * to parent/child domains preserve any existing parent/children from the original domain - eg: when q=*:*) * </p> */ public void testQueryJoinBooksAndPages() throws Exception { final Client client = Client.localClient(); final SolrParams p = params("rows","0"); client.deleteByQuery("*:*", null); // build up a list of the docs we want to test with List<SolrInputDocument> docsToAdd = new ArrayList<>(10); docsToAdd.add(sdoc("id", "1", "type_s","book", "book_s","A", "v_t","q")); docsToAdd.add( sdoc("id", "2", "type_s","book", "book_s","B", "v_t","q w") ); docsToAdd.add( sdoc("book_id_s", "2", "id", "2.1", "type_s","page", "page_s","a", "v_t","x y z") ); docsToAdd.add( sdoc("book_id_s", "2", "id", "2.2", "type_s","page", "page_s","b", "v_t","x y ") ); docsToAdd.add( sdoc("book_id_s", "2", "id","2.3", "type_s","page", "page_s","c", "v_t"," y z" ) ); docsToAdd.add( sdoc("id", "3", "type_s","book", "book_s","C", "v_t","q w e") ); docsToAdd.add( sdoc("book_id_s", "3", "id","3.1", "type_s","page", "page_s","d", "v_t","x ") ); docsToAdd.add( sdoc("book_id_s", "3", "id","3.2", "type_s","page", "page_s","e", "v_t"," y ") ); docsToAdd.add( sdoc("book_id_s", "3", "id","3.3", "type_s","page", "page_s","f", "v_t"," z") ); docsToAdd.add( sdoc("id", "4", "type_s","book", "book_s","D", "v_t","e") ); // shuffle the docs since order shouldn't matter Collections.shuffle(docsToAdd, random()); for (SolrInputDocument doc : docsToAdd) { client.add(doc, null); } client.commit(); // the domains we'll be testing, initially setup for block join final String toChildren = "join: { from:'id', to:'book_id_s' }"; final String toParents = "join: { from:'book_id_s', to:'id' }"; final String toBogusChildren = "join: { from:'id', to:'does_not_exist_s' }"; final String toBogusParents = "join: { from:'book_id_s', to:'does_not_exist_s' }"; client.testJQ(params(p, "q", "*:*" , "json.facet", "{ " + "pages:{ type:query, domain:{"+toChildren+"} , facet:{ x:{field:v_t} } }" + ",pages2:{type:terms, field:v_t, 
domain:{"+toChildren+"} }" + ",books:{ type:query, domain:{"+toParents+"} , facet:{ x:{field:v_t} } }" + ",books2:{type:terms, field:v_t, domain:{"+toParents+"} }" + ",pageof3:{ type:query, q:'id:3', facet : { x : { type:terms, field:page_s, domain:{"+toChildren+"}}} }" + ",bookof22:{ type:query, q:'id:2.2', facet : { x : { type:terms, field:book_s, domain:{"+toParents+"}}} }" + ",missing_Parents:{ type:query, domain:{"+toBogusParents+"} }" + ",missing_Children:{ type:query, domain:{"+toBogusChildren+"} }" + "}" ) , "facets=={ count:10" + ", pages:{count:6 , x:{buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ]} }" + ", pages2:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" + ", books:{count:2 , x:{buckets:[ {val:q,count:2},{val:w,count:2},{val:e,count:1} ]} }" + ", books2:{ buckets:[ {val:q,count:2},{val:w,count:2},{val:e,count:1} ] }" + ", pageof3:{count:1 , x:{buckets:[ {val:d,count:1},{val:e,count:1},{val:f,count:1} ]} }" + ", bookof22:{count:1 , x:{buckets:[ {val:B,count:1} ]} }" + ", missing_Parents:{count:0}" + ", missing_Children:{count:0}" + "}" ); // no matches in base query client.testJQ(params("q", "no_match_s:NO_MATCHES" , "json.facet", "{ processEmpty:true," + "pages:{ type:query, domain:{"+toChildren+"} }" + ",books:{ type:query, domain:{"+toParents+"} }" + "}" ) , "facets=={ count:0" + ", pages:{count:0}" + ", books:{count:0}" + "}" ); // test facet on children nested under terms facet on parents client.testJQ(params("q", "*:*" , "json.facet", "{" + "books:{ type:terms, field:book_s, facet:{ pages:{type:terms, field:v_t, domain:{"+toChildren+"}} } }" + "}" ) , "facets=={ count:10" + ", books:{buckets:[{val:A,count:1,pages:{buckets:[]}}" + " ,{val:B,count:1,pages:{buckets:[{val:y,count:3},{val:x,count:2},{val:z,count:2}]}}" + " ,{val:C,count:1,pages:{buckets:[{val:x,count:1},{val:y,count:1},{val:z,count:1}]}}" + " ,{val:D,count:1,pages:{buckets:[]}}"+ "] }" + "}" ); // test filter after join client.testJQ(params(p, "q", "*:*" , "json.facet", "{ " + "pages1:{type:terms, field:v_t, domain:{"+toChildren+", filter:'*:*'} }" + ",pages2:{type:terms, field:v_t, domain:{"+toChildren+", filter:'-id:3.1'} }" + ",books:{type:terms, field:v_t, domain:{"+toParents+", filter:'*:*'} }" + ",books2:{type:terms, field:v_t, domain:{"+toParents+", filter:'id:2'} }" + "}" ) , "facets=={ count:10" + ", pages1:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" + ", pages2:{ buckets:[ {val:y,count:4},{val:z,count:3},{val:x,count:2} ] }" + ", books:{ buckets:[ {val:q,count:2},{val:w,count:2},{val:e,count:1} ] }" + ", books2:{ buckets:[ {val:q,count:1}, {val:w,count:1} ] }" + "}" ); // test other various ways to get filters client.testJQ(params(p, "q", "*:*", "f1","-id:3.1", "f2","id:2" , "json.facet", "{ " + "pages1:{type:terms, field:v_t, domain:{"+toChildren+", filter:[]} }" + ",pages2:{type:terms, field:v_t, domain:{"+toChildren+", filter:{param:f1} } }" + ",books:{type:terms, field:v_t, domain:{"+toParents+", filter:[{param:q},{param:missing_param}]} }" + ",books2:{type:terms, field:v_t, domain:{"+toParents+", filter:[{param:f2}] } }" + "}" ) , "facets=={ count:10" + ", pages1:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" + ", pages2:{ buckets:[ {val:y,count:4},{val:z,count:3},{val:x,count:2} ] }" + ", books:{ buckets:[ {val:q,count:2},{val:w,count:2},{val:e,count:1} ] }" + ", books2:{ buckets:[ {val:q,count:1}, {val:w,count:1} ] }" + "}" ); } @Test public void testErrors() throws Exception { doTestErrors(Client.localClient()); } public 
void doTestErrors(Client client) throws Exception { ModifiableSolrParams p = params("rows", "0"); client.deleteByQuery("*:*", null); try { client.testJQ(params("ignore_exception", "true", "q", "*:*" , "json.facet", "{f:{type:ignore_exception_aaa, field:bbbbbb}}" ) ); } catch (SolrException e) { assertTrue( e.getMessage().contains("ignore_exception_aaa") ); } } @Test public void testDomainErrors() throws Exception { Client client = Client.localClient(); client.deleteByQuery("*:*", null); indexSimple(client); // using assertQEx so that, status code and error message can be asserted assertQEx("Should Fail as filter with qparser in domain becomes null", "QParser yields null, perhaps unresolved parameter reference in: {!query v=$NOfilt}", req("q", "*:*", "json.facet", "{cat_s:{type:terms,field:cat_s,domain:{filter:'{!query v=$NOfilt}'}}}"), SolrException.ErrorCode.BAD_REQUEST ); assertQEx("Should Fail as filter in domain becomes null", "QParser yields null, perhaps unresolved parameter reference in: {!v=$NOfilt}", req("q", "*:*", "json.facet", "{cat_s:{type:terms,field:cat_s,domain:{filter:'{!v=$NOfilt}'}}}"), SolrException.ErrorCode.BAD_REQUEST ); // when domain type is invalid assertQEx("Should Fail as domain not of type map", "Expected Map for 'domain', received String=bleh , path=facet/cat_s", req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,domain:bleh}}"), SolrException.ErrorCode.BAD_REQUEST); // when domain = null, should not throw exception assertQ("Should pass as no domain is specified", req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s}}")); // when blockChildren or blockParent is passed but not of string assertQEx("Should Fail as blockChildren is of type map", "Expected string type for param 'blockChildren' but got LinkedHashMap = {} , path=facet/cat_s", req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,domain:{blockChildren:{}}}}"), SolrException.ErrorCode.BAD_REQUEST); assertQEx("Should Fail as blockParent is of type map", "Expected string type for param 'blockParent' but got LinkedHashMap = {} , path=facet/cat_s", req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,domain:{blockParent:{}}}}"), SolrException.ErrorCode.BAD_REQUEST); } @Test public void testOtherErrorCases() throws Exception { Client client = Client.localClient(); client.deleteByQuery("*:*", null); indexSimple(client); // test for sort assertQEx("Should fail as sort is of type list", "Expected string/map for 'sort', received ArrayList=[count desc]", req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:[\"count desc\"]}}"), SolrException.ErrorCode.BAD_REQUEST); assertQEx("Should fail as facet is not of type map", "Expected Map for 'facet', received ArrayList=[{}]", req("q", "*:*", "rows", "0", "json.facet", "[{}]"), SolrException.ErrorCode.BAD_REQUEST); // range facets assertQEx("Should fail as 'other' is of type Map", "Expected list of string or comma separated string values for 'other', " + "received LinkedHashMap={} , path=facet/f", req("q", "*:*", "json.facet", "{f:{type:range, field:num_d, start:10, end:12, gap:1, other:{}}}"), SolrException.ErrorCode.BAD_REQUEST); assertQEx("Should fail as 'include' is of type Map", "Expected list of string or comma separated string values for 'include', " + "received LinkedHashMap={} , path=facet/f", req("q", "*:*", "json.facet", "{f:{type:range, field:num_d, start:10, end:12, gap:1, include:{}}}"), SolrException.ErrorCode.BAD_REQUEST); // missing start 
parameter assertQEx("Should Fail with missing field error", "Missing required parameter: 'start' , path=facet/f", req("q", "*:*", "json.facet", "{f:{type:range, field:num_d}}"), SolrException.ErrorCode.BAD_REQUEST); // missing end parameter assertQEx("Should Fail with missing field error", "Missing required parameter: 'end' , path=facet/f", req("q", "*:*", "json.facet", "{f:{type:range, field:num_d, start:10}}"), SolrException.ErrorCode.BAD_REQUEST); // missing gap parameter assertQEx("Should Fail with missing field error", "Missing required parameter: 'gap' , path=facet/f", req("q", "*:*", "json.facet", "{f:{type:range, field:num_d, start:10, end:12}}"), SolrException.ErrorCode.BAD_REQUEST); // invalid value for facet field assertQEx("Should Fail as args is of type long", "Expected string/map for facet field, received Long=2 , path=facet/facet", req("q", "*:*", "rows", "0", "json.facet.facet.field", "2"), SolrException.ErrorCode.BAD_REQUEST); // invalid value for facet query assertQEx("Should Fail as args is of type long for query", "Expected string/map for facet query, received Long=2 , path=facet/facet", req("q", "*:*", "rows", "0", "json.facet.facet.query", "2"), SolrException.ErrorCode.BAD_REQUEST); // valid facet field assertQ("Should pass as this is valid query", req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s}}")); // invalid perSeg assertQEx("Should fail as perSeg is not of type boolean", "Expected boolean type for param 'perSeg' but got Long = 2 , path=facet/cat_s", req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,perSeg:2}}"), SolrException.ErrorCode.BAD_REQUEST); } public void XtestPercentiles() { AVLTreeDigest catA = new AVLTreeDigest(100); catA.add(4); catA.add(2); AVLTreeDigest catB = new AVLTreeDigest(100); catB.add(-9); catB.add(11); catB.add(-5); AVLTreeDigest all = new AVLTreeDigest(100); all.add(catA); all.add(catB); System.out.println(str(catA)); System.out.println(str(catB)); System.out.println(str(all)); // 2.0 2.2 3.0 3.8 4.0 // -9.0 -8.2 -5.0 7.800000000000001 11.0 // -9.0 -7.3999999999999995 2.0 8.200000000000001 11.0 } private static String str(AVLTreeDigest digest) { StringBuilder sb = new StringBuilder(); for (double d : new double[] {0,.1,.5,.9,1}) { sb.append(" ").append(digest.quantile(d)); } return sb.toString(); } /*** test code to ensure TDigest is working as we expect. 
*/ public void XtestTDigest() throws Exception { AVLTreeDigest t1 = new AVLTreeDigest(100); t1.add(10, 1); t1.add(90, 1); t1.add(50, 1); System.out.println(t1.quantile(0.1)); System.out.println(t1.quantile(0.5)); System.out.println(t1.quantile(0.9)); assertEquals(t1.quantile(0.5), 50.0, 0.01); AVLTreeDigest t2 = new AVLTreeDigest(100); t2.add(130, 1); t2.add(170, 1); t2.add(90, 1); System.out.println(t2.quantile(0.1)); System.out.println(t2.quantile(0.5)); System.out.println(t2.quantile(0.9)); AVLTreeDigest top = new AVLTreeDigest(100); t1.compress(); ByteBuffer buf = ByteBuffer.allocate(t1.byteSize()); // upper bound t1.asSmallBytes(buf); byte[] arr1 = Arrays.copyOf(buf.array(), buf.position()); ByteBuffer rbuf = ByteBuffer.wrap(arr1); top.add(AVLTreeDigest.fromBytes(rbuf)); System.out.println(top.quantile(0.1)); System.out.println(top.quantile(0.5)); System.out.println(top.quantile(0.9)); t2.compress(); ByteBuffer buf2 = ByteBuffer.allocate(t2.byteSize()); // upper bound t2.asSmallBytes(buf2); byte[] arr2 = Arrays.copyOf(buf2.array(), buf2.position()); ByteBuffer rbuf2 = ByteBuffer.wrap(arr2); top.add(AVLTreeDigest.fromBytes(rbuf2)); System.out.println(top.quantile(0.1)); System.out.println(top.quantile(0.5)); System.out.println(top.quantile(0.9)); } public void XtestHLL() { HLLAgg.HLLFactory fac = new HLLAgg.HLLFactory(); HLL hll = fac.getHLL(); hll.addRaw(123456789); hll.addRaw(987654321); } /** atomicly resets the acctual AtomicLong value matches the expected and resets it to 0 */ private static final void assertEqualsAndReset(String msg, long expected, AtomicLong actual) { final long current = actual.getAndSet(0); assertEquals(msg, expected, current); } /** atomicly resets the acctual AtomicLong value matches the expected and resets it to 0 */ private static final void assertEqualsAndReset(long expected, AtomicLong actual) { final long current = actual.getAndSet(0); assertEquals(expected, current); } }
1
28,754
any particular reason for suppressing this change?
apache-lucene-solr
java
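(For orientation: the recorded test file above mostly exercises the JSON Facet API's prelim_sort option. Distilled from the test's own assertions, a typical request body has the shape sketched below — the foo_s/bar_i field names come from the test itself, and this is an illustrative sketch, not an addition to the recorded file.)

json.facet = {
  top_by_stat: {
    type: terms,
    field: foo_s,
    limit: 5,
    prelim_sort: 'count desc',   // cheap preliminary sort picks the candidate buckets
    sort: 'x asc',               // final (re)sort of those buckets by the deferred stat
    facet: { x: 'sum(bar_i)' }   // only computed for the chosen buckets
  }
}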
@@ -67,6 +67,17 @@ public abstract class MainnetDifficultyCalculators { return periodCount > 1 ? adjustForPeriod(periodCount, difficulty) : difficulty; }; + public static DifficultyCalculator<Void> DIFFICULTY_BOMB_REMOVED = + (time, parent, protocolContext) -> { + final BigInteger parentDifficulty = difficulty(parent.getDifficulty()); + final BigInteger difficulty = + ensureMinimumDifficulty( + BigInteger.valueOf(Math.max(1 - (time - parent.getTimestamp()) / 10, -99L)) + .multiply(parentDifficulty.divide(DIFFICULTY_BOUND_DIVISOR)) + .add(parentDifficulty)); + return difficulty; + }; + public static DifficultyCalculator<Void> BYZANTIUM = (time, parent, protocolContext) -> calculateByzantiumDifficulty(time, parent, BYZANTIUM_FAKE_BLOCK_OFFSET);
1
/* * Copyright ConsenSys AG. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * SPDX-License-Identifier: Apache-2.0 */ package org.hyperledger.besu.ethereum.mainnet; import org.hyperledger.besu.ethereum.core.BlockHeader; import org.hyperledger.besu.ethereum.core.Hash; import org.hyperledger.besu.util.uint.UInt256; import java.math.BigInteger; import com.google.common.primitives.Ints; /** Provides the various difficultly calculates used on mainnet hard forks. */ public abstract class MainnetDifficultyCalculators { private static final BigInteger DIFFICULTY_BOUND_DIVISOR = BigInteger.valueOf(2_048L); private static final BigInteger MINIMUM_DIFFICULTY = BigInteger.valueOf(131_072L); private static final long EXPONENTIAL_DIFF_PERIOD = 100_000L; private static final int DURATION_LIMIT = 13; private static final BigInteger BIGINT_2 = BigInteger.valueOf(2L); private static final long BYZANTIUM_FAKE_BLOCK_OFFSET = 2_999_999L; private static final long CONSTANTINOPLE_FAKE_BLOCK_OFFSET = 4_999_999L; private MainnetDifficultyCalculators() {} public static DifficultyCalculator<Void> FRONTIER = (time, parent, protocolContext) -> { final BigInteger parentDifficulty = difficulty(parent.getDifficulty()); final BigInteger adjust = parentDifficulty.divide(DIFFICULTY_BOUND_DIVISOR); BigInteger difficulty; if (time - parent.getTimestamp() < DURATION_LIMIT) { difficulty = adjust.add(parentDifficulty); } else { difficulty = parentDifficulty.subtract(adjust); } difficulty = ensureMinimumDifficulty(difficulty); final long periodCount = (parent.getNumber() + 1) / EXPONENTIAL_DIFF_PERIOD; return periodCount > 1 ? adjustForPeriod(periodCount, difficulty) : difficulty; }; public static DifficultyCalculator<Void> HOMESTEAD = (time, parent, protocolContext) -> { final BigInteger parentDifficulty = difficulty(parent.getDifficulty()); final BigInteger difficulty = ensureMinimumDifficulty( BigInteger.valueOf(Math.max(1 - (time - parent.getTimestamp()) / 10, -99L)) .multiply(parentDifficulty.divide(DIFFICULTY_BOUND_DIVISOR)) .add(parentDifficulty)); final long periodCount = (parent.getNumber() + 1) / EXPONENTIAL_DIFF_PERIOD; return periodCount > 1 ? 
adjustForPeriod(periodCount, difficulty) : difficulty; }; public static DifficultyCalculator<Void> BYZANTIUM = (time, parent, protocolContext) -> calculateByzantiumDifficulty(time, parent, BYZANTIUM_FAKE_BLOCK_OFFSET); public static DifficultyCalculator<Void> CONSTANTINOPLE = (time, parent, protocolContext) -> calculateByzantiumDifficulty(time, parent, CONSTANTINOPLE_FAKE_BLOCK_OFFSET); private static BigInteger calculateByzantiumDifficulty( final long time, final BlockHeader parent, final long fakeBlockOffset) { final BigInteger parentDifficulty = difficulty(parent.getDifficulty()); final boolean hasOmmers = !parent.getOmmersHash().equals(Hash.EMPTY_LIST_HASH); final BigInteger difficulty = ensureMinimumDifficulty( BigInteger.valueOf(byzantiumX(time, parent.getTimestamp(), hasOmmers)) .multiply(parentDifficulty.divide(DIFFICULTY_BOUND_DIVISOR)) .add(parentDifficulty)); final long periodCount = fakeBlockNum(parent.getNumber(), fakeBlockOffset) / EXPONENTIAL_DIFF_PERIOD; return periodCount > 1 ? adjustForPeriod(periodCount, difficulty) : difficulty; } private static long fakeBlockNum(final long parentNum, final long fakeBlockOffset) { final long fakeBlockNumber; if (Long.compareUnsigned(parentNum, fakeBlockOffset) >= 0) { fakeBlockNumber = parentNum - fakeBlockOffset; } else { fakeBlockNumber = 0L; } return fakeBlockNumber; } private static long byzantiumX( final long blockTime, final long parentTime, final boolean hasOmmers) { long x = (blockTime - parentTime) / 9L; if (hasOmmers) { x = 2 - x; } else { x = 1 - x; } return Math.max(x, -99L); } private static BigInteger adjustForPeriod(final long periodCount, final BigInteger difficulty) { return difficulty.add(BIGINT_2.pow(Ints.checkedCast(periodCount - 2))); } private static BigInteger ensureMinimumDifficulty(final BigInteger difficulty) { return difficulty.compareTo(MINIMUM_DIFFICULTY) < 0 ? MINIMUM_DIFFICULTY : difficulty; } private static BigInteger difficulty(final UInt256 value) { return new BigInteger(1, value.getBytes().extractArray()); } }
1
20,312
Hot take: could this instead become `NO_DIFFICULTY_BOMB` and have `HOMESTEAD` wrap it? So this comes first, then `HOMESTEAD` with the bomb adjustment, making the bomb-free calculation the base case?
hyperledger-besu
java
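A minimal sketch of the refactor this review comment proposes, not the project's actual code: a bomb-free `NO_DIFFICULTY_BOMB` calculator becomes the base case and `HOMESTEAD` wraps it to layer the exponential bomb on top. It assumes `DifficultyCalculator`'s single abstract method is named `nextDifficulty`; all other names come from the file above.

// Sketch only: assumes DifficultyCalculator's abstract method is nextDifficulty.
// The bomb-free Homestead adjustment becomes the base case.
public static DifficultyCalculator<Void> NO_DIFFICULTY_BOMB =
    (time, parent, protocolContext) -> {
      final BigInteger parentDifficulty = difficulty(parent.getDifficulty());
      return ensureMinimumDifficulty(
          BigInteger.valueOf(Math.max(1 - (time - parent.getTimestamp()) / 10, -99L))
              .multiply(parentDifficulty.divide(DIFFICULTY_BOUND_DIVISOR))
              .add(parentDifficulty));
    };

// HOMESTEAD wraps the base case and adds the period-based difficulty bomb.
public static DifficultyCalculator<Void> HOMESTEAD =
    (time, parent, protocolContext) -> {
      final BigInteger difficulty =
          NO_DIFFICULTY_BOMB.nextDifficulty(time, parent, protocolContext);
      final long periodCount = (parent.getNumber() + 1) / EXPONENTIAL_DIFF_PERIOD;
      return periodCount > 1 ? adjustForPeriod(periodCount, difficulty) : difficulty;
    };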
@@ -16,6 +16,19 @@
 // variables read from a blob.Bucket.
 // Use NewVariable to construct a *runtimevar.Variable.
 //
+// URLs
+//
+// For runtimevar.OpenVariable URLs, blobvar registers for the scheme "blob".
+// The host+path is used as the blob key. blobvar supports the following URL
+// parameters:
+//   - bucket: The URL to be passed to blob.OpenBucket (required).
+//       blob.OpenBucket will be called once per unique bucket URL.
+//   - decoder: The decoder to use. Defaults to runtimevar.BytesDecoder.
+//       See runtimevar.DecoderByName for supported values.
+//   - wait: The poll interval; supported values are from time.ParseDuration.
+//       Defaults to 30s.
+// Example URL: "blob://myvar.txt?bucket=fileblob:///foo/bar&decoder=string".
+//
 // As
 //
 // blobvar exposes the following types for As:
1
// Copyright 2019 The Go Cloud Development Kit Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package blobvar provides a runtimevar implementation with
// variables read from a blob.Bucket.
// Use NewVariable to construct a *runtimevar.Variable.
//
// As
//
// blobvar exposes the following types for As:
//  - Snapshot: Not supported.
//  - Error: error, which can be passed to blob.ErrorAs.
package blobvar // import "gocloud.dev/runtimevar/blobvar"

import (
	"bytes"
	"context"
	"time"

	"gocloud.dev/blob"
	"gocloud.dev/gcerrors"
	"gocloud.dev/runtimevar"
	"gocloud.dev/runtimevar/driver"
)

// Options sets options.
type Options struct {
	// WaitDuration controls the rate at which the blob is polled.
	// Defaults to 30 seconds.
	WaitDuration time.Duration
}

// NewVariable constructs a *runtimevar.Variable backed by the referenced blob.
// Reads of the blob return raw bytes; provide a decoder to decode the raw bytes
// into the appropriate type for runtimevar.Snapshot.Value.
// See the runtimevar package documentation for examples of decoders.
func NewVariable(bucket *blob.Bucket, key string, decoder *runtimevar.Decoder, opts *Options) (*runtimevar.Variable, error) {
	return runtimevar.New(newWatcher(bucket, key, decoder, opts)), nil
}

func newWatcher(bucket *blob.Bucket, key string, decoder *runtimevar.Decoder, opts *Options) driver.Watcher {
	if opts == nil {
		opts = &Options{}
	}
	return &watcher{
		bucket:  bucket,
		key:     key,
		wait:    driver.WaitDuration(opts.WaitDuration),
		decoder: decoder,
	}
}

// state implements driver.State.
type state struct {
	val        interface{}
	updateTime time.Time
	rawBytes   []byte
	err        error
}

// Value implements driver.State.Value.
func (s *state) Value() (interface{}, error) {
	return s.val, s.err
}

// UpdateTime implements driver.State.UpdateTime.
func (s *state) UpdateTime() time.Time {
	return s.updateTime
}

// As implements driver.State.As.
func (s *state) As(i interface{}) bool {
	return false
}

// errorState returns a new State with err, unless prevS also represents
// the same error, in which case it returns nil.
func errorState(err error, prevS driver.State) driver.State {
	s := &state{err: err}
	if prevS == nil {
		return s
	}
	prev := prevS.(*state)
	if prev.err == nil {
		// New error.
		return s
	}
	if err == prev.err || err.Error() == prev.err.Error() {
		// Same error, return nil to indicate no change.
		return nil
	}
	return s
}

// watcher implements driver.Watcher for configurations provided by the Runtime Configurator
// service.
type watcher struct {
	bucket  *blob.Bucket
	key     string
	wait    time.Duration
	decoder *runtimevar.Decoder
}

// WatchVariable implements driver.WatchVariable.
func (w *watcher) WatchVariable(ctx context.Context, prev driver.State) (driver.State, time.Duration) {
	// Read the blob.
	b, err := w.bucket.ReadAll(ctx, w.key)
	if err != nil {
		return errorState(err, prev), w.wait
	}
	// See if it's the same raw bytes as before.
	if prev != nil && bytes.Equal(b, prev.(*state).rawBytes) {
		// No change!
		return nil, w.wait
	}

	// Decode the value.
	val, err := w.decoder.Decode(b)
	if err != nil {
		return errorState(err, prev), w.wait
	}
	return &state{val: val, updateTime: time.Now(), rawBytes: b}, w.wait
}

// Close implements driver.Close.
func (w *watcher) Close() error {
	return nil
}

// ErrorAs implements driver.ErrorAs.
// Since blobvar uses the blob package, ErrorAs delegates
// to the bucket's ErrorAs method.
func (w *watcher) ErrorAs(err error, i interface{}) bool {
	return w.bucket.ErrorAs(err, i)
}

// ErrorCode implements driver.ErrorCode.
func (*watcher) ErrorCode(err error) gcerrors.ErrorCode {
	// err might have come from blob, in which case use its code.
	return gcerrors.Code(err)
}
1
15,142
I suggest using a scheme like "blobvar". "blob" could potentially collide if we later add another API that uses the blob package as a driver.
google-go-cloud
go
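A hypothetical sketch of what registering under a dedicated "blobvar" scheme could look like, per the review comment. It assumes the runtimevar URL mux registration API of the time (`DefaultURLMux().RegisterVariable` and a `VariableURLOpener` with `OpenVariableURL`); the `URLOpener` shown is illustrative and ignores the decoder/wait query parameters for brevity.

package blobvar

import (
	"context"
	"net/url"

	"gocloud.dev/blob"
	"gocloud.dev/runtimevar"
)

// Scheme is the dedicated URL scheme blobvar would register, leaving "blob"
// free for a hypothetical future driver backed directly by the blob package.
const Scheme = "blobvar"

func init() {
	runtimevar.DefaultURLMux().RegisterVariable(Scheme, &URLOpener{})
}

// URLOpener opens URLs like "blobvar://myvar.txt?bucket=fileblob:///foo/bar".
type URLOpener struct{}

// OpenVariableURL opens the variable at the URL's host+path as the blob key,
// reading the bucket URL from the query string.
func (o *URLOpener) OpenVariableURL(ctx context.Context, u *url.URL) (*runtimevar.Variable, error) {
	bucket, err := blob.OpenBucket(ctx, u.Query().Get("bucket"))
	if err != nil {
		return nil, err
	}
	return NewVariable(bucket, u.Host+u.Path, runtimevar.BytesDecoder, nil)
}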
@@ -116,6 +116,14 @@ def get_listens(user_name):
     for listen in listens:
         listen_data.append(listen.to_api())
 
+    latest_listen = db_conn.fetch_listens(
+        user_name,
+        limit=1,
+        to_ts=max_ts,
+    )
+    latest_listen_ts = latest_listen[0].ts_since_epoch if len(latest_listen) > 0 else 0
+
+
     if min_ts:
         listen_data = listen_data[::-1]
 
1
import ujson
from flask import Blueprint, request, jsonify, current_app
from werkzeug.exceptions import BadRequest, InternalServerError, Unauthorized, ServiceUnavailable, NotFound
from listenbrainz.db.exceptions import DatabaseException
from listenbrainz.webserver.decorators import crossdomain
from listenbrainz import webserver
import listenbrainz.db.user as db_user
from listenbrainz.webserver.rate_limiter import ratelimit
import listenbrainz.webserver.redis_connection as redis_connection
from listenbrainz.webserver.views.api_tools import insert_payload, log_raise_400, validate_listen, MAX_LISTEN_SIZE, MAX_ITEMS_PER_GET,\
    DEFAULT_ITEMS_PER_GET, LISTEN_TYPE_SINGLE, LISTEN_TYPE_IMPORT, LISTEN_TYPE_PLAYING_NOW
import time

api_bp = Blueprint('api_v1', __name__)


@api_bp.route("/submit-listens", methods=["POST", "OPTIONS"])
@crossdomain(headers="Authorization, Content-Type")
@ratelimit()
def submit_listen():
    """
    Submit listens to the server. A user token (found on https://listenbrainz.org/profile/ )
    must be provided in the Authorization header!

    Listens should be submitted for tracks when the user has listened to half the track or
    4 minutes of the track, whichever is lower. If the user hasn't listened to 4 minutes or
    half the track, it doesn't fully count as a listen and should not be submitted.

    For complete details on the format of the JSON to be POSTed to this endpoint, see :ref:`json-doc`.

    :reqheader Authorization: Token <user token>
    :statuscode 200: listen(s) accepted.
    :statuscode 400: invalid JSON sent, see error message for details.
    :statuscode 401: invalid authorization. See error message for details.
    :resheader Content-Type: *application/json*
    """
    user = _validate_auth_header()

    raw_data = request.get_data()
    try:
        data = ujson.loads(raw_data.decode("utf-8"))
    except ValueError as e:
        log_raise_400("Cannot parse JSON document: %s" % e, raw_data)

    try:
        payload = data['payload']
        if len(payload) == 0:
            return "success"

        if len(raw_data) > len(payload) * MAX_LISTEN_SIZE:
            log_raise_400("JSON document is too large. In aggregate, listens may not "
                          "be larger than %d characters." % MAX_LISTEN_SIZE, payload)

        if data['listen_type'] not in ('playing_now', 'single', 'import'):
            log_raise_400("JSON document requires a valid listen_type key.", payload)

        listen_type = _get_listen_type(data['listen_type'])
        if (listen_type == LISTEN_TYPE_SINGLE or listen_type == LISTEN_TYPE_PLAYING_NOW) and len(payload) > 1:
            log_raise_400("JSON document contains more than listen for a single/playing_now. "
                          "It should contain only one.", payload)
    except KeyError:
        log_raise_400("Invalid JSON document submitted.", raw_data)

    # validate listens to make sure json is okay
    for listen in payload:
        validate_listen(listen, listen_type)

    try:
        insert_payload(payload, user, listen_type=_get_listen_type(data['listen_type']))
    except ServiceUnavailable as e:
        raise
    except Exception as e:
        raise InternalServerError("Something went wrong. Please try again.")

    return jsonify({'status': 'ok'})


@api_bp.route("/user/<user_name>/listens")
@ratelimit()
def get_listens(user_name):
    """
    Get listens for user ``user_name``. The format for the JSON returned is defined in our :ref:`json-doc`.

    If none of the optional arguments are given, this endpoint will return the
    :data:`~webserver.views.api.DEFAULT_ITEMS_PER_GET` most recent listens. The optional ``max_ts``
    and ``min_ts`` UNIX epoch timestamps control at which point in time to start returning listens.
    You may specify max_ts or min_ts, but not both in one call. Listens are always returned in
    descending timestamp order.

    :param max_ts: If you specify a ``max_ts`` timestamp, listens with listened_at less than (but not
        including) this value will be returned.
    :param min_ts: If you specify a ``min_ts`` timestamp, listens with listened_at greater than (but not
        including) this value will be returned.
    :param count: Optional, number of listens to return.
        Default: :data:`~webserver.views.api.DEFAULT_ITEMS_PER_GET` .
        Max: :data:`~webserver.views.api.MAX_ITEMS_PER_GET`
    :statuscode 200: Yay, you have data!
    :resheader Content-Type: *application/json*
    """
    max_ts = _parse_int_arg("max_ts")
    min_ts = _parse_int_arg("min_ts")

    # if no max given, use now()
    if max_ts and min_ts:
        log_raise_400("You may only specify max_ts or min_ts, not both.")

    # If none are given, start with now and go down
    if max_ts == None and min_ts == None:
        max_ts = int(time.time())

    db_conn = webserver.create_influx(current_app)
    listens = db_conn.fetch_listens(
        user_name,
        limit=min(_parse_int_arg("count", DEFAULT_ITEMS_PER_GET), MAX_ITEMS_PER_GET),
        from_ts=min_ts,
        to_ts=max_ts,
    )
    listen_data = []
    for listen in listens:
        listen_data.append(listen.to_api())

    if min_ts:
        listen_data = listen_data[::-1]

    return jsonify({'payload': {
        'user_id': user_name,
        'count': len(listen_data),
        'listens': listen_data,
    }})


@api_bp.route("/user/<user_name>/playing-now")
@ratelimit()
def get_playing_now(user_name):
    """
    Get the listen being played right now for user ``user_name``.

    This endpoint returns a JSON document with a single listen in the same format as the
    ``/user/<user_name>/listens`` endpoint, with one key difference, there will only be one listen
    returned at maximum and the listen will not contain a ``listened_at`` element.

    The format for the JSON returned is defined in our :ref:`json-doc`.

    :statuscode 200: Yay, you have data!
    :resheader Content-Type: *application/json*
    """
    user = db_user.get_by_mb_id(user_name)
    if user is None:
        raise NotFound("Cannot find user: %s" % user_name)

    playing_now_listen = redis_connection._redis.get_playing_now(user['id'])
    listen_data = []
    count = 0
    if playing_now_listen:
        count += 1
        listen_data = [{
            'track_metadata': playing_now_listen.data,
        }]

    return jsonify({
        'payload': {
            'count': count,
            'user_id': user_name,
            'playing_now': True,
            'listens': listen_data,
        },
    })


@api_bp.route('/latest-import', methods=['GET', 'POST', 'OPTIONS'])
@crossdomain(headers='Authorization, Content-Type')
@ratelimit()
def latest_import():
    """
    Get and update the timestamp of the newest listen submitted in previous imports to ListenBrainz.

    In order to get the timestamp for a user, make a GET request to this endpoint. The data returned
    will be JSON of the following format:

    {
        'musicbrainz_id': the MusicBrainz ID of the user,
        'latest_import': the timestamp of the newest listen submitted in previous imports. Defaults to 0
    }

    :param user_name: the MusicBrainz ID of the user whose data is needed
    :statuscode 200: Yay, you have data!
    :resheader Content-Type: *application/json*

    In order to update the timestamp of a user, you'll have to provide a user token in the Authorization
    Header. User tokens can be found on https://listenbrainz.org/profile/ .

    The JSON that needs to be posted must contain a field named `ts` in the root with a valid unix timestamp.

    :reqheader Authorization: Token <user token>
    :statuscode 200: latest import timestamp updated
    :statuscode 400: invalid JSON sent, see error message for details.
    :statuscode 401: invalid authorization. See error message for details.
    """
    if request.method == 'GET':
        user_name = request.args.get('user_name', '')
        user = db_user.get_by_mb_id(user_name)
        if user is None:
            raise NotFound("Cannot find user: {user_name}".format(user_name=user_name))
        return jsonify({
            'musicbrainz_id': user['musicbrainz_id'],
            'latest_import': 0 if not user['latest_import'] else int(user['latest_import'].strftime('%s'))
        })
    elif request.method == 'POST':
        user = _validate_auth_header()

        try:
            ts = ujson.loads(request.get_data()).get('ts', 0)
        except ValueError:
            raise BadRequest('Invalid data sent')

        try:
            db_user.increase_latest_import(user['musicbrainz_id'], int(ts))
        except DatabaseException as e:
            current_app.logger.error("Error while updating latest import: {}".format(e))
            raise InternalServerError('Could not update latest_import, try again')

        return jsonify({'status': 'ok'})


@api_bp.route('/validate-token', methods=['GET'])
@ratelimit()
def validate_token():
    """
    Check whether a User Token is a valid entry in the database.

    In order to query this endpoint, send a GET request.
    A JSON response will be returned, with one of three codes.

    :statuscode 200: The user token is valid/invalid.
    :statuscode 400: No token was sent to the endpoint.
    """
    auth_token = request.args.get('token', '')
    if not auth_token:
        raise BadRequest("You need to provide an Authorization token.")
    user = db_user.get_by_token(auth_token)
    if user is None:
        return jsonify({
            'code': 200,
            'message': 'Token invalid.'
        })
    else:
        return jsonify({
            'code': 200,
            'message': 'Token valid.'
        })


def _parse_int_arg(name, default=None):
    value = request.args.get(name)
    if value:
        try:
            return int(value)
        except ValueError:
            raise BadRequest("Invalid %s argument: %s" % (name, value))
    else:
        return default


def _validate_auth_header():
    auth_token = request.headers.get('Authorization')
    if not auth_token:
        raise Unauthorized("You need to provide an Authorization header.")
    try:
        auth_token = auth_token.split(" ")[1]
    except IndexError:
        raise Unauthorized("Provided Authorization header is invalid.")

    user = db_user.get_by_token(auth_token)
    if user is None:
        raise Unauthorized("Invalid authorization token.")

    return user


def _get_listen_type(listen_type):
    return {
        'single': LISTEN_TYPE_SINGLE,
        'import': LISTEN_TYPE_IMPORT,
        'playing_now': LISTEN_TYPE_PLAYING_NOW
    }.get(listen_type)
1
15,321
This assumes that the user has at least one listen; there should be a guard around it, because that won't always be the case.
metabrainz-listenbrainz-server
py
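A short sketch of the guarded lookup the comment asks for, using the names from the patch above (`db_conn.fetch_listens`, `ts_since_epoch`): only index into the result when the user actually has listens, otherwise fall back to a timestamp of 0.

# Guarded lookup: avoid IndexError for users with no listens at all.
latest_listen = db_conn.fetch_listens(
    user_name,
    limit=1,
    to_ts=max_ts,
)
latest_listen_ts = latest_listen[0].ts_since_epoch if latest_listen else 0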
@@ -365,7 +365,7 @@ namespace pwiz.Skyline.Properties {
         }
         
         /// <summary>
-        ///   Looks up a localized string similar to An error occurred while trying to display the document &apos;{0}&apos;.
+        ///   Looks up a localized string similar to An error occurred while trying to display the document &apos;{0}&apos;.
///There might be something wrong with default web browser on this computer..
         /// </summary>
         public static string ActionTutorial_ExtractTutorial_An_error_occurred_while_trying_to_display_the_document___0____ {
1
//------------------------------------------------------------------------------ // <auto-generated> // This code was generated by a tool. // Runtime Version:4.0.30319.42000 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ namespace pwiz.Skyline.Properties { using System; /// <summary> /// A strongly-typed resource class, for looking up localized strings, etc. /// </summary> // This class was auto-generated by the StronglyTypedResourceBuilder // class via a tool like ResGen or Visual Studio. // To add or remove a member, edit your .ResX file then rerun ResGen // with the /str option, or rebuild your VS project. [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "15.0.0.0")] [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] public class Resources { private static global::System.Resources.ResourceManager resourceMan; private static global::System.Globalization.CultureInfo resourceCulture; [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] internal Resources() { } /// <summary> /// Returns the cached ResourceManager instance used by this class. /// </summary> [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] public static global::System.Resources.ResourceManager ResourceManager { get { if (object.ReferenceEquals(resourceMan, null)) { global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("pwiz.Skyline.Properties.Resources", typeof(Resources).Assembly); resourceMan = temp; } return resourceMan; } } /// <summary> /// Overrides the current thread's CurrentUICulture property for all /// resource lookups using this strongly typed resource class. /// </summary> [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] public static global::System.Globalization.CultureInfo Culture { get { return resourceCulture; } set { resourceCulture = value; } } /// <summary> /// Looks up a localized string similar to Failed to find a valid Analyst installation. /// </summary> public static string AbiMethodExporter_EnsureAnalyst_Failed_to_find_a_valid_Analyst_installation { get { return ResourceManager.GetString("AbiMethodExporter_EnsureAnalyst_Failed_to_find_a_valid_Analyst_installation", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Waiting for Analyst to start.... /// </summary> public static string AbiMethodExporter_EnsureAnalyst_Waiting_for_Analyst_to_start { get { return ResourceManager.GetString("AbiMethodExporter_EnsureAnalyst_Waiting_for_Analyst_to_start", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Working.... /// </summary> public static string AbiMethodExporter_EnsureAnalyst_Working { get { return ResourceManager.GetString("AbiMethodExporter_EnsureAnalyst_Working", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retention Time. 
/// </summary> public static string AbstractChromGraphItem_CustomizeXAxis_Retention_Time { get { return ResourceManager.GetString("AbstractChromGraphItem_CustomizeXAxis_Retention_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Intensity. /// </summary> public static string AbstractChromGraphItem_CustomizeYAxis_Intensity { get { return ResourceManager.GetString("AbstractChromGraphItem_CustomizeYAxis_Intensity", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation scheme is set to multiplexing but file does not appear to contain multiplexed acquisition data.. /// </summary> public static string AbstractDemultiplexer_AnalyzeFile_Isolation_scheme_is_set_to_multiplexing_but_file_does_not_appear_to_contain_multiplexed_acquisition_data_ { get { return ResourceManager.GetString("AbstractDemultiplexer_AnalyzeFile_Isolation_scheme_is_set_to_multiplexing_but_fil" + "e_does_not_appear_to_contain_multiplexed_acquisition_data_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot save to {0}.. /// </summary> public static string AbstractDiaExporter_Export_Cannot_save_to__0__ { get { return ResourceManager.GetString("AbstractDiaExporter_Export_Cannot_save_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exporting Isolation List. /// </summary> public static string AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List { get { return ResourceManager.GetString("AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exporting Isolation List ({0} cycles out of {0}). /// </summary> public static string AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List__0__cycles_out_of__0__ { get { return ResourceManager.GetString("AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List__0__cycles_o" + "ut_of__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exporting Isolation List ({0} cycles out of {1}). /// </summary> public static string AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List__0__cycles_out_of__1__ { get { return ResourceManager.GetString("AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List__0__cycles_o" + "ut_of__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Scan in imported file appears to be missing an isolation window center.. /// </summary> public static string AbstractIsoWindowMapper_Add_Scan_in_imported_file_appears_to_be_missing_an_isolation_window_center_ { get { return ResourceManager.GetString("AbstractIsoWindowMapper_Add_Scan_in_imported_file_appears_to_be_missing_an_isolat" + "ion_window_center_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The isolation width for a scan in the imported file could not be determined.. /// </summary> public static string AbstractIsoWindowMapper_Add_The_isolation_width_for_a_scan_in_the_imported_file_could_not_be_determined_ { get { return ResourceManager.GetString("AbstractIsoWindowMapper_Add_The_isolation_width_for_a_scan_in_the_imported_file_c" + "ould_not_be_determined_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Tried to get a window mask for {0}, a spectrum with previously unobserved isolation windows. Demultiplexing requires a repeating cycle of isolation windows.. 
/// </summary> public static string AbstractIsoWindowMapper_GetWindowMask_Tried_to_get_a_window_mask_for__0___a_spectrum_with_previously_unobserved_isolation_windows__Demultiplexing_requires_a_repeating_cycle_of_isolation_windows_ { get { return ResourceManager.GetString("AbstractIsoWindowMapper_GetWindowMask_Tried_to_get_a_window_mask_for__0___a_spect" + "rum_with_previously_unobserved_isolation_windows__Demultiplexing_requires_a_repe" + "ating_cycle_of_isolation_windows_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The number of required transitions {0} exceeds the maximum {1}. /// </summary> public static string AbstractMassListExporter_Export_The_number_of_required_transitions__0__exceeds_the_maximum__1__ { get { return ResourceManager.GetString("AbstractMassListExporter_Export_The_number_of_required_transitions__0__exceeds_th" + "e_maximum__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check max concurrent {0} count.. /// </summary> public static string AbstractMassListExporter_ExportScheduledBuckets_Check_max_concurrent__0__count { get { return ResourceManager.GetString("AbstractMassListExporter_ExportScheduledBuckets_Check_max_concurrent__0__count", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check max concurrent {0} count and optimization step count.. /// </summary> public static string AbstractMassListExporter_ExportScheduledBuckets_Check_max_concurrent__0__count_and_optimization_step_count { get { return ResourceManager.GetString("AbstractMassListExporter_ExportScheduledBuckets_Check_max_concurrent__0__count_an" + "d_optimization_step_count", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to schedule the following peptides with the current settings:. /// </summary> public static string AbstractMassListExporter_ExportScheduledBuckets_Failed_to_schedule_the_following_peptides_with_the_current_settings { get { return ResourceManager.GetString("AbstractMassListExporter_ExportScheduledBuckets_Failed_to_schedule_the_following_" + "peptides_with_the_current_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Maximum transitions per file required. /// </summary> public static string AbstractMassListExporter_ExportScheduledBuckets_Maximum_transitions_per_file_required { get { return ResourceManager.GetString("AbstractMassListExporter_ExportScheduledBuckets_Maximum_transitions_per_file_requ" + "ired", resourceCulture); } } /// <summary> /// Looks up a localized string similar to precursors. /// </summary> public static string AbstractMassListExporter_ExportScheduledBuckets_precursors { get { return ResourceManager.GetString("AbstractMassListExporter_ExportScheduledBuckets_precursors", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The required peptide {0} cannot be scheduled. /// </summary> public static string AbstractMassListExporter_ExportScheduledBuckets_The_required_peptide__0__cannot_be_scheduled { get { return ResourceManager.GetString("AbstractMassListExporter_ExportScheduledBuckets_The_required_peptide__0__cannot_b" + "e_scheduled", resourceCulture); } } /// <summary> /// Looks up a localized string similar to transitions. 
/// </summary> public static string AbstractMassListExporter_ExportScheduledBuckets_transitions { get { return ResourceManager.GetString("AbstractMassListExporter_ExportScheduledBuckets_transitions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following modifications could not be interpreted.. /// </summary> public static string AbstractModificationMatcher_UninterpretedMods_The_following_modifications_could_not_be_interpreted { get { return ResourceManager.GetString("AbstractModificationMatcher_UninterpretedMods_The_following_modifications_could_n" + "ot_be_interpreted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} = {1}. /// </summary> public static string AbstractModificationMatcherFoundMatches__0__equals__1__ { get { return ResourceManager.GetString("AbstractModificationMatcherFoundMatches__0__equals__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to m/z. /// </summary> public static string AbstractMSGraphItem_CustomizeXAxis_MZ { get { return ResourceManager.GetString("AbstractMSGraphItem_CustomizeXAxis_MZ", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Intensity. /// </summary> public static string AbstractMSGraphItem_CustomizeYAxis_Intensity { get { return ResourceManager.GetString("AbstractMSGraphItem_CustomizeYAxis_Intensity", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occured while uploading to Panorama, would you like to go to Panorama?. /// </summary> public static string AbstractPanoramaPublishClient_UploadSharedZipFile_An_error_occured_while_uploading_to_Panorama__would_you_like_to_go_to_Panorama_ { get { return ResourceManager.GetString("AbstractPanoramaPublishClient_UploadSharedZipFile_An_error_occured_while_uploadin" + "g_to_Panorama__would_you_like_to_go_to_Panorama_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Document import was cancelled on the server. Would you like to go to Panorama?. /// </summary> public static string AbstractPanoramaPublishClient_UploadSharedZipFile_Document_import_was_cancelled_on_the_server__Would_you_like_to_go_to_Panorama_ { get { return ResourceManager.GetString("AbstractPanoramaPublishClient_UploadSharedZipFile_Document_import_was_cancelled_o" + "n_the_server__Would_you_like_to_go_to_Panorama_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Upload succeeded, would you like to view the file in Panorama?. /// </summary> public static string AbstractPanoramaPublishClient_UploadSharedZipFile_Upload_succeeded__would_you_like_to_view_the_file_in_Panorama_ { get { return ResourceManager.GetString("AbstractPanoramaPublishClient_UploadSharedZipFile_Upload_succeeded__would_you_lik" + "e_to_view_the_file_in_Panorama_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to score = {0:F06}. /// </summary> public static string AbstractSpectrumGraphItem_AddAnnotations_ { get { return ResourceManager.GetString("AbstractSpectrumGraphItem_AddAnnotations_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to rank {0}. /// </summary> public static string AbstractSpectrumGraphItem_GetLabel_rank__0__ { get { return ResourceManager.GetString("AbstractSpectrumGraphItem_GetLabel_rank__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Files extracted to: {0}. 
/// </summary> public static string ActionTutorial_client_DownloadFileCompleted_File_saved_at___0_ { get { return ResourceManager.GetString("ActionTutorial_client_DownloadFileCompleted_File_saved_at___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error {0}. /// </summary> public static string ActionTutorial_DownloadTutorials_Error__0_ { get { return ResourceManager.GetString("ActionTutorial_DownloadTutorials_Error__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred while trying to display the document &apos;{0}&apos;. ///There might be something wrong with default web browser on this computer.. /// </summary> public static string ActionTutorial_ExtractTutorial_An_error_occurred_while_trying_to_display_the_document___0____ { get { return ResourceManager.GetString("ActionTutorial_ExtractTutorial_An_error_occurred_while_trying_to_display_the_docu" + "ment___0____", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Downloading to: {0}{1}Tutorial will open in browser when download is complete.. /// </summary> public static string ActionTutorial_LongWaitDlgAction_Downloading_to___0__1_Tutorial_will_open_in_browser_when_download_is_complete_ { get { return ResourceManager.GetString("ActionTutorial_LongWaitDlgAction_Downloading_to___0__1_Tutorial_will_open_in_brow" + "ser_when_download_is_complete_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Downloading Tutorial ZIP File. /// </summary> public static string ActionTutorial_LongWaitDlgAction_Downloading_Tutorial_Zip_File { get { return ResourceManager.GetString("ActionTutorial_LongWaitDlgAction_Downloading_Tutorial_Zip_File", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Extracting Tutorial ZIP File. /// </summary> public static string ActionTutorial_LongWaitDlgAction_Extracting_Tutorial_Zip_File_in_the_same_directory_ { get { return ResourceManager.GetString("ActionTutorial_LongWaitDlgAction_Extracting_Tutorial_Zip_File_in_the_same_directo" + "ry_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap add_pro32 { get { object obj = ResourceManager.GetObject("add_pro32", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Please choose the iRT calculator you would like to add.. /// </summary> public static string AddIrtCalculatorDlg_OkDialog_Please_choose_the_iRT_calculator_you_would_like_to_add { get { return ResourceManager.GetString("AddIrtCalculatorDlg_OkDialog_Please_choose_the_iRT_calculator_you_would_like_to_a" + "dd", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please specify a path to an existing iRT database.. /// </summary> public static string AddIrtCalculatorDlg_OkDialog_Please_specify_a_path_to_an_existing_iRT_database { get { return ResourceManager.GetString("AddIrtCalculatorDlg_OkDialog_Please_specify_a_path_to_an_existing_iRT_database", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} is not an iRT database.. 
/// </summary> public static string AddIrtCalculatorDlg_OkDialog_The_file__0__is_not_an_iRT_database { get { return ResourceManager.GetString("AddIrtCalculatorDlg_OkDialog_The_file__0__is_not_an_iRT_database", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} does not exist.. /// </summary> public static string AddIrtCalculatorDlgOkDialogThe_file__0__does_not_exist { get { return ResourceManager.GetString("AddIrtCalculatorDlgOkDialogThe_file__0__does_not_exist", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1 new peptide will be added to the {0}.. /// </summary> public static string AddIrtPeptidesDlg_AddIrtPeptidesDlg_1_new_peptide_will_be_added_to_the__0__ { get { return ResourceManager.GetString("AddIrtPeptidesDlg_AddIrtPeptidesDlg_1_new_peptide_will_be_added_to_the__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1 run was not converted due to insufficient correlation.. /// </summary> public static string AddIrtPeptidesDlg_AddIrtPeptidesDlg_1_run_was_not_converted_due_to_insufficient_correlation { get { return ResourceManager.GetString("AddIrtPeptidesDlg_AddIrtPeptidesDlg_1_run_was_not_converted_due_to_insufficient_c" + "orrelation", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1 run was successfully converted.. /// </summary> public static string AddIrtPeptidesDlg_AddIrtPeptidesDlg_1_run_was_successfully_converted { get { return ResourceManager.GetString("AddIrtPeptidesDlg_AddIrtPeptidesDlg_1_run_was_successfully_converted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed. /// </summary> public static string AddIrtPeptidesDlg_AddIrtPeptidesDlg_Failed { get { return ResourceManager.GetString("AddIrtPeptidesDlg_AddIrtPeptidesDlg_Failed", resourceCulture); } } /// <summary> /// Looks up a localized string similar to iRT database. /// </summary> public static string AddIrtPeptidesDlg_AddIrtPeptidesDlg_iRT_database { get { return ResourceManager.GetString("AddIrtPeptidesDlg_AddIrtPeptidesDlg_iRT_database", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No new peptides will be added to the {0}.. /// </summary> public static string AddIrtPeptidesDlg_AddIrtPeptidesDlg_No_new_peptides_will_be_added_to_the__0__ { get { return ResourceManager.GetString("AddIrtPeptidesDlg_AddIrtPeptidesDlg_No_new_peptides_will_be_added_to_the__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Regression. /// </summary> public static string AddIrtPeptidesDlg_AddIrtPeptidesDlg_Regression { get { return ResourceManager.GetString("AddIrtPeptidesDlg_AddIrtPeptidesDlg_Regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Regression Attempted. /// </summary> public static string AddIrtPeptidesDlg_AddIrtPeptidesDlg_Regression_Attempted { get { return ResourceManager.GetString("AddIrtPeptidesDlg_AddIrtPeptidesDlg_Regression_Attempted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Regression Refined. /// </summary> public static string AddIrtPeptidesDlg_AddIrtPeptidesDlg_Regression_Refined { get { return ResourceManager.GetString("AddIrtPeptidesDlg_AddIrtPeptidesDlg_Regression_Refined", resourceCulture); } } /// <summary> /// Looks up a localized string similar to spectral library. 
/// </summary> public static string AddIrtPeptidesDlg_AddIrtPeptidesDlg_spectral_library { get { return ResourceManager.GetString("AddIrtPeptidesDlg_AddIrtPeptidesDlg_spectral_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Success. /// </summary> public static string AddIrtPeptidesDlg_AddIrtPeptidesDlg_Success { get { return ResourceManager.GetString("AddIrtPeptidesDlg_AddIrtPeptidesDlg_Success", resourceCulture); } } /// <summary> /// Looks up a localized string similar to iRT. /// </summary> public static string AddIrtPeptidesDlg_dataGridView_CellContentClick_iRT { get { return ResourceManager.GetString("AddIrtPeptidesDlg_dataGridView_CellContentClick_iRT", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Spectral Libraries. /// </summary> public static string AddIrtSpectralLibrary_btnBrowseFile_Click_Spectral_Libraries { get { return ResourceManager.GetString("AddIrtSpectralLibrary_btnBrowseFile_Click_Spectral_Libraries", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Only BiblioSpec and Chromatogram libraries contain enough retention time information to support this operation.. /// </summary> public static string AddIrtSpectralLibrary_OkDialog_Only_BiblioSpec_and_Chromatogram_libraries_contain_enough_retention_time_information_to_support_this_operation { get { return ResourceManager.GetString("AddIrtSpectralLibrary_OkDialog_Only_BiblioSpec_and_Chromatogram_libraries_contain" + "_enough_retention_time_information_to_support_this_operation", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please choose a non-redundant library.. /// </summary> public static string AddIrtSpectralLibrary_OkDialog_Please_choose_a_non_redundant_library { get { return ResourceManager.GetString("AddIrtSpectralLibrary_OkDialog_Please_choose_a_non_redundant_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please choose the library you would like to add.. /// </summary> public static string AddIrtSpectralLibrary_OkDialog_Please_choose_the_library_you_would_like_to_add { get { return ResourceManager.GetString("AddIrtSpectralLibrary_OkDialog_Please_choose_the_library_you_would_like_to_add", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please specify a path to an existing spectral library.. /// </summary> public static string AddIrtSpectralLibrary_OkDialog_Please_specify_a_path_to_an_existing_spectral_library { get { return ResourceManager.GetString("AddIrtSpectralLibrary_OkDialog_Please_specify_a_path_to_an_existing_spectral_libr" + "ary", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} appears to be a redundant library.. /// </summary> public static string AddIrtSpectralLibrary_OkDialog_The_file__0__appears_to_be_a_redundant_library { get { return ResourceManager.GetString("AddIrtSpectralLibrary_OkDialog_The_file__0__appears_to_be_a_redundant_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} does not exist.. /// </summary> public static string AddIrtSpectralLibrary_OkDialog_The_file__0__does_not_exist { get { return ResourceManager.GetString("AddIrtSpectralLibrary_OkDialog_The_file__0__does_not_exist", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} is not a BiblioSpec or Chromatogram library.. 
/// </summary> public static string AddIrtSpectralLibrary_OkDialog_The_file__0__is_not_a_BiblioSpec_or_Chromatogram_library { get { return ResourceManager.GetString("AddIrtSpectralLibrary_OkDialog_The_file__0__is_not_a_BiblioSpec_or_Chromatogram_l" + "ibrary", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Measured. /// </summary> public static string AddIrtsResultsDlg_dataGridView_CellContentClick_Measured { get { return ResourceManager.GetString("AddIrtsResultsDlg_dataGridView_CellContentClick_Measured", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The current document contains {0} peptides not in this standard with measured retention times. It is suggested that you use a small number of peptides that can be easily measured in a single injection for an iRT standard. Choose a number of peptides below to have Skyline select automatically from the current document.. /// </summary> public static string AddIrtStandardsDlg_AddIrtStandardsDlg_MessagePeptidesExcluded { get { return ResourceManager.GetString("AddIrtStandardsDlg_AddIrtStandardsDlg_MessagePeptidesExcluded", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please choose the optimization library you would like to add.. /// </summary> public static string AddOptimizationDlg_OkDialog_Please_choose_the_optimization_library_you_would_like_to_add_ { get { return ResourceManager.GetString("AddOptimizationDlg_OkDialog_Please_choose_the_optimization_library_you_would_like" + "_to_add_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please specify a path to an existing optimization library.. /// </summary> public static string AddOptimizationDlg_OkDialog_Please_specify_a_path_to_an_existing_optimization_library_ { get { return ResourceManager.GetString("AddOptimizationDlg_OkDialog_Please_specify_a_path_to_an_existing_optimization_lib" + "rary_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} does not exist.. /// </summary> public static string AddOptimizationDlg_OkDialog_The_file__0__does_not_exist_ { get { return ResourceManager.GetString("AddOptimizationDlg_OkDialog_The_file__0__does_not_exist_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} is not an optimization library.. /// </summary> public static string AddOptimizationDlg_OkDialog_The_file__0__is_not_an_optimization_library_ { get { return ResourceManager.GetString("AddOptimizationDlg_OkDialog_The_file__0__is_not_an_optimization_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1 new optimization will be added to the library.. /// </summary> public static string AddOptimizationsDlg_AddOptimizationsDlg__1_new_optimization_will_be_added_to_the_library_ { get { return ResourceManager.GetString("AddOptimizationsDlg_AddOptimizationsDlg__1_new_optimization_will_be_added_to_the_" + "library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No new optimizations will be added to the library.. /// </summary> public static string AddOptimizationsDlg_AddOptimizationsDlg_No_new_optimizations_will_be_added_to_the_library_ { get { return ResourceManager.GetString("AddOptimizationsDlg_AddOptimizationsDlg_No_new_optimizations_will_be_added_to_the" + "_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following files are not valid library input files:. 
/// </summary> public static string AddPathsDlg_OkDialog_The_following_files_are_not_valid_library_input_files_ { get { return ResourceManager.GetString("AddPathsDlg_OkDialog_The_following_files_are_not_valid_library_input_files_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following files could not be found:. /// </summary> public static string AddPathsDlg_OkDialog_The_following_files_could_not_be_found_ { get { return ResourceManager.GetString("AddPathsDlg_OkDialog_The_following_files_could_not_be_found_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Comparing Imported Files. /// </summary> public static string AddPeakCompareDlg_OkDialog_Comparing_Imported_Files { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_Comparing_Imported_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Comparing Models. /// </summary> public static string AddPeakCompareDlg_OkDialog_Comparing_Models { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_Comparing_Models", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Comparison name cannot be empty.. /// </summary> public static string AddPeakCompareDlg_OkDialog_Comparison_name_cannot_be_empty_ { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_Comparison_name_cannot_be_empty_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Document has no eligible chromatograms for analysis. Valid chromatograms must not be decoys or iRT standards.. /// </summary> public static string AddPeakCompareDlg_OkDialog_Document_has_no_eligible_chromatograms_for_analysis___Valid_chromatograms_must_not_be_decoys_or_iRT_standards_ { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_Document_has_no_eligible_chromatograms_for_analysis___" + "Valid_chromatograms_must_not_be_decoys_or_iRT_standards_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error applying imported peak boundaries: {0}. /// </summary> public static string AddPeakCompareDlg_OkDialog_Error_applying_imported_peak_boundaries___0_ { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_Error_applying_imported_peak_boundaries___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error comparing model peak boundaries: {0}. /// </summary> public static string AddPeakCompareDlg_OkDialog_Error_comparing_model_peak_boundaries___0_ { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_Error_comparing_model_peak_boundaries___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File path cannot be empty.. /// </summary> public static string AddPeakCompareDlg_OkDialog_File_path_cannot_be_empty_ { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_File_path_cannot_be_empty_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File path field must contain a path to a valid file.. /// </summary> public static string AddPeakCompareDlg_OkDialog_File_path_field_must_contain_a_path_to_a_valid_file_ { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_File_path_field_must_contain_a_path_to_a_valid_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Model must be trained before it can be used for peak boundary comparison.. 
/// </summary> public static string AddPeakCompareDlg_OkDialog_Model_must_be_trained_before_it_can_be_used_for_peak_boundary_comparison_ { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_Model_must_be_trained_before_it_can_be_used_for_peak_b" + "oundary_comparison_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The current file or model has no q values or scores to analyze. Either q values or scores are necessary to compare peak picking tools.. /// </summary> public static string AddPeakCompareDlg_OkDialog_The_current_file_or_model_has_no_q_values_or_scores_to_analyze___Either_q_values_or_scores_are_necessary_to_compare_peak_picking_tools_ { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_The_current_file_or_model_has_no_q_values_or_scores_to" + "_analyze___Either_q_values_or_scores_are_necessary_to_compare_peak_picking_tools" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The imported file does not contain any peak boundaries for {0} transition group / file pairs. These chromatograms will be treated as if no boundary was selected.. /// </summary> public static string AddPeakCompareDlg_OkDialog_The_imported_file_does_not_contain_any_peak_boundaries_for__0__transition_group___file_pairs___These_chromatograms_will_be_treated_as_if_no_boundary_was_selected_ { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_The_imported_file_does_not_contain_any_peak_boundaries" + "_for__0__transition_group___file_pairs___These_chromatograms_will_be_treated_as_" + "if_no_boundary_was_selected_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The selected file or model does not assign peak boundaries to any chromatograms in the document. Please select a different model or file.. /// </summary> public static string AddPeakCompareDlg_OkDialog_The_selected_file_or_model_does_not_assign_peak_boundaries_to_any_chromatograms_in_the_document___Please_select_a_different_model_or_file_ { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_The_selected_file_or_model_does_not_assign_peak_bounda" + "ries_to_any_chromatograms_in_the_document___Please_select_a_different_model_or_f" + "ile_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The selected model is already included in the list of comparisons. Please choose another model.. /// </summary> public static string AddPeakCompareDlg_OkDialog_The_selected_model_is_already_included_in_the_list_of_comparisons__Please_choose_another_model_ { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_The_selected_model_is_already_included_in_the_list_of_" + "comparisons__Please_choose_another_model_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is already an imported file with the current name. Please choose another name. /// </summary> public static string AddPeakCompareDlg_OkDialog_There_is_already_an_imported_file_with_the_current_name___Please_choose_another_name { get { return ResourceManager.GetString("AddPeakCompareDlg_OkDialog_There_is_already_an_imported_file_with_the_current_nam" + "e___Please_choose_another_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A retention time predictor with that name already exists. Please choose a new name.. 
/// </summary> public static string AddRetentionTimePredictorDlg_OkDialog_A_retention_time_predictor_with_that_name_already_exists__Please_choose_a_new_name_ { get { return ResourceManager.GetString("AddRetentionTimePredictorDlg_OkDialog_A_retention_time_predictor_with_that_name_a" + "lready_exists__Please_choose_a_new_name_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adduct &quot;{0}&quot; calls for labeling more {1} atoms than are found in the molecule {2}. /// </summary> public static string Adduct_ApplyToMolecule_Adduct___0___calls_for_labeling_more__1__atoms_than_are_found_in_the_molecule__2_ { get { return ResourceManager.GetString("Adduct_ApplyToMolecule_Adduct___0___calls_for_labeling_more__1__atoms_than_are_fo" + "und_in_the_molecule__2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adduct &quot;{0}&quot; calls for removing more {1} atoms than are found in the molecule {2}. /// </summary> public static string Adduct_ApplyToMolecule_Adduct___0___calls_for_removing_more__1__atoms_than_are_found_in_the_molecule__2_ { get { return ResourceManager.GetString("Adduct_ApplyToMolecule_Adduct___0___calls_for_removing_more__1__atoms_than_are_fo" + "und_in_the_molecule__2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Could not parse isotopic label description &quot;{0}&quot; in adduct description &quot;{1}&quot;. ///Isotopic labels in adduct descriptions should be in the form of isotope counts (e.g. &quot;2Cl37&quot; or &quot;2Cl374N15&quot;), ///or a mass shift (e.g. &quot;1.234&quot; or &quot;(-1.234)&quot;). ///Recognized isotopes include: {2}. /// </summary> public static string Adduct_ParseDescription_isotope_error { get { return ResourceManager.GetString("Adduct_ParseDescription_isotope_error", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A tool requires Program:{0} Version:{1} and it is not specified with the --tool-program-macro and --tool-program-path commands. Tool Installation Canceled.. /// </summary> public static string AddZipToolHelper_FindProgramPath_A_tool_requires_Program__0__Version__1__and_it_is_not_specified_with_the___tool_program_macro_and___tool_program_path_commands__Tool_Installation_Canceled_ { get { return ResourceManager.GetString("AddZipToolHelper_FindProgramPath_A_tool_requires_Program__0__Version__1__and_it_i" + "s_not_specified_with_the___tool_program_macro_and___tool_program_path_commands__" + "Tool_Installation_Canceled_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Package installation not handled in SkylineRunner. If you have already handled package installation use the --tool-ignore-required-packages flag. /// </summary> public static string AddZipToolHelper_InstallProgram_Error__Package_installation_not_handled_in_SkylineRunner___If_you_have_already_handled_package_installation_use_the___tool_ignore_required_packages_flag { get { return ResourceManager.GetString("AddZipToolHelper_InstallProgram_Error__Package_installation_not_handled_in_Skylin" + "eRunner___If_you_have_already_handled_package_installation_use_the___tool_ignore" + "_required_packages_flag", resourceCulture); } } /// <summary> /// Looks up a localized string similar to and a conflicting tool. 
/// </summary> public static string AddZipToolHelper_ShouldOverwrite__and_a_conflicting_tool { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwrite__and_a_conflicting_tool", resourceCulture); } } /// <summary> /// Looks up a localized string similar to in the file {0}. /// </summary> public static string AddZipToolHelper_ShouldOverwrite__in_the_file__0_ { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwrite__in_the_file__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Conflicting report: {0}. /// </summary> public static string AddZipToolHelper_ShouldOverwrite_Conflicting_report___0_ { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwrite_Conflicting_report___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Conflicting reports: {0}. /// </summary> public static string AddZipToolHelper_ShouldOverwrite_Conflicting_reports___0_ { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwrite_Conflicting_reports___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Conflicting tool: {0}. /// </summary> public static string AddZipToolHelper_ShouldOverwrite_Conflicting_tool___0_ { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwrite_Conflicting_tool___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: There are {0} conflicting reports. /// </summary> public static string AddZipToolHelper_ShouldOverwrite_Error__There_are__0__conflicting_reports { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwrite_Error__There_are__0__conflicting_reports", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: There is a conflicting report. /// </summary> public static string AddZipToolHelper_ShouldOverwrite_Error__There_is_a_conflicting_report { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwrite_Error__There_is_a_conflicting_report", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: There is a conflicting tool. /// </summary> public static string AddZipToolHelper_ShouldOverwrite_Error__There_is_a_conflicting_tool { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwrite_Error__There_is_a_conflicting_tool", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Overwriting report: {0}. /// </summary> public static string AddZipToolHelper_ShouldOverwrite_Overwriting_report___0_ { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwrite_Overwriting_report___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Overwriting reports: {0}. /// </summary> public static string AddZipToolHelper_ShouldOverwrite_Overwriting_reports___0_ { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwrite_Overwriting_reports___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Overwriting tool: {0}. /// </summary> public static string AddZipToolHelper_ShouldOverwrite_Overwriting_tool___0_ { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwrite_Overwriting_tool___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please specify &apos;overwrite&apos; or &apos;parallel&apos; with the --tool-zip-conflict-resolution command.. 
/// </summary> public static string AddZipToolHelper_ShouldOverwrite_Please_specify__overwrite__or__parallel__with_the___tool_zip_conflict_resolution_command_ { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwrite_Please_specify__overwrite__or__parallel__with_th" + "e___tool_zip_conflict_resolution_command_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There are annotations with conflicting names. Please use the --tool-zip-overwrite-annotations command.. /// </summary> public static string AddZipToolHelper_ShouldOverwriteAnnotations_There_are_annotations_with_conflicting_names__Please_use_the___tool_zip_overwrite_annotations_command_ { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwriteAnnotations_There_are_annotations_with_conflictin" + "g_names__Please_use_the___tool_zip_overwrite_annotations_command_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There are conflicting annotations. Keeping existing.. /// </summary> public static string AddZipToolHelper_ShouldOverwriteAnnotations_There_are_conflicting_annotations__Keeping_existing_ { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwriteAnnotations_There_are_conflicting_annotations__Ke" + "eping_existing_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There are conflicting annotations. Overwriting.. /// </summary> public static string AddZipToolHelper_ShouldOverwriteAnnotations_There_are_conflicting_annotations__Overwriting_ { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwriteAnnotations_There_are_conflicting_annotations__Ov" + "erwriting_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: the annotation {0} is being overwritten. /// </summary> public static string AddZipToolHelper_ShouldOverwriteAnnotations_Warning__the_annotation__0__is_being_overwritten { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwriteAnnotations_Warning__the_annotation__0__is_being_" + "overwritten", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: the annotation {0} may not be what your tool requires.. /// </summary> public static string AddZipToolHelper_ShouldOverwriteAnnotations_Warning__the_annotation__0__may_not_be_what_your_tool_requires_ { get { return ResourceManager.GetString("AddZipToolHelper_ShouldOverwriteAnnotations_Warning__the_annotation__0__may_not_b" + "e_what_your_tool_requires_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid byte buffer for checksum.. /// </summary> public static string AdlerChecksum_MakeForBuff_Invalid_byte_buffer_for_checksum { get { return ResourceManager.GetString("AdlerChecksum_MakeForBuff_Invalid_byte_buffer_for_checksum", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure attempting to calculate a checksum for the file {0}. /// </summary> public static string AdlerChecksum_MakeForFile_Failure_attempting_to_calculate_a_checksum_for_the_file__0__ { get { return ResourceManager.GetString("AdlerChecksum_MakeForFile_Failure_attempting_to_calculate_a_checksum_for_the_file" + "__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid string &quot;{0}&quot; for checksum. 
/// </summary> public static string AdlerChecksum_MakeForString_Invalid_string___0___for_checksum { get { return ResourceManager.GetString("AdlerChecksum_MakeForString_Invalid_string___0___for_checksum", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unknown. /// </summary> public static string AdlerChecksum_ToString_Unknown { get { return ResourceManager.GetString("AdlerChecksum_ToString_Unknown", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} CV. /// </summary> public static string AggregateOp_AxisTitleCv { get { return ResourceManager.GetString("AggregateOp_AxisTitleCv", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} CV (%). /// </summary> public static string AggregateOp_AxisTitleCvPercent { get { return ResourceManager.GetString("AggregateOp_AxisTitleCvPercent", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Abort. /// </summary> public static string AlertDlg_GetDefaultButtonText__Abort { get { return ResourceManager.GetString("AlertDlg_GetDefaultButtonText__Abort", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Ignore. /// </summary> public static string AlertDlg_GetDefaultButtonText__Ignore { get { return ResourceManager.GetString("AlertDlg_GetDefaultButtonText__Ignore", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;No. /// </summary> public static string AlertDlg_GetDefaultButtonText__No { get { return ResourceManager.GetString("AlertDlg_GetDefaultButtonText__No", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Retry. /// </summary> public static string AlertDlg_GetDefaultButtonText__Retry { get { return ResourceManager.GetString("AlertDlg_GetDefaultButtonText__Retry", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Yes. /// </summary> public static string AlertDlg_GetDefaultButtonText__Yes { get { return ResourceManager.GetString("AlertDlg_GetDefaultButtonText__Yes", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cancel. /// </summary> public static string AlertDlg_GetDefaultButtonText_Cancel { get { return ResourceManager.GetString("AlertDlg_GetDefaultButtonText_Cancel", resourceCulture); } } /// <summary> /// Looks up a localized string similar to OK. /// </summary> public static string AlertDlg_GetDefaultButtonText_OK { get { return ResourceManager.GetString("AlertDlg_GetDefaultButtonText_OK", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Message truncated. Press Ctrl+C to copy entire message to the clipboard.. /// </summary> public static string AlertDlg_TruncateMessage_Message_truncated__Press_Ctrl_C_to_copy_entire_message_to_the_clipboard_ { get { return ResourceManager.GetString("AlertDlg_TruncateMessage_Message_truncated__Press_Ctrl_C_to_copy_entire_message_t" + "o_the_clipboard_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0}:{1}. /// </summary> public static string AlignedFile_AlignLibraryRetentionTimes__0__1__ { get { return ResourceManager.GetString("AlignedFile_AlignLibraryRetentionTimes__0__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Aligned Time. 
/// </summary> public static string AlignmentForm_UpdateGraph_Aligned_Time { get { return ResourceManager.GetString("AlignmentForm_UpdateGraph_Aligned_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Alignment of {0} to {1}. /// </summary> public static string AlignmentForm_UpdateGraph_Alignment_of__0__to__1_ { get { return ResourceManager.GetString("AlignmentForm_UpdateGraph_Alignment_of__0__to__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Outliers. /// </summary> public static string AlignmentForm_UpdateGraph_Outliers { get { return ResourceManager.GetString("AlignmentForm_UpdateGraph_Outliers", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptides. /// </summary> public static string AlignmentForm_UpdateGraph_Peptides { get { return ResourceManager.GetString("AlignmentForm_UpdateGraph_Peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptides Refined. /// </summary> public static string AlignmentForm_UpdateGraph_Peptides_Refined { get { return ResourceManager.GetString("AlignmentForm_UpdateGraph_Peptides_Refined", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Regression line. /// </summary> public static string AlignmentForm_UpdateGraph_Regression_line { get { return ResourceManager.GetString("AlignmentForm_UpdateGraph_Regression_line", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Time from {0}. /// </summary> public static string AlignmentForm_UpdateGraph_Time_from__0__ { get { return ResourceManager.GetString("AlignmentForm_UpdateGraph_Time_from__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Time from Regression. /// </summary> public static string AlignmentForm_UpdateGraph_Time_from_Regression { get { return ResourceManager.GetString("AlignmentForm_UpdateGraph_Time_from_Regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Waiting for retention time alignment. /// </summary> public static string AlignmentForm_UpdateGraph_Waiting_for_retention_time_alignment { get { return ResourceManager.GetString("AlignmentForm_UpdateGraph_Waiting_for_retention_time_alignment", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cancel import. /// </summary> public static string AllChromatogramsGraph_btnCancel_Click_Cancel_import { get { return ResourceManager.GetString("AllChromatogramsGraph_btnCancel_Click_Cancel_import", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cancel file. /// </summary> public static string AllChromatogramsGraph_btnCancelFile_Click_Cancel_file { get { return ResourceManager.GetString("AllChromatogramsGraph_btnCancelFile_Click_Cancel_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cancel file import. /// </summary> public static string AllChromatogramsGraph_Cancel_Cancel_file_import { get { return ResourceManager.GetString("AllChromatogramsGraph_Cancel_Cancel_file_import", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Close. /// </summary> public static string AllChromatogramsGraph_Finish_Close { get { return ResourceManager.GetString("AllChromatogramsGraph_Finish_Close", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove failed file. 
/// </summary> public static string AllChromatogramsGraph_RemoveFailedFile_Remove_failed_file { get { return ResourceManager.GetString("AllChromatogramsGraph_RemoveFailedFile_Remove_failed_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retry import results. /// </summary> public static string AllChromatogramsGraph_Retry_Retry_import_results { get { return ResourceManager.GetString("AllChromatogramsGraph_Retry_Retry_import_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} of {1} files. /// </summary> public static string AllChromatogramsGraph_UpdateStatus__0__of__1__files { get { return ResourceManager.GetString("AllChromatogramsGraph_UpdateStatus__0__of__1__files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Hide. /// </summary> public static string AllChromatogramsGraph_UpdateStatus_Hide { get { return ResourceManager.GetString("AllChromatogramsGraph_UpdateStatus_Hide", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Joining chromatograms.... /// </summary> public static string AllChromatogramsGraph_UpdateStatus_Joining_chromatograms___ { get { return ResourceManager.GetString("AllChromatogramsGraph_UpdateStatus_Joining_chromatograms___", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap AllIonsStatusButton { get { object obj = ResourceManager.GetObject("AllIonsStatusButton", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Invalid amino acid &apos;{0}&apos; found in the value &apos;{1}&apos;.. /// </summary> public static string AminoAcid_ValidateAAList_Invalid_amino_acid__0__found_in_the_value__1__ { get { return ResourceManager.GetString("AminoAcid_ValidateAAList_Invalid_amino_acid__0__found_in_the_value__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The amino acid &apos;{0}&apos; is repeated in the value &apos;{1}&apos;.. /// </summary> public static string AminoAcid_ValidateAAList_The_amino_acid__0__is_repeated_in_the_value__1__ { get { return ResourceManager.GetString("AminoAcid_ValidateAAList_The_amino_acid__0__is_repeated_in_the_value__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Annotation: . /// </summary> public static string Annotation_DisambiguationPrefix_Annotation__ { get { return ResourceManager.GetString("Annotation_DisambiguationPrefix_Annotation__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptides. /// </summary> public static string AnnotationDef_AnnotationTarget_Peptides { get { return ResourceManager.GetString("AnnotationDef_AnnotationTarget_Peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor Results. /// </summary> public static string AnnotationDef_AnnotationTarget_PrecursorResults { get { return ResourceManager.GetString("AnnotationDef_AnnotationTarget_PrecursorResults", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursors. /// </summary> public static string AnnotationDef_AnnotationTarget_Precursors { get { return ResourceManager.GetString("AnnotationDef_AnnotationTarget_Precursors", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Proteins. 
/// </summary> public static string AnnotationDef_AnnotationTarget_Proteins { get { return ResourceManager.GetString("AnnotationDef_AnnotationTarget_Proteins", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Replicates. /// </summary> public static string AnnotationDef_AnnotationTarget_Replicates { get { return ResourceManager.GetString("AnnotationDef_AnnotationTarget_Replicates", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transition Results. /// </summary> public static string AnnotationDef_AnnotationTarget_TransitionResults { get { return ResourceManager.GetString("AnnotationDef_AnnotationTarget_TransitionResults", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transitions. /// </summary> public static string AnnotationDef_AnnotationTarget_Transitions { get { return ResourceManager.GetString("AnnotationDef_AnnotationTarget_Transitions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid value. /// </summary> public static string AnnotationDef_ValidationErrorMessage_Invalid_value { get { return ResourceManager.GetString("AnnotationDef_ValidationErrorMessage_Invalid_value", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Value must be a number. /// </summary> public static string AnnotationDef_ValidationErrorMessage_Value_must_be_a_number { get { return ResourceManager.GetString("AnnotationDef_ValidationErrorMessage_Value_must_be_a_number", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Annotations:. /// </summary> public static string AnnotationDefList_Label_Annotations { get { return ResourceManager.GetString("AnnotationDefList_Label_Annotations", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Define Annotations. /// </summary> public static string AnnotationDefList_Title_Define_Annotations { get { return ResourceManager.GetString("AnnotationDefList_Title_Define_Annotations", resourceCulture); } } /// <summary> /// Looks up a localized string similar to False. /// </summary> public static string AnnotationHelper_GetReplicateIndicices_False { get { return ResourceManager.GetString("AnnotationHelper_GetReplicateIndicices_False", resourceCulture); } } /// <summary> /// Looks up a localized string similar to True. /// </summary> public static string AnnotationHelper_GetReplicateIndicices_True { get { return ResourceManager.GetString("AnnotationHelper_GetReplicateIndicices_True", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Annotation conflict for &apos;{0}&apos; found attempting to merge annotations.. /// </summary> public static string Annotations_Merge_Annotation_conflict_for__0__found_attempting_to_merge_annotations { get { return ResourceManager.GetString("Annotations_Merge_Annotation_conflict_for__0__found_attempting_to_merge_annotatio" + "ns", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Not enough data. /// </summary> public static string AreaCVHistogram2DGraphPane_Draw_Not_enough_data { get { return ResourceManager.GetString("AreaCVHistogram2DGraphPane_Draw_Not_enough_data", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Calculating .... 
/// </summary> public static string AreaCVHistogram2DGraphPane_UpdateGraph_Calculating____ { get { return ResourceManager.GetString("AreaCVHistogram2DGraphPane_UpdateGraph_Calculating____", resourceCulture); } } /// <summary> /// Looks up a localized string similar to CV. /// </summary> public static string AreaCVHistogram2DGraphPane_UpdateGraph_CV { get { return ResourceManager.GetString("AreaCVHistogram2DGraphPane_UpdateGraph_CV", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Log10 Mean Area. /// </summary> public static string AreaCvHistogram2DGraphPane_UpdateGraph_Log10_Mean_Area { get { return ResourceManager.GetString("AreaCvHistogram2DGraphPane_UpdateGraph_Log10_Mean_Area", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Median: {0}. /// </summary> public static string AreaCVHistogram2DGraphPane_UpdateGraph_Median___0_ { get { return ResourceManager.GetString("AreaCVHistogram2DGraphPane_UpdateGraph_Median___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Calculating .... /// </summary> public static string AreaCVHistogramGraphPane_AddLabels_Calculating____ { get { return ResourceManager.GetString("AreaCVHistogramGraphPane_AddLabels_Calculating____", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Median: {0}. /// </summary> public static string AreaCVHistogramGraphPane_AddLabels_Median___0_ { get { return ResourceManager.GetString("AreaCVHistogramGraphPane_AddLabels_Median___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Not enough data. /// </summary> public static string AreaCVHistogramGraphPane_AddLabels_Not_enough_data { get { return ResourceManager.GetString("AreaCVHistogramGraphPane_AddLabels_Not_enough_data", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Below {0}: {1}. /// </summary> public static string AreaCVHistogramGraphPane_UpdateGraph_Below__0____1_ { get { return ResourceManager.GetString("AreaCVHistogramGraphPane_UpdateGraph_Below__0____1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to CV. /// </summary> public static string AreaCVHistogramGraphPane_UpdateGraph_CV { get { return ResourceManager.GetString("AreaCVHistogramGraphPane_UpdateGraph_CV", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Frequency. /// </summary> public static string AreaCVHistogramGraphPane_UpdateGraph_Frequency { get { return ResourceManager.GetString("AreaCVHistogramGraphPane_UpdateGraph_Frequency", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Global standards. /// </summary> public static string AreaCVToolbar_UpdateUI_Global_standards { get { return ResourceManager.GetString("AreaCVToolbar_UpdateUI_Global_standards", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid CV cutoff entered. /// </summary> public static string AreaCvToolbarProperties_btnOk_Click_Invalid_CV_cutoff_entered { get { return ResourceManager.GetString("AreaCvToolbarProperties_btnOk_Click_Invalid_CV_cutoff_entered", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid maximum CV entered. 
/// </summary> public static string AreaCVToolbarProperties_btnOk_Click_Invalid_maximum_CV_entered { get { return ResourceManager.GetString("AreaCVToolbarProperties_btnOk_Click_Invalid_maximum_CV_entered", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid maximum frequency entered. /// </summary> public static string AreaCvToolbarProperties_btnOk_Click_Invalid_maximum_frequency_entered { get { return ResourceManager.GetString("AreaCvToolbarProperties_btnOk_Click_Invalid_maximum_frequency_entered", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid maximum log10 area entered. /// </summary> public static string AreaCVToolbarProperties_btnOk_Click_Invalid_maximum_log_10_area_entered { get { return ResourceManager.GetString("AreaCVToolbarProperties_btnOk_Click_Invalid_maximum_log_10_area_entered", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid minimum log10 area entered. /// </summary> public static string AreaCVToolbarProperties_btnOk_Click_Invalid_minimum_log_10_area_entered { get { return ResourceManager.GetString("AreaCVToolbarProperties_btnOk_Click_Invalid_minimum_log_10_area_entered", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid Q value entered. /// </summary> public static string AreaCvToolbarProperties_btnOk_Click_Invalid_Q_value_entered { get { return ResourceManager.GetString("AreaCvToolbarProperties_btnOk_Click_Invalid_Q_value_entered", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The maximum log10 area has to be greater than the minimum log10 area. /// </summary> public static string AreaCVToolbarProperties_btnOk_Click_The_maximum_log10_area_has_to_be_greater_than_the_minimum_log10_area { get { return ResourceManager.GetString("AreaCVToolbarProperties_btnOk_Click_The_maximum_log10_area_has_to_be_greater_than" + "_the_minimum_log10_area", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peak Area. /// </summary> public static string AreaPeptideGraphPane_UpdateAxes_Peak_Area { get { return ResourceManager.GetString("AreaPeptideGraphPane_UpdateAxes_Peak_Area", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Expected. /// </summary> public static string AreaReplicateGraphPane_InitFromData_Expected { get { return ResourceManager.GetString("AreaReplicateGraphPane_InitFromData_Expected", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library. /// </summary> public static string AreaReplicateGraphPane_InitFromData_Library { get { return ResourceManager.GetString("AreaReplicateGraphPane_InitFromData_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No results available. /// </summary> public static string AreaReplicateGraphPane_UpdateGraph_No_results_available { get { return ResourceManager.GetString("AreaReplicateGraphPane_UpdateGraph_No_results_available", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peak Area. /// </summary> public static string AreaReplicateGraphPane_UpdateGraph_Peak_Area { get { return ResourceManager.GetString("AreaReplicateGraphPane_UpdateGraph_Peak_Area", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peak Area Normalized. 
/// </summary> public static string AreaReplicateGraphPane_UpdateGraph_Peak_Area_Normalized { get { return ResourceManager.GetString("AreaReplicateGraphPane_UpdateGraph_Peak_Area_Normalized", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peak Area Percentage. /// </summary> public static string AreaReplicateGraphPane_UpdateGraph_Peak_Area_Percentage { get { return ResourceManager.GetString("AreaReplicateGraphPane_UpdateGraph_Peak_Area_Percentage", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peak Area Ratio To {0}. /// </summary> public static string AreaReplicateGraphPane_UpdateGraph_Peak_Area_Ratio_To__0_ { get { return ResourceManager.GetString("AreaReplicateGraphPane_UpdateGraph_Peak_Area_Ratio_To__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Percent of Regression Peak Area. /// </summary> public static string AreaReplicateGraphPane_UpdateGraph_Percent_of_Regression_Peak_Area { get { return ResourceManager.GetString("AreaReplicateGraphPane_UpdateGraph_Percent_of_Regression_Peak_Area", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Select a peptide to see the peak area graph. /// </summary> public static string AreaReplicateGraphPane_UpdateGraph_Select_a_peptide_to_see_the_peak_area_graph { get { return ResourceManager.GetString("AreaReplicateGraphPane_UpdateGraph_Select_a_peptide_to_see_the_peak_area_graph", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Step {0}. /// </summary> public static string AreaReplicateGraphPane_UpdateGraph_Step__0_ { get { return ResourceManager.GetString("AreaReplicateGraphPane_UpdateGraph_Step__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Associated proteins. /// </summary> public static string AssociateProteinsDlg_ApplyChanges_Associated_proteins { get { return ResourceManager.GetString("AssociateProteinsDlg_ApplyChanges_Associated_proteins", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No matches were found using the imported fasta file.. /// </summary> public static string AssociateProteinsDlg_FindProteinMatchesWithFasta_No_matches_were_found_using_the_imported_fasta_file_ { get { return ResourceManager.GetString("AssociateProteinsDlg_FindProteinMatchesWithFasta_No_matches_were_found_using_the_" + "imported_fasta_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No background proteome defined, see the Digestion tab in Peptide Settings for more information.. /// </summary> public static string AssociateProteinsDlg_UseBackgroundProteome_No_background_proteome_defined { get { return ResourceManager.GetString("AssociateProteinsDlg_UseBackgroundProteome_No_background_proteome_defined", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No matches were found using the background proteome.. /// </summary> public static string AssociateProteinsDlg_UseBackgroundProteome_No_matches_were_found_using_the_background_proteome_ { get { return ResourceManager.GetString("AssociateProteinsDlg_UseBackgroundProteome_No_matches_were_found_using_the_backgr" + "ound_proteome_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There was an error reading from the file.. 
/// </summary> public static string AssociateProteinsDlg_UseFastaFile_There_was_an_error_reading_from_the_file_ { get { return ResourceManager.GetString("AssociateProteinsDlg_UseFastaFile_There_was_an_error_reading_from_the_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Intensity. /// </summary> public static string AsyncChromatogramsGraph_AsyncChromatogramsGraph_Intensity { get { return ResourceManager.GetString("AsyncChromatogramsGraph_AsyncChromatogramsGraph_Intensity", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retention Time. /// </summary> public static string AsyncChromatogramsGraph_AsyncChromatogramsGraph_Retention_Time { get { return ResourceManager.GetString("AsyncChromatogramsGraph_AsyncChromatogramsGraph_Retention_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0}, sample {1}. /// </summary> public static string AsyncChromatogramsGraph_Render__0___sample__1_ { get { return ResourceManager.GetString("AsyncChromatogramsGraph_Render__0___sample__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Canceled. /// </summary> public static string AsyncChromatogramsGraph2_AsyncChromatogramsGraph2_Canceled { get { return ResourceManager.GetString("AsyncChromatogramsGraph2_AsyncChromatogramsGraph2_Canceled", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Examining background proteome for uniqueness constraints. /// </summary> public static string BackgroundProteome_GetUniquenessDict_Examining_background_proteome_for_uniqueness_constraints { get { return ResourceManager.GetString("BackgroundProteome_GetUniquenessDict_Examining_background_proteome_for_uniqueness" + "_constraints", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Background Proteomes:. /// </summary> public static string BackgroundProteomeList_Label_Background_Proteomes { get { return ResourceManager.GetString("BackgroundProteomeList_Label_Background_Proteomes", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Background Proteomes. /// </summary> public static string BackgroundProteomeList_Title_Edit_Background_Proteomes { get { return ResourceManager.GetString("BackgroundProteomeList_Title_Edit_Background_Proteomes", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed updating background proteome {0}.. /// </summary> public static string BackgroundProteomeManager_LoadBackground_Failed_updating_background_proteome__0__ { get { return ResourceManager.GetString("BackgroundProteomeManager_LoadBackground_Failed_updating_background_proteome__0__" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Resolving protein details for {0} proteome. /// </summary> public static string BackgroundProteomeManager_LoadBackground_Resolving_protein_details_for__0__proteome { get { return ResourceManager.GetString("BackgroundProteomeManager_LoadBackground_Resolving_protein_details_for__0__proteo" + "me", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to rename temporary file to {0}.. 
/// </summary> public static string BackgroundProteomeManager_LoadBackground_Unable_to_rename_temporary_file_to__0__ { get { return ResourceManager.GetString("BackgroundProteomeManager_LoadBackground_Unable_to_rename_temporary_file_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to After minimizing, the cache file will be reduced to {0:0%} of its current size. /// </summary> public static string BackgroundWorker_UpdateStatistics_After_minimizing_the_cache_file_will_be_reduced_to__0__its_current_size { get { return ResourceManager.GetString("BackgroundWorker_UpdateStatistics_After_minimizing_the_cache_file_will_be_reduced" + "_to__0__its_current_size", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Computing space savings ({0}% complete). /// </summary> public static string BackgroundWorker_UpdateStatistics_Computing_space_savings__0__complete { get { return ResourceManager.GetString("BackgroundWorker_UpdateStatistics_Computing_space_savings__0__complete", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The current size of the cache file is {0:fs}. /// </summary> public static string BackgroundWorker_UpdateStatistics_The_current_size_of_the_cache_file_is__0__fs { get { return ResourceManager.GetString("BackgroundWorker_UpdateStatistics_The_current_size_of_the_cache_file_is__0__fs", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Data truncation in library header. File may be corrupted.. /// </summary> public static string BiblioSpecLibrary_Load_Data_truncation_in_library_header_File_may_be_corrupted { get { return ResourceManager.GetString("BiblioSpecLibrary_Load_Data_truncation_in_library_header_File_may_be_corrupted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Data truncation in spectrum header. File may be corrupted.. /// </summary> public static string BiblioSpecLibrary_Load_Data_truncation_in_spectrum_header_File_may_be_corrupted { get { return ResourceManager.GetString("BiblioSpecLibrary_Load_Data_truncation_in_spectrum_header_File_may_be_corrupted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Data truncation in spectrum sequence. File may be corrupted.. /// </summary> public static string BiblioSpecLibrary_Load_Data_truncation_in_spectrum_sequence_File_may_be_corrupted { get { return ResourceManager.GetString("BiblioSpecLibrary_Load_Data_truncation_in_spectrum_sequence_File_may_be_corrupted" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed loading library &apos;{0}&apos;.. /// </summary> public static string BiblioSpecLibrary_Load_Failed_loading_library__0__ { get { return ResourceManager.GetString("BiblioSpecLibrary_Load_Failed_loading_library__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid precursor charge found. File may be corrupted.. /// </summary> public static string BiblioSpecLibrary_Load_Invalid_precursor_charge_found_File_may_be_corrupted { get { return ResourceManager.GetString("BiblioSpecLibrary_Load_Invalid_precursor_charge_found_File_may_be_corrupted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading {0} library.
/// </summary> public static string BiblioSpecLibrary_Load_Loading__0__library { get { return ResourceManager.GetString("BiblioSpecLibrary_Load_Loading__0__library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure trying to read peaks. /// </summary> public static string BiblioSpecLibrary_ReadSpectrum_Failure_trying_to_read_peaks { get { return ResourceManager.GetString("BiblioSpecLibrary_ReadSpectrum_Failure_trying_to_read_peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Legacy BiblioSpec Library. /// </summary> public static string BiblioSpecLibrary_SpecFilter_Legacy_BiblioSpec_Library { get { return ResourceManager.GetString("BiblioSpecLibrary_SpecFilter_Legacy_BiblioSpec_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The library built successfully. Spectra matching the following peptides had multiple ambiguous peptide matches and were excluded:. /// </summary> public static string BiblioSpecLiteBuilder_AmbiguousMatches_The_library_built_successfully__Spectra_matching_the_following_peptides_had_multiple_ambiguous_peptide_matches_and_were_excluded_ { get { return ResourceManager.GetString("BiblioSpecLiteBuilder_AmbiguousMatches_The_library_built_successfully__Spectra_ma" + "tching_the_following_peptides_had_multiple_ambiguous_peptide_matches_and_were_ex" + "cluded_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Building {0} library. /// </summary> public static string BiblioSpecLiteBuilder_BuildLibrary_Building__0__library { get { return ResourceManager.GetString("BiblioSpecLiteBuilder_BuildLibrary_Building__0__library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed trying to build the library {0}.. /// </summary> public static string BiblioSpecLiteBuilder_BuildLibrary_Failed_trying_to_build_the_library__0__ { get { return ResourceManager.GetString("BiblioSpecLiteBuilder_BuildLibrary_Failed_trying_to_build_the_library__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed trying to build the redundant library {0}.. /// </summary> public static string BiblioSpecLiteBuilder_BuildLibrary_Failed_trying_to_build_the_redundant_library__0__ { get { return ResourceManager.GetString("BiblioSpecLiteBuilder_BuildLibrary_Failed_trying_to_build_the_redundant_library__" + "0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Preparing to build library. /// </summary> public static string BiblioSpecLiteBuilder_BuildLibrary_Preparing_to_build_library { get { return ResourceManager.GetString("BiblioSpecLiteBuilder_BuildLibrary_Preparing_to_build_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Embedded. /// </summary> public static string BiblioSpecLiteBuilder_Embedded { get { return ResourceManager.GetString("BiblioSpecLiteBuilder_Embedded", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Aligning library retention times. /// </summary> public static string BiblioSpecLiteLibrary_CalculateFileRetentionTimeAlignments_Aligning_library_retention_times { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_CalculateFileRetentionTimeAlignments_Aligning_library_reten" + "tion_times", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed reading library header for {0}.. 
/// </summary> public static string BiblioSpecLiteLibrary_CreateCache_Failed_reading_library_header_for__0__ { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_CreateCache_Failed_reading_library_header_for__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No spectra were found in the library {0}. /// </summary> public static string BiblioSpecLiteLibrary_CreateCache_No_spectra_were_found_in_the_library__0__ { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_CreateCache_No_spectra_were_found_in_the_library__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to get a valid count of spectra in the library {0}. /// </summary> public static string BiblioSpecLiteLibrary_CreateCache_Unable_to_get_a_valid_count_of_spectra_in_the_library__0__ { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_CreateCache_Unable_to_get_a_valid_count_of_spectra_in_the_l" + "ibrary__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed attempting to filter redundant library {0} to {1}. /// </summary> public static string BiblioSpecLiteLibrary_DeleteDataFiles_Failed_attempting_to_filter_redundant_library__0__to__1_ { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_DeleteDataFiles_Failed_attempting_to_filter_redundant_libra" + "ry__0__to__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Removing library runs from document library.. /// </summary> public static string BiblioSpecLiteLibrary_DeleteDataFiles_Removing_library_runs_from_document_library_ { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_DeleteDataFiles_Removing_library_runs_from_document_library" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Building binary cache for {0} library. /// </summary> public static string BiblioSpecLiteLibrary_Load_Building_binary_cache_for__0__library { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_Load_Building_binary_cache_for__0__library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed loading library &apos;{0}&apos;.. /// </summary> public static string BiblioSpecLiteLibrary_Load_Failed_loading_library__0__ { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_Load_Failed_loading_library__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid precursor charge {0} found. File may be corrupted.. /// </summary> public static string BiblioSpecLiteLibrary_Load_Invalid_precursor_charge__0__found__File_may_be_corrupted { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_Load_Invalid_precursor_charge__0__found__File_may_be_corrup" + "ted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Spectrum peaks {0} exceed the maximum allowed {1}.. /// </summary> public static string BiblioSpecLiteLibrary_ReadRedundantSpectrum_Spectrum_peaks__0__excede_the_maximum_allowed__1__ { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_ReadRedundantSpectrum_Spectrum_peaks__0__excede_the_maximum" + "_allowed__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The redundant library {0} does not exist..
/// </summary> public static string BiblioSpecLiteLibrary_ReadRedundantSpectrum_The_redundant_library__0__does_not_exist { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_ReadRedundantSpectrum_The_redundant_library__0__does_not_ex" + "ist", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected SQLite failure reading {0}.. /// </summary> public static string BiblioSpecLiteLibrary_ReadSpectrum_Unexpected_SQLite_failure_reading__0__ { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_ReadSpectrum_Unexpected_SQLite_failure_reading__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to get a valid count of all spectra in the library {0}. /// </summary> public static string BiblioSpecLiteLibrary_RetentionTimesPsmCount_Unable_to_get_a_valid_count_of_all_spectra_in_the_library__0__ { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_RetentionTimesPsmCount_Unable_to_get_a_valid_count_of_all_s" + "pectra_in_the_library__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to BiblioSpec Library. /// </summary> public static string BiblioSpecLiteLibrary_SpecFilter_BiblioSpec_Library { get { return ResourceManager.GetString("BiblioSpecLiteLibrary_SpecFilter_BiblioSpec_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading {0} library.. /// </summary> public static string BiblioSpecLiteLibraryLoadLoading__0__library { get { return ResourceManager.GetString("BiblioSpecLiteLibraryLoadLoading__0__library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to BiblioSpec Library. /// </summary> public static string BiblioSpecLiteSpec_FILTER_BLIB_BiblioSpec_Library { get { return ResourceManager.GetString("BiblioSpecLiteSpec_FILTER_BLIB_BiblioSpec_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed parsing adduct description &quot;{0}&quot;. /// </summary> public static string BioMassCalc_ApplyAdductToFormula_Failed_parsing_adduct_description___0__ { get { return ResourceManager.GetString("BioMassCalc_ApplyAdductToFormula_Failed_parsing_adduct_description___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed parsing adduct description &quot;{0}&quot;: declared charge {1} does not agree with calculated charge {2}. /// </summary> public static string BioMassCalc_ApplyAdductToFormula_Failed_parsing_adduct_description___0____declared_charge__1__does_not_agree_with_calculated_charge__2_ { get { return ResourceManager.GetString("BioMassCalc_ApplyAdductToFormula_Failed_parsing_adduct_description___0____declare" + "d_charge__1__does_not_agree_with_calculated_charge__2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unknown symbol &quot;{0}&quot; in adduct description &quot;{1}&quot;. /// </summary> public static string BioMassCalc_ApplyAdductToFormula_Unknown_symbol___0___in_adduct_description___1__ { get { return ResourceManager.GetString("BioMassCalc_ApplyAdductToFormula_Unknown_symbol___0___in_adduct_description___1__" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The expression &apos;{0}&apos; is not a valid chemical formula..
/// </summary> public static string BioMassCalc_CalculateMass_The_expression__0__is_not_a_valid_chemical_formula { get { return ResourceManager.GetString("BioMassCalc_CalculateMass_The_expression__0__is_not_a_valid_chemical_formula", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Supported chemical symbols include: . /// </summary> public static string BioMassCalc_FormatArgumentException__Supported_chemical_symbols_include__ { get { return ResourceManager.GetString("BioMassCalc_FormatArgumentException__Supported_chemical_symbols_include__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fixing isotope abundance masses requires a monoisotopic mass calculator. /// </summary> public static string BioMassCalc_SynchMasses_Fixing_isotope_abundance_masses_requires_a_monoisotopic_mass_calculator { get { return ResourceManager.GetString("BioMassCalc_SynchMasses_Fixing_isotope_abundance_masses_requires_a_monoisotopic_m" + "ass_calculator", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Blank { get { object obj = ResourceManager.GetObject("Blank", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Error getting database Id for file {0}.. /// </summary> public static string BlibDb_BuildRefSpectra_Error_getting_database_Id_for_file__0__ { get { return ResourceManager.GetString("BlibDb_BuildRefSpectra_Error_getting_database_Id_for_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Multiple reference spectra found for peptide {0} in the library {1}.. /// </summary> public static string BlibDb_BuildRefSpectra_Multiple_reference_spectra_found_for_peptide__0__in_the_library__1__ { get { return ResourceManager.GetString("BlibDb_BuildRefSpectra_Multiple_reference_spectra_found_for_peptide__0__in_the_li" + "brary__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Creating spectral library for imported transition list. /// </summary> public static string BlibDb_CreateLibraryFromSpectra_Creating_spectral_library_for_imported_transition_list { get { return ResourceManager.GetString("BlibDb_CreateLibraryFromSpectra_Creating_spectral_library_for_imported_transition" + "_list", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Libraries must be fully loaded before they can be minimized.. /// </summary> public static string BlibDb_MinimizeLibraries_Libraries_must_be_fully_loaded_before_they_can_be_minimzed { get { return ResourceManager.GetString("BlibDb_MinimizeLibraries_Libraries_must_be_fully_loaded_before_they_can_be_minimz" + "ed", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Minimizing library {0}. /// </summary> public static string BlibDb_MinimizeLibrary_Minimizing_library__0__ { get { return ResourceManager.GetString("BlibDb_MinimizeLibrary_Minimizing_library__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Expected sorted data. /// </summary> public static string Block_VerifySort_Expected_sorted_data { get { return ResourceManager.GetString("Block_VerifySort_Expected_sorted_data", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No such chrominfo: {0}. 
/// </summary> public static string BookmarkEnumerator_Current_No_such_chrominfo__0__ { get { return ResourceManager.GetString("BookmarkEnumerator_Current_No_such_chrominfo__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No such node: {0}. /// </summary> public static string BookmarkEnumerator_Current_No_such_node__0__ { get { return ResourceManager.GetString("BookmarkEnumerator_Current_No_such_node__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &lt;NoPeptide&gt;. /// </summary> public static string BookmarkEnumerator_GetLocationName_NoPeptide { get { return ResourceManager.GetString("BookmarkEnumerator_GetLocationName_NoPeptide", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &lt;UnknownFile&gt;. /// </summary> public static string BookmarkEnumerator_GetLocationName_UnknownFile { get { return ResourceManager.GetString("BookmarkEnumerator_GetLocationName_UnknownFile", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Results. /// </summary> public static string BookmarkEnumerator_GetLocationType_Results { get { return ResourceManager.GetString("BookmarkEnumerator_GetLocationType_Results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Protein. /// </summary> public static string BookmarkEnumerator_GetNodeTypeName_Protein { get { return ResourceManager.GetString("BookmarkEnumerator_GetNodeTypeName_Protein", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unknown. /// </summary> public static string BookmarkEnumerator_GetNodeTypeName_Unknown { get { return ResourceManager.GetString("BookmarkEnumerator_GetNodeTypeName_Unknown", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error copying template file.. /// </summary> public static string BrukerTimsTofMethodExporter_ExportMethod_Error_copying_template_file_ { get { return ResourceManager.GetString("BrukerTimsTofMethodExporter_ExportMethod_Error_copying_template_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Getting scheduling.... /// </summary> public static string BrukerTimsTofMethodExporter_ExportMethod_Getting_scheduling___ { get { return ResourceManager.GetString("BrukerTimsTofMethodExporter_ExportMethod_Getting_scheduling___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Scheduling failure (no targets?). /// </summary> public static string BrukerTimsTofMethodExporter_ExportMethod_Scheduling_failure__no_targets__ { get { return ResourceManager.GetString("BrukerTimsTofMethodExporter_ExportMethod_Scheduling_failure__no_targets__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Template is required for method export.. /// </summary> public static string BrukerTimsTofMethodExporter_ExportMethod_Template_is_required_for_method_export_ { get { return ResourceManager.GetString("BrukerTimsTofMethodExporter_ExportMethod_Template_is_required_for_method_export_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to add the FASTA file {0}.. 
/// </summary> public static string BuildBackgroundProteomeDlg_AddFastaFile_An_error_occurred_attempting_to_add_the_FASTA_file__0__ { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_AddFastaFile_An_error_occurred_attempting_to_add_the_F" + "ASTA_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The added file included {0} repeated protein sequences. Their names were added as aliases to ensure the protein list contains only one copy of each sequence.. /// </summary> public static string BuildBackgroundProteomeDlg_AddFastaFile_The_added_file_included__0__repeated_protein_sequences__Their_names_were_added_as_aliases_to_ensure_the_protein_list_contains_only_one_copy_of_each_sequence_ { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_AddFastaFile_The_added_file_included__0__repeated_prot" + "ein_sequences__Their_names_were_added_as_aliases_to_ensure_the_protein_list_cont" + "ains_only_one_copy_of_each_sequence_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add FASTA File. /// </summary> public static string BuildBackgroundProteomeDlg_btnAddFastaFile_Click_Add_FASTA_File { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_btnAddFastaFile_Click_Add_FASTA_File", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to create the proteome file {0}.. /// </summary> public static string BuildBackgroundProteomeDlg_btnCreate_Click_An_error_occurred_attempting_to_create_the_proteome_file__0__ { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_btnCreate_Click_An_error_occurred_attempting_to_create" + "_the_proteome_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Create Background Proteome. /// </summary> public static string BuildBackgroundProteomeDlg_btnCreate_Click_Create_Background_Proteome { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_btnCreate_Click_Create_Background_Proteome", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Open Background Proteome. /// </summary> public static string BuildBackgroundProteomeDlg_btnOpen_Click_Open_Background_Protoeme { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_btnOpen_Click_Open_Background_Protoeme", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Proteome File. /// </summary> public static string BuildBackgroundProteomeDlg_FILTER_PROTDB_Proteome_File { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_FILTER_PROTDB_Proteome_File", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Choose a valid proteome file, or click the &apos;Create&apos; button to create a new one from FASTA files.. /// </summary> public static string BuildBackgroundProteomeDlg_OkDialog_Choose_a_valid_proteome_file__or_click_the__Create__button_to_create_a_new_one_from_FASTA_files { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_OkDialog_Choose_a_valid_proteome_file__or_click_the__C" + "reate__button_to_create_a_new_one_from_FASTA_files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please specify a full path to the proteome file.. 
/// </summary> public static string BuildBackgroundProteomeDlg_OkDialog_Please_specify_a_full_path_to_the_proteome_file { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_OkDialog_Please_specify_a_full_path_to_the_proteome_fi" + "le", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The background proteome &apos;{0}&apos; already exists.. /// </summary> public static string BuildBackgroundProteomeDlg_OkDialog_The_background_proteome__0__already_exists { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_OkDialog_The_background_proteome__0__already_exists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The proteome file {0} does not exist.. /// </summary> public static string BuildBackgroundProteomeDlg_OkDialog_The_proteome_file__0__does_not_exist { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_OkDialog_The_proteome_file__0__does_not_exist", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The proteome file is not valid.. /// </summary> public static string BuildBackgroundProteomeDlg_OkDialog_The_proteome_file_is_not_valid { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_OkDialog_The_proteome_file_is_not_valid", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You must specify a proteome file.. /// </summary> public static string BuildBackgroundProteomeDlg_OkDialog_You_must_specify_a_proteome_file { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_OkDialog_You_must_specify_a_proteome_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Click the &apos;Add File&apos; button to add a FASTA file, and create a new proteome file.. /// </summary> public static string BuildBackgroundProteomeDlg_RefreshStatus_Click_the_Add_File_button_to_add_a_FASTA_file_and_create_a_new_proteome_file { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_RefreshStatus_Click_the_Add_File_button_to_add_a_FASTA" + "_file_and_create_a_new_proteome_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Click the &apos;Open&apos; button to choose an existing proteome file, or click the &apos;Create&apos; button to create a new proteome file.. /// </summary> public static string BuildBackgroundProteomeDlg_RefreshStatus_Click_the_Open_button_to_choose_an_existing_proteome_file_or_click_the_Create_button_to_create_a_new_proteome_file { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_RefreshStatus_Click_the_Open_button_to_choose_an_exist" + "ing_proteome_file_or_click_the_Create_button_to_create_a_new_proteome_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading protein information from {0}. /// </summary> public static string BuildBackgroundProteomeDlg_RefreshStatus_Loading_protein_information_from__0__ { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_RefreshStatus_Loading_protein_information_from__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading Proteome File. /// </summary> public static string BuildBackgroundProteomeDlg_RefreshStatus_Loading_Proteome_File { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_RefreshStatus_Loading_Proteome_File", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The proteome file contains {0} proteins.. 
/// </summary> public static string BuildBackgroundProteomeDlg_RefreshStatus_The_proteome_file_contains__0__proteins { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_RefreshStatus_The_proteome_file_contains__0__proteins", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The proteome has already been digested.. /// </summary> public static string BuildBackgroundProteomeDlg_RefreshStatus_The_proteome_has_already_been_digested { get { return ResourceManager.GetString("BuildBackgroundProteomeDlg_RefreshStatus_The_proteome_has_already_been_digested", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred reading files in the directory {0}.. /// </summary> public static string BuildLibraryDlg_AddDirectory_An_error_occurred_reading_files_in_the_directory__0__ { get { return ResourceManager.GetString("BuildLibraryDlg_AddDirectory_An_error_occurred_reading_files_in_the_directory__0_" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Find Input Files. /// </summary> public static string BuildLibraryDlg_AddDirectory_Find_Input_Files { get { return ResourceManager.GetString("BuildLibraryDlg_AddDirectory_Find_Input_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} is not a valid library input file.. /// </summary> public static string BuildLibraryDlg_AddInputFiles_The_file__0__is_not_a_valid_library_input_file { get { return ResourceManager.GetString("BuildLibraryDlg_AddInputFiles_The_file__0__is_not_a_valid_library_input_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following files are not valid library input files:. /// </summary> public static string BuildLibraryDlg_AddInputFiles_The_following_files_are_not_valid_library_input_files { get { return ResourceManager.GetString("BuildLibraryDlg_AddInputFiles_The_following_files_are_not_valid_library_input_fil" + "es", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add Input Directory. /// </summary> public static string BuildLibraryDlg_btnAddDirectory_Click_Add_Input_Directory { get { return ResourceManager.GetString("BuildLibraryDlg_btnAddDirectory_Click_Add_Input_Directory", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add Input Files. /// </summary> public static string BuildLibraryDlg_btnAddFile_Click_Add_Input_Files { get { return ResourceManager.GetString("BuildLibraryDlg_btnAddFile_Click_Add_Input_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Matched Peptides (. /// </summary> public static string BuildLibraryDlg_btnAddFile_Click_Matched_Peptides { get { return ResourceManager.GetString("BuildLibraryDlg_btnAddFile_Click_Matched_Peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Next &gt;. /// </summary> public static string BuildLibraryDlg_btnPrevious_Click__Next__ { get { return ResourceManager.GetString("BuildLibraryDlg_btnPrevious_Click__Next__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Finding library input files in. /// </summary> public static string BuildLibraryDlg_FindInputFiles_Finding_library_input_files_in { get { return ResourceManager.GetString("BuildLibraryDlg_FindInputFiles_Finding_library_input_files_in", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Finish. 
/// </summary> public static string BuildLibraryDlg_OkWizardPage_Finish { get { return ResourceManager.GetString("BuildLibraryDlg_OkWizardPage_Finish", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Access violation attempting to write to {0}.. /// </summary> public static string BuildLibraryDlg_ValidateBuilder_Access_violation_attempting_to_write_to__0__ { get { return ResourceManager.GetString("BuildLibraryDlg_ValidateBuilder_Access_violation_attempting_to_write_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure attempting to create a file in {0}.. /// </summary> public static string BuildLibraryDlg_ValidateBuilder_Failure_attempting_to_create_a_file_in__0__ { get { return ResourceManager.GetString("BuildLibraryDlg_ValidateBuilder_Failure_attempting_to_create_a_file_in__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please check that you have write access to this folder.. /// </summary> public static string BuildLibraryDlg_ValidateBuilder_Please_check_that_you_have_write_access_to_this_folder_ { get { return ResourceManager.GetString("BuildLibraryDlg_ValidateBuilder_Please_check_that_you_have_write_access_to_this_f" + "older_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The directory {0} does not exist.. /// </summary> public static string BuildLibraryDlg_ValidateBuilder_The_directory__0__does_not_exist { get { return ResourceManager.GetString("BuildLibraryDlg_ValidateBuilder_The_directory__0__does_not_exist", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The lab authority name {0} is not valid. This should look like an internet server address (e.g. mylab.myu.edu), and be unlikely to be used by any other lab, but need not refer to an actual server.. /// </summary> public static string BuildLibraryDlg_ValidateBuilder_The_lab_authority_name__0__is_not_valid_This_should_look_like_an_internet_server_address_e_g_mylab_myu_edu_and_be_unlikely_to_be_used_by_any_other_lab_but_need_not_refer_to_an_actual_server { get { return ResourceManager.GetString("BuildLibraryDlg_ValidateBuilder_The_lab_authority_name__0__is_not_valid_This_shou" + "ld_look_like_an_internet_server_address_e_g_mylab_myu_edu_and_be_unlikely_to_be_" + "used_by_any_other_lab_but_need_not_refer_to_an_actual_server", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The library identifier {0} is not valid. Identifiers start with a letter, number or underscore, and contain only letters, numbers, underscores and dashes.. /// </summary> public static string BuildLibraryDlg_ValidateBuilder_The_library_identifier__0__is_not_valid_Identifiers_start_with_a_letter_number_or_underscore_and_contain_only_letters_numbers_underscores_and_dashes { get { return ResourceManager.GetString("BuildLibraryDlg_ValidateBuilder_The_library_identifier__0__is_not_valid_Identifie" + "rs_start_with_a_letter_number_or_underscore_and_contain_only_letters_numbers_und" + "erscores_and_dashes", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The output path {0} is a directory. You must specify a file path.. 
/// </summary> public static string BuildLibraryDlg_ValidateBuilder_The_output_path__0__is_a_directory_You_must_specify_a_file_path { get { return ResourceManager.GetString("BuildLibraryDlg_ValidateBuilder_The_output_path__0__is_a_directory_You_must_speci" + "fy_a_file_path", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You must specify an output file path.. /// </summary> public static string BuildLibraryDlg_ValidateBuilder_You_must_specify_an_output_file_path { get { return ResourceManager.GetString("BuildLibraryDlg_ValidateBuilder_You_must_specify_an_output_file_path", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library {0}. /// </summary> public static string BuildLibraryNotification_BuildLibraryNotification_Library__0__ { get { return ResourceManager.GetString("BuildLibraryNotification_BuildLibraryNotification_Library__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred while processing retention times.. /// </summary> public static string BuildPeptideSearchLibraryControl_AddIrtLibraryTable_An_error_occurred_while_processing_retention_times_ { get { return ResourceManager.GetString("BuildPeptideSearchLibraryControl_AddIrtLibraryTable_An_error_occurred_while_proce" + "ssing_retention_times_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Processing Retention Times. /// </summary> public static string BuildPeptideSearchLibraryControl_AddIrtLibraryTable_Processing_Retention_Times { get { return ResourceManager.GetString("BuildPeptideSearchLibraryControl_AddIrtLibraryTable_Processing_Retention_Times", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add document spectral library. /// </summary> public static string BuildPeptideSearchLibraryControl_BuildPeptideSearchLibrary_Add_document_spectral_library { get { return ResourceManager.GetString("BuildPeptideSearchLibraryControl_BuildPeptideSearchLibrary_Add_document_spectral_" + "library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Building document library for peptide search.. /// </summary> public static string BuildPeptideSearchLibraryControl_BuildPeptideSearchLibrary_Building_document_library_for_peptide_search_ { get { return ResourceManager.GetString("BuildPeptideSearchLibraryControl_BuildPeptideSearchLibrary_Building_document_libr" + "ary_for_peptide_search_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Building Peptide Search Library. /// </summary> public static string BuildPeptideSearchLibraryControl_BuildPeptideSearchLibrary_Building_Peptide_Search_Library { get { return ResourceManager.GetString("BuildPeptideSearchLibraryControl_BuildPeptideSearchLibrary_Building_Peptide_Searc" + "h_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to build the library {0}.. /// </summary> public static string BuildPeptideSearchLibraryControl_BuildPeptideSearchLibrary_Failed_to_build_the_library__0__ { get { return ResourceManager.GetString("BuildPeptideSearchLibraryControl_BuildPeptideSearchLibrary_Failed_to_build_the_li" + "brary__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Files to search:. 
/// </summary> public static string BuildPeptideSearchLibraryControl_Files_to_search_ { get { return ResourceManager.GetString("BuildPeptideSearchLibraryControl_Files_to_search_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to import the {0} library.. /// </summary> public static string BuildPeptideSearchLibraryControl_LoadPeptideSearchLibrary_An_error_occurred_attempting_to_import_the__0__library_ { get { return ResourceManager.GetString("BuildPeptideSearchLibraryControl_LoadPeptideSearchLibrary_An_error_occurred_attem" + "pting_to_import_the__0__library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading Library. /// </summary> public static string BuildPeptideSearchLibraryControl_LoadPeptideSearchLibrary_Loading_Library { get { return ResourceManager.GetString("BuildPeptideSearchLibraryControl_LoadPeptideSearchLibrary_Loading_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Result files:. /// </summary> public static string BuildPeptideSearchLibraryControl_Result_files_ { get { return ResourceManager.GetString("BuildPeptideSearchLibraryControl_Result_files_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed reading block from file.. /// </summary> public static string BulkReadException_BulkReadException_Failed_reading_block_from_file { get { return ResourceManager.GetString("BulkReadException_BulkReadException_Failed_reading_block_from_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library entry not found {0}.. /// </summary> public static string CachedLibrary_LoadSpectrum_Library_entry_not_found__0__ { get { return ResourceManager.GetString("CachedLibrary_LoadSpectrum_Library_entry_not_found__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid entries were found in the library &apos;{0}&apos;. ///{1} of the {2} peptides or molecules were invalid, including: ///{3}. /// </summary> public static string CachedLibrary_WarnInvalidEntries_ { get { return ResourceManager.GetString("CachedLibrary_WarnInvalidEntries_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Start value must be less than End value.. /// </summary> public static string CalculateIsolationSchemeDlg_OkDialog_Start_value_must_be_less_than_End_value { get { return ResourceManager.GetString("CalculateIsolationSchemeDlg_OkDialog_Start_value_must_be_less_than_End_value", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The number of generated windows could not be adjusted to be a multiple of the windows per scan. Try changing the windows per scan or the End value.. /// </summary> public static string CalculateIsolationSchemeDlg_OkDialog_The_number_of_generated_windows_could_not_be_adjusted_to_be_a_multiple_of_the_windows_per_scan_Try_changing_the_windows_per_scan_or_the_End_value { get { return ResourceManager.GetString("CalculateIsolationSchemeDlg_OkDialog_The_number_of_generated_windows_could_not_be" + "_adjusted_to_be_a_multiple_of_the_windows_per_scan_Try_changing_the_windows_per_" + "scan_or_the_End_value", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Window width must be an integer value when optimize window placement is selected.. 
/// </summary> public static string CalculateIsolationSchemeDlg_OkDialog_Window_width_must_be_an_integer { get { return ResourceManager.GetString("CalculateIsolationSchemeDlg_OkDialog_Window_width_must_be_an_integer", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Window width must be less than or equal to the isolation range.. /// </summary> public static string CalculateIsolationSchemeDlg_OkDialog_Window_width_must_be_less_than_or_equal_to_the_isolation_range { get { return ResourceManager.GetString("CalculateIsolationSchemeDlg_OkDialog_Window_width_must_be_less_than_or_equal_to_t" + "he_isolation_range", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Odd numbered window widths are not supported for overlapped demultiplexing with optimized window placement selected.. /// </summary> public static string CalculateIsolationSchemeDlg_OkDialog_Window_width_not_even { get { return ResourceManager.GetString("CalculateIsolationSchemeDlg_OkDialog_Window_width_not_even", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Calculator { get { object obj = ResourceManager.GetObject("Calculator", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Regression equation calculation. /// </summary> public static string CalibrateIrtDlg_btnGraph_Click_Regression_equation_calculation { get { return ResourceManager.GetString("CalibrateIrtDlg_btnGraph_Click_Regression_equation_calculation", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Calibrated iRT values. /// </summary> public static string CalibrateIrtDlg_btnGraphIrts_Click_Calibrated_iRT_values { get { return ResourceManager.GetString("CalibrateIrtDlg_btnGraphIrts_Click_Calibrated_iRT_values", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please enter at least {0} standard peptides.. /// </summary> public static string CalibrateIrtDlg_OkDialog_Please_enter_at_least__0__standard_peptides_ { get { return ResourceManager.GetString("CalibrateIrtDlg_OkDialog_Please_enter_at_least__0__standard_peptides_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The iRT standard {0} already exists.. /// </summary> public static string CalibrateIrtDlg_OkDialog_The_iRT_standard__0__already_exists_ { get { return ResourceManager.GetString("CalibrateIrtDlg_OkDialog_The_iRT_standard__0__already_exists_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document contains results for {0} peptide(s) not in this standard, which is less than the minimum requirement of {1} to calibrate a standard.. /// </summary> public static string CalibrateIrtDlg_SetCalibrationPeptides_The_document_contains_results_for__0__peptide_s__not_in_this_standard__which_is_less_than_the_minimum_requirement_of__1__to_calibrate_a_standard_ { get { return ResourceManager.GetString("CalibrateIrtDlg_SetCalibrationPeptides_The_document_contains_results_for__0__pept" + "ide_s__not_in_this_standard__which_is_less_than_the_minimum_requirement_of__1__t" + "o_calibrate_a_standard_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to iRT. 
/// </summary> public static string CalibrateIrtDlg_ShowGraph_iRT { get { return ResourceManager.GetString("CalibrateIrtDlg_ShowGraph_iRT", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Measured. /// </summary> public static string CalibrateIrtDlg_ShowGraph_Measured { get { return ResourceManager.GetString("CalibrateIrtDlg_ShowGraph_Measured", resourceCulture); } } /// <summary> /// Looks up a localized string similar to New iRT. /// </summary> public static string CalibrateIrtDlg_ShowGraph_New_iRT { get { return ResourceManager.GetString("CalibrateIrtDlg_ShowGraph_New_iRT", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Old iRT. /// </summary> public static string CalibrateIrtDlg_ShowGraph_Old_iRT { get { return ResourceManager.GetString("CalibrateIrtDlg_ShowGraph_Old_iRT", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} peptides. /// </summary> public static string CalibrateIrtDlg_StandardsChanged__0__peptides { get { return ResourceManager.GetString("CalibrateIrtDlg_StandardsChanged__0__peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1 peptide. /// </summary> public static string CalibrateIrtDlg_StandardsChanged__1_peptide { get { return ResourceManager.GetString("CalibrateIrtDlg_StandardsChanged__1_peptide", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid fixed point peptides.. /// </summary> public static string CalibrateIrtDlg_TryGetLine_Invalid_fixed_point_peptides_ { get { return ResourceManager.GetString("CalibrateIrtDlg_TryGetLine_Invalid_fixed_point_peptides_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Maximum fixed point peptide must have a greater measured retention time than the minimum fixed point peptide.. /// </summary> public static string CalibrateIrtDlg_TryGetLine_Maximum_fixed_point_peptide_must_have_a_greater_measured_retention_time_than_the_minimum_fixed_point_peptide_ { get { return ResourceManager.GetString("CalibrateIrtDlg_TryGetLine_Maximum_fixed_point_peptide_must_have_a_greater_measur" + "ed_retention_time_than_the_minimum_fixed_point_peptide_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Standard calibration peptides are required.. /// </summary> public static string CalibrateIrtDlg_TryGetLine_Standard_calibration_peptides_are_required_ { get { return ResourceManager.GetString("CalibrateIrtDlg_TryGetLine_Standard_calibration_peptides_are_required_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The standard must have two fixed points.. /// </summary> public static string CalibrateIrtDlg_TryGetLine_The_standard_must_have_two_fixed_points { get { return ResourceManager.GetString("CalibrateIrtDlg_TryGetLine_The_standard_must_have_two_fixed_points", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document contains results for {0} peptides, but using fewer than {1} standard peptides is not recommended. Are you sure you want to continue?. 
/// </summary> public static string CalibrateIrtDlg_UseResults_The_document_contains_results_for__0__peptides__but_using_fewer_than__1__standard_peptides_is_not_recommended__Are_you_sure_you_want_to_continue_ { get { return ResourceManager.GetString("CalibrateIrtDlg_UseResults_The_document_contains_results_for__0__peptides__but_us" + "ing_fewer_than__1__standard_peptides_is_not_recommended__Are_you_sure_you_want_t" + "o_continue_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document contains results for {0} peptides, which is less than the minimum requirement of {1} to calibrate a standard.. /// </summary> public static string CalibrateIrtDlg_UseResults_The_document_contains_results_for__0__peptides__which_is_less_than_the_minimum_requirement_of__1__to_calibrate_a_standard_ { get { return ResourceManager.GetString("CalibrateIrtDlg_UseResults_The_document_contains_results_for__0__peptides__which_" + "is_less_than_the_minimum_requirement_of__1__to_calibrate_a_standard_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document contains results for {0} peptides not in this standard, but using fewer than {1} standard peptides is not recommended. Are you sure you want to continue?. /// </summary> public static string CalibrateIrtDlg_UseResults_The_document_contains_results_for__0__peptides_not_in_this_standard__but_using_fewer_than__1__standard_peptides_is_not_recommended__Are_you_sure_you_want_to_continue_ { get { return ResourceManager.GetString("CalibrateIrtDlg_UseResults_The_document_contains_results_for__0__peptides_not_in_" + "this_standard__but_using_fewer_than__1__standard_peptides_is_not_recommended__Ar" + "e_you_sure_you_want_to_continue_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must contain results to calibrate a standard.. /// </summary> public static string CalibrateIrtDlg_UseResults_The_document_must_contain_results_to_calibrate_a_standard { get { return ResourceManager.GetString("CalibrateIrtDlg_UseResults_The_document_must_contain_results_to_calibrate_a_stand" + "ard", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to calculate the calibration curve because there are different Precursor Concentrations specified for the label {0}.. /// </summary> public static string CalibrationCurveFitter_GetCalibrationCurve_Unable_to_calculate_the_calibration_curve_for_the_because_there_are_different_Precursor_Concentrations_specified_for_the_label__0__ { get { return ResourceManager.GetString("CalibrationCurveFitter_GetCalibrationCurve_Unable_to_calculate_the_calibration_cu" + "rve_for_the_because_there_are_different_Precursor_Concentrations_specified_for_t" + "he_label__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to CiRT (discovered). /// </summary> public static string CalibrationGridViewDriver_CiRT_option_name { get { return ResourceManager.GetString("CalibrationGridViewDriver_CiRT_option_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Calculate from regression. /// </summary> public static string CalibrationGridViewDriver_FindEvenlySpacedPeptides_Calculate_from_regression { get { return ResourceManager.GetString("CalibrationGridViewDriver_FindEvenlySpacedPeptides_Calculate_from_regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Calculating scores.
/// </summary> public static string CalibrationGridViewDriver_FindEvenlySpacedPeptides_Calculating_scores { get { return ResourceManager.GetString("CalibrationGridViewDriver_FindEvenlySpacedPeptides_Calculating_scores", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Predefined values. /// </summary> public static string CalibrationGridViewDriver_FindEvenlySpacedPeptides_Predefined_values { get { return ResourceManager.GetString("CalibrationGridViewDriver_FindEvenlySpacedPeptides_Predefined_values", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This document contains {0} CiRT peptides. Would you like to use {1} of them as your iRT standards?. /// </summary> public static string CalibrationGridViewDriver_FindEvenlySpacedPeptides_This_document_contains__0__CiRT_peptides__Would_you_like_to_use__1__of_them_as_your_iRT_standards_ { get { return ResourceManager.GetString("CalibrationGridViewDriver_FindEvenlySpacedPeptides_This_document_contains__0__CiR" + "T_peptides__Would_you_like_to_use__1__of_them_as_your_iRT_standards_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Would you like to use the predefined iRT values or calculate new iRT values based on the regression?. /// </summary> public static string CalibrationGridViewDriver_FindEvenlySpacedPeptides_Would_you_like_to_use_the_predefined_iRT_values_ { get { return ResourceManager.GetString("CalibrationGridViewDriver_FindEvenlySpacedPeptides_Would_you_like_to_use_the_pred" + "efined_iRT_values_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Standard peptides must exist in the database.. /// </summary> public static string ChangeIrtPeptidesDlg_OkDialog_Standard_peptides_must_exist_in_the_database { get { return ResourceManager.GetString("ChangeIrtPeptidesDlg_OkDialog_Standard_peptides_must_exist_in_the_database", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following peptides were removed:. /// </summary> public static string ChangeIrtPeptidesDlg_OkDialog_The_following_peptides_were_removed_ { get { return ResourceManager.GetString("ChangeIrtPeptidesDlg_OkDialog_The_following_peptides_were_removed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following sequences are not currently in the database:. /// </summary> public static string ChangeIrtPeptidesDlg_OkDialog_The_following_sequences_are_not_currently_in_the_database { get { return ResourceManager.GetString("ChangeIrtPeptidesDlg_OkDialog_The_following_sequences_are_not_currently_in_the_da" + "tabase", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The sequence &apos;{0}&apos; is not currently in the database.. /// </summary> public static string ChangeIrtPeptidesDlg_OkDialog_The_sequence__0__is_not_currently_in_the_database { get { return ResourceManager.GetString("ChangeIrtPeptidesDlg_OkDialog_The_sequence__0__is_not_currently_in_the_database", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Would you like to remove them from the document?. /// </summary> public static string ChangeIrtPeptidesDlg_OkDialog_Would_you_like_to_remove_them_from_the_document_ { get { return ResourceManager.GetString("ChangeIrtPeptidesDlg_OkDialog_Would_you_like_to_remove_them_from_the_document_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Change Annotation Settings. 
/// </summary> public static string ChooseAnnotationsDlg_OkDialog_Change_Annotation_Settings { get { return ResourceManager.GetString("ChooseAnnotationsDlg_OkDialog_Change_Annotation_Settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Import Transition List (iRT standards). /// </summary> public static string ChooseIrtStandardPeptides_ImportTextFile_Import_Transition_List__iRT_standards_ { get { return ResourceManager.GetString("ChooseIrtStandardPeptides_ImportTextFile_Import_Transition_List__iRT_standards_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transition List. /// </summary> public static string ChooseIrtStandardPeptides_ImportTextFile_Transition_List { get { return ResourceManager.GetString("ChooseIrtStandardPeptides_ImportTextFile_Transition_List", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transition list field must contain a path to a valid file.. /// </summary> public static string ChooseIrtStandardPeptides_OkDialog_Transition_list_field_must_contain_a_path_to_a_valid_file_ { get { return ResourceManager.GetString("ChooseIrtStandardPeptides_OkDialog_Transition_list_field_must_contain_a_path_to_a" + "_valid_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Known iRTs. /// </summary> public static string ChooseIrtStandardPeptidesDlg_OkDialog_Known_iRTs { get { return ResourceManager.GetString("ChooseIrtStandardPeptidesDlg_OkDialog_Known_iRTs", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library iRTs. /// </summary> public static string ChooseIrtStandardPeptidesDlg_OkDialog_Library_iRTs { get { return ResourceManager.GetString("ChooseIrtStandardPeptidesDlg_OkDialog_Library_iRTs", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Linear regression. /// </summary> public static string ChooseIrtStandardPeptidesDlg_OkDialog_Linear_regression { get { return ResourceManager.GetString("ChooseIrtStandardPeptidesDlg_OkDialog_Linear_regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please select a protein containing the list of standard peptides for the iRT calculator.. /// </summary> public static string ChooseIrtStandardPeptidesDlg_OkDialog_Please_select_a_protein_containing_the_list_of_standard_peptides_for_the_iRT_calculator_ { get { return ResourceManager.GetString("ChooseIrtStandardPeptidesDlg_OkDialog_Please_select_a_protein_containing_the_list" + "_of_standard_peptides_for_the_iRT_calculator_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Choose retention time filter replicates. /// </summary> public static string ChooseSchedulingReplicatesDlg_btnOk_Click_Choose_retention_time_filter_replicates { get { return ResourceManager.GetString("ChooseSchedulingReplicatesDlg_btnOk_Click_Choose_retention_time_filter_replicates" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The set of replicates in this document has changed. Please choose again which replicates to use for the retention time filter.. 
/// </summary> public static string ChooseSchedulingReplicatesDlg_btnOk_Click_The_set_of_replicates_in_this_document_has_changed___Please_choose_again_which_replicates_to_use_for_the_retention_time_filter_ { get { return ResourceManager.GetString("ChooseSchedulingReplicatesDlg_btnOk_Click_The_set_of_replicates_in_this_document_" + "has_changed___Please_choose_again_which_replicates_to_use_for_the_retention_time" + "_filter_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You must choose at least one replicate. /// </summary> public static string ChooseSchedulingReplicatesDlg_btnOk_Click_You_must_choose_at_least_one_replicate { get { return ResourceManager.GetString("ChooseSchedulingReplicatesDlg_btnOk_Click_You_must_choose_at_least_one_replicate", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading {0} cache. /// </summary> public static string ChromatogramCache_Load_Loading__0__cache { get { return ResourceManager.GetString("ChromatogramCache_Load_Loading__0__cache", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure trying to read scan IDs. /// </summary> public static string ChromatogramCache_LoadScanIdBytes_Failure_trying_to_read_scan_IDs { get { return ResourceManager.GetString("ChromatogramCache_LoadScanIdBytes_Failure_trying_to_read_scan_IDs", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file appears to be corrupted and cannot be read. ///It is recommended that you delete this file so that Skyline can create a new file by again extracting chromatograms from the raw data files.. /// </summary> public static string ChromatogramCache_LoadStructs_FileCorrupted { get { return ResourceManager.GetString("ChromatogramCache_LoadStructs_FileCorrupted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please check for a newer release.. /// </summary> public static string ChromatogramCache_LoadStructs_Please_check_for_a_newer_release_ { get { return ResourceManager.GetString("ChromatogramCache_LoadStructs_Please_check_for_a_newer_release_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The SKYD file format {0} is not supported by Skyline {1}.. /// </summary> public static string ChromatogramCache_LoadStructs_The_SKYD_file_format__0__is_not_supported_by_Skyline__1__ { get { return ResourceManager.GetString("ChromatogramCache_LoadStructs_The_SKYD_file_format__0__is_not_supported_by_Skylin" + "e__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Data truncation in cache header. File may be corrupted.. /// </summary> public static string ChromatogramCache_ReadComplete_Data_truncation_in_cache_header_File_may_be_corrupted { get { return ResourceManager.GetString("ChromatogramCache_ReadComplete_Data_truncation_in_cache_header_File_may_be_corrup" + "ted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure writing cache. Specified {0} peaks exceed total peak count {1}. /// </summary> public static string ChromatogramCache_WriteStructs_Failure_writing_cache___Specified__0__peaks_exceed_total_peak_count__1_ { get { return ResourceManager.GetString("ChromatogramCache_WriteStructs_Failure_writing_cache___Specified__0__peaks_exceed" + "_total_peak_count__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This import appears to be taking longer than expected. 
If importing from a network drive, consider canceling this import, copying to local disk and retrying.. /// </summary> public static string ChromatogramDataProvider_GetChromatogram_This_import_appears_to_be_taking_longer_than_expected__If_importing_from_a_network_drive__consider_canceling_this_import__copying_to_local_disk_and_retrying_ { get { return ResourceManager.GetString("ChromatogramDataProvider_GetChromatogram_This_import_appears_to_be_taking_longer_" + "than_expected__If_importing_from_a_network_drive__consider_canceling_this_import" + "__copying_to_local_disk_and_retrying_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Bad chromatogram data for charge {0} state of peptide {1}. /// </summary> public static string ChromatogramExporter_Export_Bad_chromatogram_data_for_charge__0__state_of_peptide__1_ { get { return ResourceManager.GetString("ChromatogramExporter_Export_Bad_chromatogram_data_for_charge__0__state_of_peptide" + "__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Corrupted chromatogram data at charge {0} state of peptide {1}. /// </summary> public static string ChromatogramExporter_Export_Corrupted_chromatogram_data_at_charge__0__state_of_peptide__1_ { get { return ResourceManager.GetString("ChromatogramExporter_Export_Corrupted_chromatogram_data_at_charge__0__state_of_pe" + "ptide__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exporting Chromatograms for {0}. /// </summary> public static string ChromatogramExporter_Export_Exporting_Chromatograms_for__0_ { get { return ResourceManager.GetString("ChromatogramExporter_Export_Exporting_Chromatograms_for__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to One or more missing chromatograms at charge state {0} of {1}. /// </summary> public static string ChromatogramExporter_ExportGroupNode_One_or_more_missing_chromatograms_at_charge_state__0__of__1_ { get { return ResourceManager.GetString("ChromatogramExporter_ExportGroupNode_One_or_more_missing_chromatograms_at_charge_" + "state__0__of__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid extractor name.. /// </summary> public static string ChromatogramExporter_GetExtractorName_Invalid_extractor_name_ { get { return ResourceManager.GetString("ChromatogramExporter_GetExtractorName_Invalid_extractor_name_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure trying to read points. /// </summary> public static string ChromatogramGroupInfo_ReadChromatogram_Failure_trying_to_read_points { get { return ResourceManager.GetString("ChromatogramGroupInfo_ReadChromatogram_Failure_trying_to_read_points", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The index {0} must be between 0 and {1}. /// </summary> public static string ChromatogramInfo_ChromatogramInfo_The_index__0__must_be_between_0_and__1__ { get { return ResourceManager.GetString("ChromatogramInfo_ChromatogramInfo_The_index__0__must_be_between_0_and__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Chromatogram Libraries. /// </summary> public static string ChromatogramLibrary_FILTER_CLIB_Chromatogram_Libraries { get { return ResourceManager.GetString("ChromatogramLibrary_FILTER_CLIB_Chromatogram_Libraries", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading {0}. 
/// </summary> public static string ChromatogramLibrary_Load_Loading__0_ { get { return ResourceManager.GetString("ChromatogramLibrary_Load_Loading__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exception reading cache:{0}. /// </summary> public static string ChromatogramLibrary_LoadFromCache_Exception_reading_cache__0_ { get { return ResourceManager.GetString("ChromatogramLibrary_LoadFromCache_Exception_reading_cache__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error loading chromatogram library:{0}. /// </summary> public static string ChromatogramLibrary_LoadLibraryFromDatabase_Error_loading_chromatogram_library__0_ { get { return ResourceManager.GetString("ChromatogramLibrary_LoadLibraryFromDatabase_Error_loading_chromatogram_library__0" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reading precursors from {0}. /// </summary> public static string ChromatogramLibrary_LoadLibraryFromDatabase_Reading_precursors_from__0_ { get { return ResourceManager.GetString("ChromatogramLibrary_LoadLibraryFromDatabase_Reading_precursors_from__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peak Area. /// </summary> public static string ChromatogramLibrarySpec_PEPTIDE_RANK_PEAK_AREA_Peak_Area { get { return ResourceManager.GetString("ChromatogramLibrarySpec_PEPTIDE_RANK_PEAK_AREA_Peak_Area", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempting to save results info for a file that cannot be found.. /// </summary> public static string ChromatogramSet_GetOrdinalSaveId_Attempting_to_save_results_info_for_a_file_that_cannot_be_found { get { return ResourceManager.GetString("ChromatogramSet_GetOrdinalSaveId_Attempting_to_save_results_info_for_a_file_that_" + "cannot_be_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempt to serialize list containing invalid type.. /// </summary> public static string ChromatogramSet_WriteXml_Attempt_to_serialize_list_containing_invalid_type { get { return ResourceManager.GetString("ChromatogramSet_WriteXml_Attempt_to_serialize_list_containing_invalid_type", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This document contains only negative ion mode transitions, and the imported file contains only positive ion mode data so nothing can be loaded.. /// </summary> public static string ChromCacheBuilder_BuildCache_This_document_contains_only_negative_ion_mode_transitions__and_the_imported_file_contains_only_positive_ion_mode_data_so_nothing_can_be_loaded_ { get { return ResourceManager.GetString("ChromCacheBuilder_BuildCache_This_document_contains_only_negative_ion_mode_transi" + "tions__and_the_imported_file_contains_only_positive_ion_mode_data_so_nothing_can" + "_be_loaded_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This document contains only positive ion mode transitions, and the imported file contains only negative ion mode data so nothing can be loaded. Negative ion mode transitions need to have negative charge values.. 
/// </summary> public static string ChromCacheBuilder_BuildCache_This_document_contains_only_positive_ion_mode_transitions__and_the_imported_file_contains_only_negative_ion_mode_data_so_nothing_can_be_loaded___Negative_ion_mode_transitions_need_to_have_negative_charge_values_ { get { return ResourceManager.GetString("ChromCacheBuilder_BuildCache_This_document_contains_only_positive_ion_mode_transi" + "tions__and_the_imported_file_contains_only_negative_ion_mode_data_so_nothing_can" + "_be_loaded___Negative_ion_mode_transitions_need_to_have_negative_charge_values_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Importing {0}. /// </summary> public static string ChromCacheBuilder_BuildNextFileInner_Importing__0__ { get { return ResourceManager.GetString("ChromCacheBuilder_BuildNextFileInner_Importing__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Recalculating scores for {0}. /// </summary> public static string ChromCacheBuilder_BuildNextFileInner_Recalculating_scores_for__0_ { get { return ResourceManager.GetString("ChromCacheBuilder_BuildNextFileInner_Recalculating_scores_for__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} does not exist.. /// </summary> public static string ChromCacheBuilder_BuildNextFileInner_The_file__0__does_not_exist { get { return ResourceManager.GetString("ChromCacheBuilder_BuildNextFileInner_The_file__0__does_not_exist", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The sample {0} contains no usable data.. /// </summary> public static string ChromCacheBuilder_BuildNextFileInner_The_sample__0__contains_no_usable_data { get { return ResourceManager.GetString("ChromCacheBuilder_BuildNextFileInner_The_sample__0__contains_no_usable_data", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The path &apos;{0}&apos; was not found among previously imported results.. /// </summary> public static string ChromCacheBuilder_GetRecalcFileBuildInfo_The_path___0___was_not_found_among_previously_imported_results_ { get { return ResourceManager.GetString("ChromCacheBuilder_GetRecalcFileBuildInfo_The_path___0___was_not_found_among_previ" + "ously_imported_results_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to finish importing chromatograms because the retention time predictor linear regression failed.. /// </summary> public static string ChromCacheBuilder_Read_Unable_to_finish_importing_chromatograms_because_the_retention_time_predictor_linear_regression_failed_ { get { return ResourceManager.GetString("ChromCacheBuilder_Read_Unable_to_finish_importing_chromatograms_because_the_reten" + "tion_time_predictor_linear_regression_failed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Existing write threads: {0}. /// </summary> public static string ChromCacheBuilder_WriteLoop_Existing_write_threads___0_ { get { return ResourceManager.GetString("ChromCacheBuilder_WriteLoop_Existing_write_threads___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure writing cache file.. 
/// </summary> public static string ChromCacheBuilder_WriteLoop_Failure_writing_cache_file { get { return ResourceManager.GetString("ChromCacheBuilder_WriteLoop_Failure_writing_cache_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transitions of the same precursor found with different peak counts {0} and {1}. /// </summary> public static string ChromCacheBuilder_WriteLoop_Transitions_of_the_same_precursor_found_with_different_peak_counts__0__and__1__ { get { return ResourceManager.GetString("ChromCacheBuilder_WriteLoop_Transitions_of_the_same_precursor_found_with_differen" + "t_peak_counts__0__and__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed importing results file &apos;{0}&apos;.. /// </summary> public static string ChromCacheBuildException_GetMessage_Failed_importing_results_file___0___ { get { return ResourceManager.GetString("ChromCacheBuildException_GetMessage_Failed_importing_results_file___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed importing results file &apos;{0}&apos;, sample {1}.. /// </summary> public static string ChromCacheBuildException_GetMessage_Failed_importing_results_file___0____sample__1__ { get { return ResourceManager.GetString("ChromCacheBuildException_GetMessage_Failed_importing_results_file___0____sample__" + "1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected end of file in {0}.. /// </summary> public static string ChromCacheJoiner_FinishRead_Unexpected_end_of_file_in__0__ { get { return ResourceManager.GetString("ChromCacheJoiner_FinishRead_Unexpected_end_of_file_in__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to create cache &apos;{0}&apos;.. /// </summary> public static string ChromCacheJoiner_JoinNextPart_Failed_to_create_cache__0__ { get { return ResourceManager.GetString("ChromCacheJoiner_JoinNextPart_Failed_to_create_cache__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Joining file {0}. /// </summary> public static string ChromCacheJoiner_JoinNextPart_Joining_file__0__ { get { return ResourceManager.GetString("ChromCacheJoiner_JoinNextPart_Joining_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure attempting to write the file {0}. /// </summary> public static string ChromCacheWriter_Complete_Failure_attempting_to_write_the_file__0_ { get { return ResourceManager.GetString("ChromCacheWriter_Complete_Failure_attempting_to_write_the_file__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to minutes. /// </summary> public static string ChromChartPropertyDlg_cbRelative_CheckedChanged_minutes { get { return ResourceManager.GetString("ChromChartPropertyDlg_cbRelative_CheckedChanged_minutes", resourceCulture); } } /// <summary> /// Looks up a localized string similar to widths. /// </summary> public static string ChromChartPropertyDlg_cbRelative_CheckedChanged_widths { get { return ResourceManager.GetString("ChromChartPropertyDlg_cbRelative_CheckedChanged_widths", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Times ({0}) and intensities ({1}) disagree in point count.. 
/// </summary> public static string ChromCollected_ChromCollected_Times__0__and_intensities__1__disagree_in_point_count { get { return ResourceManager.GetString("ChromCollected_ChromCollected_Times__0__and_intensities__1__disagree_in_point_cou" + "nt", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Intensities ({0}) and mass errors ({1}) disagree in point count.. /// </summary> public static string ChromCollector_ReleaseChromatogram_Intensities___0___and_mass_errors___1___disagree_in_point_count_ { get { return ResourceManager.GetString("ChromCollector_ReleaseChromatogram_Intensities___0___and_mass_errors___1___disagr" + "ee_in_point_count_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The time interval {0} to {1} is not valid.. /// </summary> public static string ChromDataSet_GetExtents_The_time_interval__0__to__1__is_not_valid { get { return ResourceManager.GetString("ChromDataSet_GetExtents_The_time_interval__0__to__1__is_not_valid", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Incorrectly sorted chromatograms {0} &gt; {1}. /// </summary> public static string ChromDataSet_MarkOptimizationData_Incorrectly_sorted_chromatograms__0__1__ { get { return ResourceManager.GetString("ChromDataSet_MarkOptimizationData_Incorrectly_sorted_chromatograms__0__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected null peak. /// </summary> public static string ChromDataSet_MergePeaks_Unexpected_null_peak { get { return ResourceManager.GetString("ChromDataSet_MergePeaks_Unexpected_null_peak", resourceCulture); } } /// <summary> /// Looks up a localized string similar to empty. /// </summary> public static string ChromDataSet_ToString_empty { get { return ResourceManager.GetString("ChromDataSet_ToString_empty", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Explicit. /// </summary> public static string ChromGraphItem_AddAnnotations_Explicit { get { return ResourceManager.GetString("ChromGraphItem_AddAnnotations_Explicit", resourceCulture); } } /// <summary> /// Looks up a localized string similar to ID. /// </summary> public static string ChromGraphItem_AddAnnotations_ID { get { return ResourceManager.GetString("ChromGraphItem_AddAnnotations_ID", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Predicted. /// </summary> public static string ChromGraphItem_AddAnnotations_Predicted { get { return ResourceManager.GetString("ChromGraphItem_AddAnnotations_Predicted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} - base peak. /// </summary> public static string ChromGraphItem_Title__0____base_peak { get { return ResourceManager.GetString("ChromGraphItem_Title__0____base_peak", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} - TIC. /// </summary> public static string ChromGraphItem_Title__0____TIC { get { return ResourceManager.GetString("ChromGraphItem_Title__0____TIC", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Step {0}. /// </summary> public static string ChromGraphItem_Title_Step__0_ { get { return ResourceManager.GetString("ChromGraphItem_Title_Step__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid chromatogram ID {0} found. Failure parsing m/z values.. 
/// </summary> public static string ChromKey_FromId_Invalid_chromatogram_ID__0__found_Failure_parsing_mz_values { get { return ResourceManager.GetString("ChromKey_FromId_Invalid_chromatogram_ID__0__found_Failure_parsing_mz_values", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid chromatogram ID {0} found. The ID must include both precursor and product m/z values.. /// </summary> public static string ChromKey_FromId_Invalid_chromatogram_ID__0__found_The_ID_must_include_both_precursor_and_product_mz_values { get { return ResourceManager.GetString("ChromKey_FromId_Invalid_chromatogram_ID__0__found_The_ID_must_include_both_precur" + "sor_and_product_mz_values", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value &apos;{0}&apos; is not a valid chromatogram ID.. /// </summary> public static string ChromKey_FromId_The_value__0__is_not_a_valid_chromatogram_ID { get { return ResourceManager.GetString("ChromKey_FromId_The_value__0__is_not_a_valid_chromatogram_ID", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap ChromLib { get { object obj = ResourceManager.GetObject("ChromLib", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to ClipboardEx implementation problem. /// </summary> public static string ClipboardEx_GetData_ClipboardEx_implementation_problem { get { return ResourceManager.GetString("ClipboardEx_GetData_ClipboardEx_implementation_problem", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed setting data to the clipboard.. /// </summary> public static string ClipboardHelper_GetCopyErrorMessage_Failed_setting_data_to_clipboard_ { get { return ResourceManager.GetString("ClipboardHelper_GetCopyErrorMessage_Failed_setting_data_to_clipboard_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The process &apos;{0}&apos; (ID = {1}) has the clipboard open.. /// </summary> public static string ClipboardHelper_GetOpenClipboardMessage_The_process__0__ID__1__has_the_clipboard_open { get { return ResourceManager.GetString("ClipboardHelper_GetOpenClipboardMessage_The_process__0__ID__1__has_the_clipboard_" + "open", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed getting data from the clipboard.. /// </summary> public static string ClipboardHelper_GetPasteErrorMessage_Failed_getting_data_from_the_clipboard_ { get { return ResourceManager.GetString("ClipboardHelper_GetPasteErrorMessage_Failed_getting_data_from_the_clipboard_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Collapse { get { object obj = ResourceManager.GetObject("Collapse", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Adding ion mobility data from {0}. /// </summary> public static string CollisionalCrossSectionGridViewDriver_AddSpectralLibrary_Adding_ion_mobility_data_from__0_ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriver_AddSpectralLibrary_Adding_ion_mobility_data" + "_from__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding Spectral Library. 
/// </summary> public static string CollisionalCrossSectionGridViewDriver_AddSpectralLibrary_Adding_Spectral_Library { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriver_AddSpectralLibrary_Adding_Spectral_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to load the library file {0}.. /// </summary> public static string CollisionalCrossSectionGridViewDriver_AddSpectralLibrary_An_error_occurred_attempting_to_load_the_library_file__0__ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriver_AddSpectralLibrary_An_error_occurred_attemp" + "ting_to_load_the_library_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The library {0} does not contain ion mobility information.. /// </summary> public static string CollisionalCrossSectionGridViewDriver_AddSpectralLibrary_The_library__0__does_not_contain_ion_mobility_information_ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriver_AddSpectralLibrary_The_library__0__does_not" + "_contain_ion_mobility_information_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reading ion mobility data from {0}.. /// </summary> public static string CollisionalCrossSectionGridViewDriver_ProcessDriftTimes_Reading_ion_mobility_data_from__0__ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriver_ProcessDriftTimes_Reading_ion_mobility_data" + "_from__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reading ion mobility information. /// </summary> public static string CollisionalCrossSectionGridViewDriver_ProcessIonMobilityValues_Reading_ion_mobility_information { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriver_ProcessIonMobilityValues_Reading_ion_mobili" + "ty_information", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The sequence {0} is already present in the list.. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_DoRowValidating_The_sequence__0__is_already_present_in_the_list_ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_DoRowValidating_The_sequence__0__is_alr" + "eady_present_in_the_list_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} is not a valid charge. Precursor charges must be integers with absolute value between 1 and {1}.. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow__0__is_not_a_valid_charge__Precursor_charges_must_be_integers_with_absolute_value_between_1_and__1__ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow__0__is_not_a_valid_charge__" + "Precursor_charges_must_be_integers_with_absolute_value_between_1_and__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot read high energy ion mobility offset value &quot;{0}&quot; on line {1}.. 
/// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_Cannot_read_high_energy_ion_mobility_offset_value___0___on_line__1__ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_Cannot_read_high_energy_ion" + "_mobility_offset_value___0___on_line__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Could not parse adduct description &quot;{0}&quot; on line {1}. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_Could_not_parse_adduct_description___0___on_line__1_ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_Could_not_parse_adduct_desc" + "ription___0___on_line__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid number format {0} for collisional cross section on line {1}.. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_Invalid_number_format__0__for_collisional_cross_section_on_line__1__ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_Invalid_number_format__0__f" + "or_collisional_cross_section_on_line__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid number format {0} for ion mobility on line {1}.. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_Invalid_number_format__0__for_ion_mobility_on_line__1__ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_Invalid_number_format__0__f" + "or_ion_mobility_on_line__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing adduct description on line {0}. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_Missing_adduct_description_on_line__0_ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_Missing_adduct_description_" + "on_line__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing collisional cross section value on line {0}.. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_Missing_collisional_cross_section_value_on_line__0__ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_Missing_collisional_cross_s" + "ection_value_on_line__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing ion mobility value on line {0}.. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_Missing_ion_mobility_value_on_line__0__ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_Missing_ion_mobility_value_" + "on_line__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing peptide sequence on line {0}.. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_Missing_peptide_sequence_on_line__0__ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_Missing_peptide_sequence_on" + "_line__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Supported units include: {0}. 
/// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_Supported_units_include___0_ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_Supported_units_include___0" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The collisional cross section {0} must be greater than zero on line {1}.. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_The_collisional_cross_section__0__must_be_greater_than_zero_on_line__1__ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_The_collisional_cross_secti" + "on__0__must_be_greater_than_zero_on_line__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The ion mobility value &quot;{0}&quot; on line {1} must be greater than zero. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_The_ion_mobility_value___0___on_line__1__must_be_greater_than_zero { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_The_ion_mobility_value___0_" + "__on_line__1__must_be_greater_than_zero", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The pasted text must at a minimum contain columns for peptide and adduct, along with collisional cross section and/or ion mobility.. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_The_pasted_text_must_at_a_minimum_contain_columns_for_peptide_and_adduct__along_with_collisional_cross_section_and_or_ion_mobility_ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_The_pasted_text_must_at_a_m" + "inimum_contain_columns_for_peptide_and_adduct__along_with_collisional_cross_sect" + "ion_and_or_ion_mobility_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The text {0} is not a valid peptide sequence on line {1}.. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_The_text__0__is_not_a_valid_peptide_sequence_on_line__1__ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_The_text__0__is_not_a_valid" + "_peptide_sequence_on_line__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unrecognized ion mobility units &quot;{0}&quot; on line {1}. /// </summary> public static string CollisionalCrossSectionGridViewDriverBase_ValidateRow_Unrecognized_ion_mobility_units___0___on_line__1_ { get { return ResourceManager.GetString("CollisionalCrossSectionGridViewDriverBase_ValidateRow_Unrecognized_ion_mobility_u" + "nits___0___on_line__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Collision Energy Regression:. /// </summary> public static string CollisionEnergyList_Label_Collision_Energy_Regression { get { return ResourceManager.GetString("CollisionEnergyList_Label_Collision_Energy_Regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Collision Energy Regressions. /// </summary> public static string CollisionEnergyList_Title_Edit_Collision_Energy_Regressions { get { return ResourceManager.GetString("CollisionEnergyList_Title_Edit_Collision_Energy_Regressions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Collision energy regression contains multiple coefficients for charge {0}.. 
/// </summary> public static string CollisionEnergyRegression_Validate_Collision_energy_regression_contains_multiple_coefficients_for_charge__0__ { get { return ResourceManager.GetString("CollisionEnergyRegression_Validate_Collision_energy_regression_contains_multiple_" + "coefficients_for_charge__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Collision energy regressions require at least one regression function.. /// </summary> public static string CollisionEnergyRegression_Validate_Collision_energy_regressions_require_at_least_one_regression_function { get { return ResourceManager.GetString("CollisionEnergyRegression_Validate_Collision_energy_regressions_require_at_least_" + "one_regression_function", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skyline classic. /// </summary> public static string ColorSchemeList_DEFAULT_Skyline_classic { get { return ResourceManager.GetString("ColorSchemeList_DEFAULT_Skyline_classic", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Distinct. /// </summary> public static string ColorSchemeList_GetDefaults_Distinct { get { return ResourceManager.GetString("ColorSchemeList_GetDefaults_Distinct", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Eggplant lemonade. /// </summary> public static string ColorSchemeList_GetDefaults_Eggplant_lemonade { get { return ResourceManager.GetString("ColorSchemeList_GetDefaults_Eggplant_lemonade", resourceCulture); } } /// <summary> /// Looks up a localized string similar to High contrast. /// </summary> public static string ColorSchemeList_GetDefaults_High_contrast { get { return ResourceManager.GetString("ColorSchemeList_GetDefaults_High_contrast", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Duplicate column &apos;{0}&apos;. /// </summary> public static string Columns_Columns_Duplicate_column___0__ { get { return ResourceManager.GetString("Columns_Columns_Duplicate_column___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing column &apos;{0}&apos;. /// </summary> public static string Columns_Columns_Missing_column___0__ { get { return ResourceManager.GetString("Columns_Columns_Missing_column___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unrecognized column &apos;{0}&apos;. /// </summary> public static string Columns_Columns_Unrecognized_column___0__ { get { return ResourceManager.GetString("Columns_Columns_Unrecognized_column___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to find table for {0}. /// </summary> public static string ColumnSet_GetColumnInfos_Unable_to_find_table_for__0_ { get { return ResourceManager.GetString("ColumnSet_GetColumnInfos_Unable_to_find_table_for__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected report type. /// </summary> public static string ColumnSet_GetColumnInfos_Unexpected_report_type { get { return ResourceManager.GetString("ColumnSet_GetColumnInfos_Unexpected_report_type", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptide. /// </summary> public static string ColumnSet_GetTransitionsTable_Peptide { get { return ResourceManager.GetString("ColumnSet_GetTransitionsTable_Peptide", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptides. 
/// </summary> public static string ColumnSet_GetTransitionsTable_Peptides { get { return ResourceManager.GetString("ColumnSet_GetTransitionsTable_Peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor. /// </summary> public static string ColumnSet_GetTransitionsTable_Precursor { get { return ResourceManager.GetString("ColumnSet_GetTransitionsTable_Precursor", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursors. /// </summary> public static string ColumnSet_GetTransitionsTable_Precursors { get { return ResourceManager.GetString("ColumnSet_GetTransitionsTable_Precursors", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Protein. /// </summary> public static string ColumnSet_GetTransitionsTable_Protein { get { return ResourceManager.GetString("ColumnSet_GetTransitionsTable_Protein", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transition. /// </summary> public static string ColumnSet_GetTransitionsTable_Transition { get { return ResourceManager.GetString("ColumnSet_GetTransitionsTable_Transition", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transitions. /// </summary> public static string ColumnSet_GetTransitionsTable_Transitions { get { return ResourceManager.GetString("ColumnSet_GetTransitionsTable_Transitions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The dwell time {0} must be between {1} and {2}.. /// </summary> public static string CommandArgs_DwellTime_The_dwell_time__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("CommandArgs_DwellTime_The_dwell_time__0__must_be_between__1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The arguments {0} and {1} options cannot be used together.. /// </summary> public static string CommandArgs_ErrorArgsExclusiveText_Error__The_arguments__0__and__1__options_cannot_be_used_together_ { get { return ResourceManager.GetString("CommandArgs_ErrorArgsExclusiveText_Error__The_arguments__0__and__1__options_canno" + "t_be_used_together_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The arguments below can be used to install tools onto the Tools menu and do not rely on the &apos;--in&apos; argument because they are independent of a specific Skyline document.. /// </summary> public static string CommandArgs_GROUP_TOOLS_The_arguments_below_can_be_used_to_install_tools_onto_the_Tools_menu_and_do_not_rely_on_the____in__argument_because_they_independent_of_a_specific_Skyline_document_ { get { return ResourceManager.GetString("CommandArgs_GROUP_TOOLS_The_arguments_below_can_be_used_to_install_tools_onto_the" + "_Tools_menu_and_do_not_rely_on_the____in__argument_because_they_independent_of_a" + "_specific_Skyline_document_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Tools Installation. /// </summary> public static string CommandArgs_GROUP_TOOLS_Tools_Installation { get { return ResourceManager.GetString("CommandArgs_GROUP_TOOLS_Tools_Installation", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The instrument type {0} is not valid for isolation list export
/// </summary> public static string CommandArgs_IsolationListInstrumentType_The_instrument_type__0__is_not_valid_for_isolation_list_export { get { return ResourceManager.GetString("CommandArgs_IsolationListInstrumentType_The_instrument_type__0__is_not_valid_for_" + "isolation_list_export", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The instrument type {0} is not valid for method export. /// </summary> public static string CommandArgs_MethodInstrumentType_The_instrument_type__0__is_not_valid_for_method_export { get { return ResourceManager.GetString("CommandArgs_MethodInstrumentType_The_instrument_type__0__is_not_valid_for_method_" + "export", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please specify a value for the following argument required to upload the document to Panorama: ///{0}. /// </summary> public static string CommandArgs_PanoramaArgsComplete_ { get { return ResourceManager.GetString("CommandArgs_PanoramaArgsComplete_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please specify a value for the following arguments required to upload the document to Panorama: ///{0}. /// </summary> public static string CommandArgs_PanoramaArgsComplete_plural_ { get { return ResourceManager.GetString("CommandArgs_PanoramaArgsComplete_plural_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} or &apos;{1}&apos;. /// </summary> public static string CommandArgs_ParseArgsInternal______0__or___1__ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal______0__or___1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Defaulting to none.. /// </summary> public static string CommandArgs_ParseArgsInternal_Defaulting_to_none_ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Defaulting_to_none_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Defaulting to standard.. /// </summary> public static string CommandArgs_ParseArgsInternal_Defaulting_to_standard_ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Defaulting_to_standard_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: &quot;{0}&quot; is not a valid value for {1}. It must be one of the following: {2}. /// </summary> public static string CommandArgs_ParseArgsInternal_Error____0___is_not_a_valid_value_for__1___It_must_be_one_of_the_following___2_ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Error____0___is_not_a_valid_value_for__1___It_must_" + "be_one_of_the_following___2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Attempting to exclude an unknown feature name &apos;{0}&apos;. Try one of the following:. /// </summary> public static string CommandArgs_ParseArgsInternal_Error__Attempting_to_exclude_an_unknown_feature_name___0____Try_one_of_the_following_ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Error__Attempting_to_exclude_an_unknown_feature_nam" + "e___0____Try_one_of_the_following_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Regular expression &apos;{0}&apos; does not have any groups. One group is required. The part of the file or sub-directory name that matches the first group in the regular expression is used as the replicate name..
/// </summary> public static string CommandArgs_ParseArgsInternal_Error__Regular_expression___0___does_not_have_any_groups___String { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Error__Regular_expression___0___does_not_have_any_g" + "roups___String", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Regular expression {0} cannot be parsed.. /// </summary> public static string CommandArgs_ParseArgsInternal_Error__Regular_expression__0__cannot_be_parsed_ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Error__Regular_expression__0__cannot_be_parsed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The specified working directory {0} does not exist.. /// </summary> public static string CommandArgs_ParseArgsInternal_Error__The_specified_working_directory__0__does_not_exist_ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Error__The_specified_working_directory__0__does_not" + "_exist_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Unexpected argument --{0}. /// </summary> public static string CommandArgs_ParseArgsInternal_Error__Unexpected_argument____0_ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Error__Unexpected_argument____0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Use --in to specify a Skyline document to open.. /// </summary> public static string CommandArgs_ParseArgsInternal_Error__Use___in_to_specify_a_Skyline_document_to_open_ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Error__Use___in_to_specify_a_Skyline_document_to_op" + "en_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to It must be a number. Defaulting to {0}.. /// </summary> public static string CommandArgs_ParseArgsInternal_It_must_be_a_number__Defaulting_to__0__ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_It_must_be_a_number__Defaulting_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No isolation list will be exported.. /// </summary> public static string CommandArgs_ParseArgsInternal_No_isolation_list_will_be_exported_ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_No_isolation_list_will_be_exported_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No method will be exported.. /// </summary> public static string CommandArgs_ParseArgsInternal_No_method_will_be_exported_ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_No_method_will_be_exported_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No transition list will be exported.. /// </summary> public static string CommandArgs_ParseArgsInternal_No_transition_list_will_be_exported_ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_No_transition_list_will_be_exported_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: Incorrect Usage of the --tool-program-macro command.. 
/// </summary> public static string CommandArgs_ParseArgsInternal_Warning__Incorrect_Usage_of_the___tool_program_macro_command_ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Warning__Incorrect_Usage_of_the___tool_program_macr" + "o_command_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: Invalid max transitions per injection parameter ({0}).. /// </summary> public static string CommandArgs_ParseArgsInternal_Warning__Invalid_max_transitions_per_injection_parameter___0___ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Warning__Invalid_max_transitions_per_injection_para" + "meter___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: Invalid optimization parameter ({0}). Use &quot;ce&quot;, &quot;dp&quot;, or &quot;none&quot;.. /// </summary> public static string CommandArgs_ParseArgsInternal_Warning__Invalid_optimization_parameter___0____Use__ce____dp___or__none__ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Warning__Invalid_optimization_parameter___0____Use_" + "_ce____dp___or__none__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: The export strategy {0} is not valid. It must be one of the following: &quot;single&quot;, &quot;protein&quot; or &quot;buckets&quot;. Defaulting to single.. /// </summary> public static string CommandArgs_ParseArgsInternal_Warning__The_export_strategy__0__is_not_valid__It_must_be_one_of_the_following___string { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Warning__The_export_strategy__0__is_not_valid__It_m" + "ust_be_one_of_the_following___string", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: The instrument type {0} is not valid. Please choose from:. /// </summary> public static string CommandArgs_ParseArgsInternal_Warning__The_instrument_type__0__is_not_valid__Please_choose_from_ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Warning__The_instrument_type__0__is_not_valid__Plea" + "se_choose_from_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: The method type {0} is invalid. It must be one of the following: &quot;standard&quot;, &quot;scheduled&quot; or &quot;triggered&quot;.. /// </summary> public static string CommandArgs_ParseArgsInternal_Warning__The_method_type__0__is_invalid__It_must_be_one_of_the_following___standard____scheduled__or__triggered__ { get { return ResourceManager.GetString("CommandArgs_ParseArgsInternal_Warning__The_method_type__0__is_invalid__It_must_be" + "_one_of_the_following___standard____scheduled__or__triggered__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Regular expression &apos;{0}&apos; for {1} cannot be parsed.. /// </summary> public static string CommandArgs_ParseRegexArgument_Error__Regular_expression___0___for__1__cannot_be_parsed_ { get { return ResourceManager.GetString("CommandArgs_ParseRegexArgument_Error__Regular_expression___0___for__1__cannot_be_" + "parsed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The primary transition count {0} must be between {1} and {2}.. 
/// </summary> public static string CommandArgs_PrimaryTransitionCount_The_primary_transition_count__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("CommandArgs_PrimaryTransitionCount_The_primary_transition_count__0__must_be_betwe" + "en__1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The run length {0} must be between {1} and {2}.. /// </summary> public static string CommandArgs_RunLength_The_run_length__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("CommandArgs_RunLength_The_run_length__0__must_be_between__1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The instrument parameter {0} is not valid for optimization.. /// </summary> public static string CommandArgs_ToOptimizeString_The_instrument_parameter__0__is_not_valid_for_optimization_ { get { return ResourceManager.GetString("CommandArgs_ToOptimizeString_The_instrument_parameter__0__is_not_valid_for_optimi" + "zation_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The instrument type {0} is not valid for transition list export. /// </summary> public static string CommandArgs_TransListInstrumentType_The_instrument_type__0__is_not_valid_for_transition_list_export { get { return ResourceManager.GetString("CommandArgs_TransListInstrumentType_The_instrument_type__0__is_not_valid_for_tran" + "sition_list_export", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Model cutoffs ({0}) must be in decreasing order greater than zero and less than {1}.. /// </summary> public static string CommandArgs_ValidateReintegrateArgs_Error__Model_cutoffs___0___must_be_in_decreasing_order_greater_than_zero_and_less_than__1__ { get { return ResourceManager.GetString("CommandArgs_ValidateReintegrateArgs_Error__Model_cutoffs___0___must_be_in_decreas" + "ing_order_greater_than_zero_and_less_than__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Model cutoffs cannot be applied in calibrating the Skyline default model.. /// </summary> public static string CommandArgs_ValidateReintegrateArgs_Error__Model_cutoffs_cannot_be_applied_in_calibrating_the_Skyline_default_model_ { get { return ResourceManager.GetString("CommandArgs_ValidateReintegrateArgs_Error__Model_cutoffs_cannot_be_applied_in_cal" + "ibrating_the_Skyline_default_model_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: Use of the argument {0} requires the argument {1}. /// </summary> public static string CommandArgs_WarnArgRequirment_Warning__Use_of_the_argument__0__requires_the_argument__1_ { get { return ResourceManager.GetString("CommandArgs_WarnArgRequirment_Warning__Use_of_the_argument__0__requires_the_argum" + "ent__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Added {0} decoy peptides using &apos;{1}&apos; method. /// </summary> public static string CommandLine_AddDecoys_Added__0__decoy_peptides_using___1___method { get { return ResourceManager.GetString("CommandLine_AddDecoys_Added__0__decoy_peptides_using___1___method", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Decoys discarded. 
/// </summary> public static string CommandLine_AddDecoys_Decoys_discarded { get { return ResourceManager.GetString("CommandLine_AddDecoys_Decoys_discarded", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Attempting to add decoys to document with decoys.. /// </summary> public static string CommandLine_AddDecoys_Error__Attempting_to_add_decoys_to_document_with_decoys_ { get { return ResourceManager.GetString("CommandLine_AddDecoys_Error__Attempting_to_add_decoys_to_document_with_decoys_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The number of peptides {0} must be less than the number of peptide precursor models for decoys {1}, or use {2}={3} decoy generation method.. /// </summary> public static string CommandLine_AddDecoys_Error_The_number_of_peptides { get { return ResourceManager.GetString("CommandLine_AddDecoys_Error_The_number_of_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No files match the file name pattern &apos;{0}&apos;.. /// </summary> public static string CommandLine_ApplyFileAndSampleNameRegex_No_files_match_the_file_name_pattern___0___ { get { return ResourceManager.GetString("CommandLine_ApplyFileAndSampleNameRegex_No_files_match_the_file_name_pattern___0_" + "__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No files match the sample name pattern &apos;{0}&apos;.. /// </summary> public static string CommandLine_ApplyFileAndSampleNameRegex_No_files_match_the_sample_name_pattern___0___ { get { return ResourceManager.GetString("CommandLine_ApplyFileAndSampleNameRegex_No_files_match_the_sample_name_pattern___" + "0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File name &apos;{0}&apos; does not match the pattern &apos;{1}&apos;. Ignoring {2}. /// </summary> public static string CommandLine_ApplyFileNameRegex_File_name___0___does_not_match_the_pattern___1____Ignoring__2_ { get { return ResourceManager.GetString("CommandLine_ApplyFileNameRegex_File_name___0___does_not_match_the_pattern___1____" + "Ignoring__2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: {0} does not match the regular expression.. /// </summary> public static string CommandLine_ApplyNamingPattern_Error___0__does_not_match_the_regular_expression_ { get { return ResourceManager.GetString("CommandLine_ApplyNamingPattern_Error___0__does_not_match_the_regular_expression_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Duplicate replicate name &apos;{0}&apos; after applying regular expression.. /// </summary> public static string CommandLine_ApplyNamingPattern_Error__Duplicate_replicate_name___0___after_applying_regular_expression_ { get { return ResourceManager.GetString("CommandLine_ApplyNamingPattern_Error__Duplicate_replicate_name___0___after_applyi" + "ng_regular_expression_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Match to regular expression is empty for {0}.. /// </summary> public static string CommandLine_ApplyNamingPattern_Error__Match_to_regular_expression_is_empty_for__0__ { get { return ResourceManager.GetString("CommandLine_ApplyNamingPattern_Error__Match_to_regular_expression_is_empty_for__0" + "__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File &apos;{0}&apos; does not have a sample. Cannot apply sample name pattern. Ignoring.. 
/// </summary> public static string CommandLine_ApplySampleNameRegex_File___0___does_not_have_a_sample__Cannot_apply_sample_name_pattern__Ignoring_ { get { return ResourceManager.GetString("CommandLine_ApplySampleNameRegex_File___0___does_not_have_a_sample__Cannot_apply_" + "sample_name_pattern__Ignoring_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Sample name &apos;{0}&apos; does not match the pattern &apos;{1}&apos;. Ignoring {2}. /// </summary> public static string CommandLine_ApplySampleNameRegex_Sample_name___0___does_not_match_the_pattern___1____Ignoring__2_ { get { return ResourceManager.GetString("CommandLine_ApplySampleNameRegex_Sample_name___0___does_not_match_the_pattern___1" + "____Ignoring__2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: File does not exist: {0}.. /// </summary> public static string CommandLine_CanReadFile_Error__File_does_not_exist___0__ { get { return ResourceManager.GetString("CommandLine_CanReadFile_Error__File_does_not_exist___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Replicate {0} in the document has an unexpected file {1}.. /// </summary> public static string CommandLine_CheckReplicateFiles_Error__Replicate__0__in_the_document_has_an_unexpected_file__1__ { get { return ResourceManager.GetString("CommandLine_CheckReplicateFiles_Error__Replicate__0__in_the_document_has_an_unexp" + "ected_file__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Could not find the spectral library {0} for this document.. /// </summary> public static string CommandLine_ConnectLibrarySpecs_Error__Could_not_find_the_spectral_library__0__for_this_document_ { get { return ResourceManager.GetString("CommandLine_ConnectLibrarySpecs_Error__Could_not_find_the_spectral_library__0__fo" + "r_this_document_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: Could not find the spectral library {0}. /// </summary> public static string CommandLine_ConnectLibrarySpecs_Warning__Could_not_find_the_spectral_library__0_ { get { return ResourceManager.GetString("CommandLine_ConnectLibrarySpecs_Warning__Could_not_find_the_spectral_library__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Importing an assay library to a document without an iRT calculator cannot create {0}, because it exists.. /// </summary> public static string CommandLine_CreateIrtDatabase_Error__Importing_an_assay_library_to_a_document_without_an_iRT_calculator_cannot_create__0___because_it_exists_ { get { return ResourceManager.GetString("CommandLine_CreateIrtDatabase_Error__Importing_an_assay_library_to_a_document_wit" + "hout_an_iRT_calculator_cannot_create__0___because_it_exists_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Use the {0} argument to specify a file to create.. /// </summary> public static string CommandLine_CreateIrtDatabase_Use_the__0__argument_to_specify_a_file_to_create_ { get { return ResourceManager.GetString("CommandLine_CreateIrtDatabase_Use_the__0__argument_to_specify_a_file_to_create_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Creating scoring model {0}. 
/// </summary> public static string CommandLine_CreateScoringModel_Creating_scoring_model__0_ { get { return ResourceManager.GetString("CommandLine_CreateScoringModel_Creating_scoring_model__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failed to create scoring model.. /// </summary> public static string CommandLine_CreateScoringModel_Error__Failed_to_create_scoring_model_ { get { return ResourceManager.GetString("CommandLine_CreateScoringModel_Error__Failed_to_create_scoring_model_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: There are no decoy peptides in the document. Failed to create scoring model.. /// </summary> public static string CommandLine_CreateScoringModel_Error__There_are_no_decoy_peptides_in_the_document__Failed_to_create_scoring_model_ { get { return ResourceManager.GetString("CommandLine_CreateScoringModel_Error__There_are_no_decoy_peptides_in_the_document" + "__Failed_to_create_scoring_model_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Excluding feature score &apos;{0}&apos;. /// </summary> public static string CommandLine_CreateScoringModel_Excluding_feature_score___0__ { get { return ResourceManager.GetString("CommandLine_CreateScoringModel_Excluding_feature_score___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Excluding feature scores:. /// </summary> public static string CommandLine_CreateScoringModel_Excluding_feature_scores_ { get { return ResourceManager.GetString("CommandLine_CreateScoringModel_Excluding_feature_scores_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Excluding feature scores is not permitted with the default Skyline model.. /// </summary> public static string CommandLine_CreateUntrainedScoringModel_Error__Excluding_feature_scores_is_not_permitted_with_the_default_Skyline_model_ { get { return ResourceManager.GetString("CommandLine_CreateUntrainedScoringModel_Error__Excluding_feature_scores_is_not_pe" + "rmitted_with_the_default_Skyline_model_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Chromatograms file {0} exported successfully.. /// </summary> public static string CommandLine_ExportChromatograms_Chromatograms_file__0__exported_successfully_ { get { return ResourceManager.GetString("CommandLine_ExportChromatograms_Chromatograms_file__0__exported_successfully_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: At least one chromatogram type must be selected. /// </summary> public static string CommandLine_ExportChromatograms_Error__At_least_one_chromatogram_type_must_be_selected { get { return ResourceManager.GetString("CommandLine_ExportChromatograms_Error__At_least_one_chromatogram_type_must_be_sel" + "ected", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failure attempting to save chromatograms file {0}. /// </summary> public static string CommandLine_ExportChromatograms_Error__Failure_attempting_to_save_chromatograms_file__0_ { get { return ResourceManager.GetString("CommandLine_ExportChromatograms_Error__Failure_attempting_to_save_chromatograms_f" + "ile__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The document must have imported results. 
/// </summary> public static string CommandLine_ExportChromatograms_Error__The_document_must_have_imported_results { get { return ResourceManager.GetString("CommandLine_ExportChromatograms_Error__The_document_must_have_imported_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exporting chromatograms file {0}.... /// </summary> public static string CommandLine_ExportChromatograms_Exporting_chromatograms_file__0____ { get { return ResourceManager.GetString("CommandLine_ExportChromatograms_Exporting_chromatograms_file__0____", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: You must specify an output file to write to with the --exp-file=path/to/file parameter. No transition list will be exported.. /// </summary> public static string CommandLine_ExportInstrumentFile_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: A template file is required to export a method.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__A_template_file_is_required_to_export_a_method_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__A_template_file_is_required_to_export_a_m" + "ethod_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: the {0} instrument lacks support for direct method export for triggered acquisition.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__the__0__instrument_lacks_support_for_direct_method_export_for_triggered_acquisition_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__the__0__instrument_lacks_support_for_dire" + "ct_method_export_for_triggered_acquisition_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The current document contains peptides without enough information to rank transitions for triggered acquisition.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__The_current_document_contains_peptides_without_enough_information_to_rank_transitions_for_triggered_acquisition_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__The_current_document_contains_peptides_wi" + "thout_enough_information_to_rank_transitions_for_triggered_acquisition_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The file {0} could not be saved. Check that the specified file directory exists and is writeable.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__The_file__0__could_not_be_saved___Check_that_the_specified_file_directory_exists_and_is_writeable_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__The_file__0__could_not_be_saved___Check_t" + "hat_the_specified_file_directory_exists_and_is_writeable_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The folder {0} does not appear to contain an Agilent QQQ method template. The folder is expected to have a .m extension, and contain the file qqqacqmethod.xsd.. 
/// </summary> public static string CommandLine_ExportInstrumentFile_Error__The_folder__0__does_not_appear_to_contain_an_Agilent_QQQ_method_template___The_folder_is_expected_to_have_a__m_extension__and_contain_the_file_qqqacqmethod_xsd_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__The_folder__0__does_not_appear_to_contain" + "_an_Agilent_QQQ_method_template___The_folder_is_expected_to_have_a__m_extension_" + "_and_contain_the_file_qqqacqmethod_xsd_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: the instrument type {0} does not support triggered acquisition.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__the_instrument_type__0__does_not_support_triggered_acquisition_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__the_instrument_type__0__does_not_support_" + "triggered_acquisition_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: the retention time prediction calculator is unable to score. Check the calculator settings.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__the_retention_time_prediction_calculator_is_unable_to_score___Check_the_calculator_settings_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__the_retention_time_prediction_calculator_" + "is_unable_to_score___Check_the_calculator_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: the retention time predictor is unable to auto-calculate a regression. Check to make sure the document contains times for all of the required standard peptides.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__the_retention_time_predictor_is_unable_to_auto_calculate_a_regression___Check_to_make_sure_the_document_contains_times_for_all_of_the_required_standard_peptides_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__the_retention_time_predictor_is_unable_to" + "_auto_calculate_a_regression___Check_to_make_sure_the_document_contains_times_fo" + "r_all_of_the_required_standard_peptides_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: the specified instrument {0} is not compatible with scheduled methods.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__the_specified_instrument__0__is_not_compatible_with_scheduled_methods_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__the_specified_instrument__0__is_not_compa" + "tible_with_scheduled_methods_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: the specified replicate {0} does not exist in the document.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__the_specified_replicate__0__does_not_exist_in_the_document_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__the_specified_replicate__0__does_not_exis" + "t_in_the_document_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The template extension {0} does not match the expected extension for the instrument {1}. No method will be exported.. 
/// </summary> public static string CommandLine_ExportInstrumentFile_Error__The_template_extension__0__does_not_match_the_expected_extension_for_the_instrument__1___No_method_will_be_exported_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__The_template_extension__0__does_not_match" + "_the_expected_extension_for_the_instrument__1___No_method_will_be_exported_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The template file {0} does not exist.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__The_template_file__0__does_not_exist_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__The_template_file__0__does_not_exist_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: to export a scheduled method, you must first choose a retention time predictor in Peptide Settings / Prediction.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__to_export_a_scheduled_method__you_must_first_choose_a_retention_time_predictor_in_Peptide_Settings___Prediction_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__to_export_a_scheduled_method__you_must_fi" + "rst_choose_a_retention_time_predictor_in_Peptide_Settings___Prediction_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: to export a scheduled method, you must first choose a retention time predictor in Peptide Settings / Prediction, or import results for all peptides in the document.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__to_export_a_scheduled_method__you_must_first_choose_a_retention_time_predictor_in_Peptide_Settings___Prediction__or_import_results_for_all_peptides_in_the_document_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__to_export_a_scheduled_method__you_must_fi" + "rst_choose_a_retention_time_predictor_in_Peptide_Settings___Prediction__or_impor" + "t_results_for_all_peptides_in_the_document_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: To export a scheduled method, you must first import results for all peptides in the document.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__To_export_a_scheduled_method__you_must_first_import_results_for_all_peptides_in_the_document_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__To_export_a_scheduled_method__you_must_fi" + "rst_import_results_for_all_peptides_in_the_document_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: triggered acquisition requires a spectral library or imported results in order to rank transitions.. /// </summary> public static string CommandLine_ExportInstrumentFile_Error__triggered_acquistion_requires_a_spectral_library_or_imported_results_in_order_to_rank_transitions_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Error__triggered_acquistion_requires_a_spectral_" + "library_or_imported_results_in_order_to_rank_transitions_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to List {0} exported successfully..
/// </summary> public static string CommandLine_ExportInstrumentFile_List__0__exported_successfully_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_List__0__exported_successfully_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Method {0} exported successfully.. /// </summary> public static string CommandLine_ExportInstrumentFile_Method__0__exported_successfully_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Method__0__exported_successfully_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No list will be exported.. /// </summary> public static string CommandLine_ExportInstrumentFile_No_list_will_be_exported_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_No_list_will_be_exported_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No method will be exported.. /// </summary> public static string CommandLine_ExportInstrumentFile_No_method_will_be_exported_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_No_method_will_be_exported_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: Max transitions per injection must be set to some value between {0} and {1} for export strategies &quot;protein&quot; and &quot;buckets&quot; and for scheduled methods. You specified {3}. Defaulting to {2}.. /// </summary> public static string CommandLine_ExportInstrumentFile_Warning__Max_transitions_per_injection_must_be_set_to_some_value_between__0__and__1__for_export_strategies__protein__and__buckets__and_for_scheduled_methods__You_specified__3___Defaulting_to__2__ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Warning__Max_transitions_per_injection_must_be_s" + "et_to_some_value_between__0__and__1__for_export_strategies__protein__and__bucket" + "s__and_for_scheduled_methods__You_specified__3___Defaulting_to__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: No export strategy specified (from &quot;single&quot;, &quot;protein&quot; or &quot;buckets&quot;). Defaulting to &quot;single&quot;.. /// </summary> public static string CommandLine_ExportInstrumentFile_Warning__No_export_strategy_specified__from__single____protein__or__buckets____Defaulting_to__single__ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Warning__No_export_strategy_specified__from__sin" + "gle____protein__or__buckets____Defaulting_to__single__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: The add-energy-ramp parameter is only applicable for Thermo transition lists. This parameter will be ignored.. /// </summary> public static string CommandLine_ExportInstrumentFile_Warning__The_add_energy_ramp_parameter_is_only_applicable_for_Thermo_transition_lists__This_parameter_will_be_ignored_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Warning__The_add_energy_ramp_parameter_is_only_a" + "pplicable_for_Thermo_transition_lists__This_parameter_will_be_ignored_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: The vendor {0} does not match the vendor in either the CE or DP prediction setting. Continuing exporting a transition list anyway.... 
/// </summary> public static string CommandLine_ExportInstrumentFile_Warning__The_vendor__0__does_not_match_the_vendor_in_either_the_CE_or_DP_prediction_setting___Continuing_exporting_a_transition_list_anyway___ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_Warning__The_vendor__0__does_not_match_the_vendo" + "r_in_either_the_CE_or_DP_prediction_setting___Continuing_exporting_a_transition_" + "list_anyway___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You must export a {0} transition list and manually import it into a method file using vendor software.. /// </summary> public static string CommandLine_ExportInstrumentFile_You_must_export_a__0__transition_list_and_manually_import_it_into_a_method_file_using_vendor_software_ { get { return ResourceManager.GetString("CommandLine_ExportInstrumentFile_You_must_export_a__0__transition_list_and_manual" + "ly_import_it_into_a_method_file_using_vendor_software_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check to make sure it is not read-only.. /// </summary> public static string CommandLine_ExportLiveReport_Check_to_make_sure_it_is_not_read_only_ { get { return ResourceManager.GetString("CommandLine_ExportLiveReport_Check_to_make_sure_it_is_not_read_only_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failure attempting to save {0} report to {1}.. /// </summary> public static string CommandLine_ExportLiveReport_Error__Failure_attempting_to_save__0__report_to__1__ { get { return ResourceManager.GetString("CommandLine_ExportLiveReport_Error__Failure_attempting_to_save__0__report_to__1__" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The report {0} could not be saved to {1}.. /// </summary> public static string CommandLine_ExportLiveReport_Error__The_report__0__could_not_be_saved_to__1__ { get { return ResourceManager.GetString("CommandLine_ExportLiveReport_Error__The_report__0__could_not_be_saved_to__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The report {0} does not exist. If it has spaces in its name, use &quot;double quotes&quot; around the entire list of command parameters.. /// </summary> public static string CommandLine_ExportLiveReport_Error__The_report__0__does_not_exist__If_it_has_spaces_in_its_name__use__double_quotes__around_the_entire_list_of_command_parameters_ { get { return ResourceManager.GetString("CommandLine_ExportLiveReport_Error__The_report__0__does_not_exist__If_it_has_spac" + "es_in_its_name__use__double_quotes__around_the_entire_list_of_command_parameters" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exporting report {0}.... /// </summary> public static string CommandLine_ExportLiveReport_Exporting_report__0____ { get { return ResourceManager.GetString("CommandLine_ExportLiveReport_Exporting_report__0____", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Report {0} exported successfully to {1}.. /// </summary> public static string CommandLine_ExportLiveReport_Report__0__exported_successfully_to__1__ { get { return ResourceManager.GetString("CommandLine_ExportLiveReport_Report__0__exported_successfully_to__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: If you specify a report, you must specify the --report-file=path/to/file.csv parameter.. 
/// </summary> public static string CommandLine_ExportReport_ { get { return ResourceManager.GetString("CommandLine_ExportReport_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failure attempting to save {0} report to {1}.. /// </summary> public static string CommandLine_ExportReport_Error__Failure_attempting_to_save__0__report_to__1__ { get { return ResourceManager.GetString("CommandLine_ExportReport_Error__Failure_attempting_to_save__0__report_to__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The report {0} could not be saved to {1}. Check to make sure it is not read-only.. /// </summary> public static string CommandLine_ExportReport_Error__The_report__0__could_not_be_saved_to__1____Check_to_make_sure_it_is_not_read_only_ { get { return ResourceManager.GetString("CommandLine_ExportReport_Error__The_report__0__could_not_be_saved_to__1____Check_" + "to_make_sure_it_is_not_read_only_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The report {0} does not exist. If it has spaces in its name, use &quot;double quotes&quot; around the entire list of command parameters.. /// </summary> public static string CommandLine_ExportReport_Error__The_report__0__does_not_exist__If_it_has_spaces_in_its_name__use__double_quotes__around_the_entire_list_of_command_parameters_ { get { return ResourceManager.GetString("CommandLine_ExportReport_Error__The_report__0__does_not_exist__If_it_has_spaces_i" + "n_its_name__use__double_quotes__around_the_entire_list_of_command_parameters_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exporting report {0}.... /// </summary> public static string CommandLine_ExportReport_Exporting_report__0____ { get { return ResourceManager.GetString("CommandLine_ExportReport_Exporting_report__0____", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Report {0} exported successfully.. /// </summary> public static string CommandLine_ExportReport_Report__0__exported_successfully_ { get { return ResourceManager.GetString("CommandLine_ExportReport_Report__0__exported_successfully_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: Could not find the background proteome file {0}.. /// </summary> public static string CommandLine_FindBackgroundProteome_Warning__Could_not_find_the_background_proteome_file__0__ { get { return ResourceManager.GetString("CommandLine_FindBackgroundProteome_Warning__Could_not_find_the_background_proteom" + "e_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Could not find the ion mobility library {0}.. /// </summary> public static string CommandLine_FindIonMobilityDatabase_Error__Could_not_find_the_ion_mobility_library__0__ { get { return ResourceManager.GetString("CommandLine_FindIonMobilityDatabase_Error__Could_not_find_the_ion_mobility_librar" + "y__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Could not find the iRT database {0}.. /// </summary> public static string CommandLine_FindIrtDatabase_Error__Could_not_find_the_iRT_database__0__ { get { return ResourceManager.GetString("CommandLine_FindIrtDatabase_Error__Could_not_find_the_iRT_database__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Could not find the optimization library {0}.. 
/// </summary> public static string CommandLine_FindOptimizationDatabase_Could_not_find_the_optimization_library__0__ { get { return ResourceManager.GetString("CommandLine_FindOptimizationDatabase_Could_not_find_the_optimization_library__0__" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: {0}. /// </summary> public static string CommandLine_GeneralException_Error___0_ { get { return ResourceManager.GetString("CommandLine_GeneralException_Error___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failure reading file information from directory {0}.. /// </summary> public static string CommandLine_GetDataSources_Error__Failure_reading_file_information_from_directory__0__ { get { return ResourceManager.GetString("CommandLine_GetDataSources_Error__Failure_reading_file_information_from_directory" + "__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: No data sources found in directory {0}.. /// </summary> public static string CommandLine_GetDataSources_Error__No_data_sources_found_in_directory__0__ { get { return ResourceManager.GetString("CommandLine_GetDataSources_Error__No_data_sources_found_in_directory__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failed while reading annotations.. /// </summary> public static string CommandLine_ImportAnnotations_Error__Failed_while_reading_annotations_ { get { return ResourceManager.GetString("CommandLine_ImportAnnotations_Error__Failed_while_reading_annotations_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Importing FASTA file {0}.... /// </summary> public static string CommandLine_ImportFasta_Importing_FASTA_file__0____ { get { return ResourceManager.GetString("CommandLine_ImportFasta_Importing_FASTA_file__0____", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No replicates left to import.. /// </summary> public static string CommandLine_ImportResults_No_replicates_left_to_import_ { get { return ResourceManager.GetString("CommandLine_ImportResults_No_replicates_left_to_import_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} -&gt; {1} Note: The file has already been imported. Ignoring.... /// </summary> public static string CommandLine_ImportResultsFile__0______1___Note__The_file_has_already_been_imported__Ignoring___ { get { return ResourceManager.GetString("CommandLine_ImportResultsFile__0______1___Note__The_file_has_already_been_importe" + "d__Ignoring___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding results.... /// </summary> public static string CommandLine_ImportResultsFile_Adding_results___ { get { return ResourceManager.GetString("CommandLine_ImportResultsFile_Adding_results___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failed importing the results file {0}.. /// </summary> public static string CommandLine_ImportResultsFile_Error__Failed_importing_the_results_file__0__ { get { return ResourceManager.GetString("CommandLine_ImportResultsFile_Error__Failed_importing_the_results_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File write date {0} is after --import-before date {1}. Ignoring.... 
/// </summary> public static string CommandLine_ImportResultsFile_File_write_date__0__is_after___import_before_date__1___Ignoring___ { get { return ResourceManager.GetString("CommandLine_ImportResultsFile_File_write_date__0__is_after___import_before_date__" + "1___Ignoring___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File write date {0} is before --import-on-or-after date {1}. Ignoring.... /// </summary> public static string CommandLine_ImportResultsFile_File_write_date__0__is_before___import_on_or_after_date__1___Ignoring___ { get { return ResourceManager.GetString("CommandLine_ImportResultsFile_File_write_date__0__is_before___import_on_or_after_" + "date__1___Ignoring___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Results added from {0} to replicate {1}.. /// </summary> public static string CommandLine_ImportResultsFile_Results_added_from__0__to_replicate__1__ { get { return ResourceManager.GetString("CommandLine_ImportResultsFile_Results_added_from__0__to_replicate__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: Cannot read file {0}. Ignoring.... /// </summary> public static string CommandLine_ImportResultsFile_Warning__Cannot_read_file__0____Ignoring___ { get { return ResourceManager.GetString("CommandLine_ImportResultsFile_Warning__Cannot_read_file__0____Ignoring___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: Failed importing the results file {0}. Ignoring.... /// </summary> public static string CommandLine_ImportResultsFile_Warning__Failed_importing_the_results_file__0____Ignoring___ { get { return ResourceManager.GetString("CommandLine_ImportResultsFile_Warning__Failed_importing_the_results_file__0____Ig" + "noring___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: The replicate {0} already exists in the given document and the --import-append option is not specified. The replicate will not be added to the document.. /// </summary> public static string CommandLine_ImportResultsFile_Warning__The_replicate__0__already_exists_in_the_given_document_and_the___import_append_option_is_not_specified___The_replicate_will_not_be_added_to_the_document_ { get { return ResourceManager.GetString("CommandLine_ImportResultsFile_Warning__The_replicate__0__already_exists_in_the_gi" + "ven_document_and_the___import_append_option_is_not_specified___The_replicate_wil" + "l_not_be_added_to_the_document_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Could not get last write time for file {0}.. /// </summary> public static string CommandLine_ImportResultsInDir_Error__Could_not_get_last_write_time_for_file__0__ { get { return ResourceManager.GetString("CommandLine_ImportResultsInDir_Error__Could_not_get_last_write_time_for_file__0__" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding {0} modifications.. /// </summary> public static string CommandLine_ImportSearch_Adding__0__modifications_ { get { return ResourceManager.GetString("CommandLine_ImportSearch_Adding__0__modifications_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding 1 modification.. 
/// </summary> public static string CommandLine_ImportSearch_Adding_1_modification_ { get { return ResourceManager.GetString("CommandLine_ImportSearch_Adding_1_modification_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Creating spectral library from files:. /// </summary> public static string CommandLine_ImportSearch_Creating_spectral_library_from_files_ { get { return ResourceManager.GetString("CommandLine_ImportSearch_Creating_spectral_library_from_files_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading library. /// </summary> public static string CommandLine_ImportSearch_Loading_library { get { return ResourceManager.GetString("CommandLine_ImportSearch_Loading_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: Unable to locate results file &apos;{0}&apos;. /// </summary> public static string CommandLine_ImportSearch_Warning__Unable_to_locate_results_file___0__ { get { return ResourceManager.GetString("CommandLine_ImportSearch_Warning__Unable_to_locate_results_file___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must be set when using CiRT peptides.. /// </summary> public static string CommandLine_ImportSearchInternal__0__must_be_set_when_using_CiRT_peptides_ { get { return ResourceManager.GetString("CommandLine_ImportSearchInternal__0__must_be_set_when_using_CiRT_peptides_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to iRT standard set to {0}, but multiple iRT standards were found. iRT standard must be set explicitly.. /// </summary> public static string CommandLine_ImportSearchInternal_iRT_standard_set_to__0___but_multiple_iRT_standards_were_found__iRT_standard_must_be_set_explicitly_ { get { return ResourceManager.GetString("CommandLine_ImportSearchInternal_iRT_standard_set_to__0___but_multiple_iRT_standa" + "rds_were_found__iRT_standard_must_be_set_explicitly_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The iRT standard name &apos;{0}&apos; is invalid.. /// </summary> public static string CommandLine_ImportSearchInternal_The_iRT_standard_name___0___is_invalid_ { get { return ResourceManager.GetString("CommandLine_ImportSearchInternal_The_iRT_standard_name___0___is_invalid_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure loading {0}. /// {1}. /// </summary> public static string CommandLine_ImportSkyr_ { get { return ResourceManager.GetString("CommandLine_ImportSkyr_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: {0} does not exist. --report-add command failed.. /// </summary> public static string CommandLine_ImportSkyr_Error___0__does_not_exist____report_add_command_failed_ { get { return ResourceManager.GetString("CommandLine_ImportSkyr_Error___0__does_not_exist____report_add_command_failed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Success! Imported Reports from {0}. /// </summary> public static string CommandLine_ImportSkyr_Success__Imported_Reports_from__0_ { get { return ResourceManager.GetString("CommandLine_ImportSkyr_Success__Imported_Reports_from__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: A tool titled {0} already exists. Please use --tool-conflict-resolution=&lt; overwrite | skip &gt;. Tool titled {0} was not added.. 
/// </summary> public static string CommandLine_ImportTool_ { get { return ResourceManager.GetString("CommandLine_ImportTool_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} was added to the Tools Menu.. /// </summary> public static string CommandLine_ImportTool__0__was_added_to_the_Tools_Menu_ { get { return ResourceManager.GetString("CommandLine_ImportTool__0__was_added_to_the_Tools_Menu_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: If {0} is an argument the tool must have a Report Title. Use the --tool-report parameter to specify a report.. /// </summary> public static string CommandLine_ImportTool_Error__If__0__is_and_argument_the_tool_must_have_a_Report_Title__Use_the___tool_report_parameter_to_specify_a_report_ { get { return ResourceManager.GetString("CommandLine_ImportTool_Error__If__0__is_and_argument_the_tool_must_have_a_Report_" + "Title__Use_the___tool_report_parameter_to_specify_a_report_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Please import the report format for {0}. Use the --report-add parameter to add the missing custom report.. /// </summary> public static string CommandLine_ImportTool_Error__Please_import_the_report_format_for__0____Use_the___report_add_parameter_to_add_the_missing_custom_report_ { get { return ResourceManager.GetString("CommandLine_ImportTool_Error__Please_import_the_report_format_for__0____Use_the__" + "_report_add_parameter_to_add_the_missing_custom_report_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: the provided command for the tool {0} is not of a supported type. Supported Types are: {1}. /// </summary> public static string CommandLine_ImportTool_Error__the_provided_command_for_the_tool__0__is_not_of_a_supported_type___Supported_Types_are___1_ { get { return ResourceManager.GetString("CommandLine_ImportTool_Error__the_provided_command_for_the_tool__0__is_not_of_a_s" + "upported_type___Supported_Types_are___1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: to import a tool it must have a name and a command. Use --tool-add to specify a name and use --tool-command to specify a command. The tool was not imported.... /// </summary> public static string CommandLine_ImportTool_Error__to_import_a_tool_it_must_have_a_name_and_a_command___Use___tool_add_to_specify_a_name_and_use___tool_command_to_specify_a_command___The_tool_was_not_imported___ { get { return ResourceManager.GetString("CommandLine_ImportTool_Error__to_import_a_tool_it_must_have_a_name_and_a_command_" + "__Use___tool_add_to_specify_a_name_and_use___tool_command_to_specify_a_command__" + "_The_tool_was_not_imported___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The tool was not imported.... /// </summary> public static string CommandLine_ImportTool_The_tool_was_not_imported___ { get { return ResourceManager.GetString("CommandLine_ImportTool_The_tool_was_not_imported___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: skipping tool {0} due to a name conflict.. /// </summary> public static string CommandLine_ImportTool_Warning__skipping_tool__0__due_to_a_name_conflict_ { get { return ResourceManager.GetString("CommandLine_ImportTool_Warning__skipping_tool__0__due_to_a_name_conflict_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: the tool {0} was overwritten
/// </summary> public static string CommandLine_ImportTool_Warning__the_tool__0__was_overwritten { get { return ResourceManager.GetString("CommandLine_ImportTool_Warning__the_tool__0__was_overwritten", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Canceled installing tools from {0}.. /// </summary> public static string CommandLine_ImportToolsFromZip_Canceled_installing_tools_from__0__ { get { return ResourceManager.GetString("CommandLine_ImportToolsFromZip_Canceled_installing_tools_from__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: the file specified with the --tool-add-zip command does not exist. Please verify the file location and try again.. /// </summary> public static string CommandLine_ImportToolsFromZip_Error__the_file_specified_with_the___tool_add_zip_command_does_not_exist__Please_verify_the_file_location_and_try_again_ { get { return ResourceManager.GetString("CommandLine_ImportToolsFromZip_Error__the_file_specified_with_the___tool_add_zip_" + "command_does_not_exist__Please_verify_the_file_location_and_try_again_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: the file specified with the --tool-add-zip command is not a .zip file. Please specify a valid .zip file.. /// </summary> public static string CommandLine_ImportToolsFromZip_Error__the_file_specified_with_the___tool_add_zip_command_is_not_a__zip_file__Please_specify_a_valid__zip_file_ { get { return ResourceManager.GetString("CommandLine_ImportToolsFromZip_Error__the_file_specified_with_the___tool_add_zip_" + "command_is_not_a__zip_file__Please_specify_a_valid__zip_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: to import tools from a zip you must specify a path; --tool-add-zip must be followed by an existing path.. /// </summary> public static string CommandLine_ImportToolsFromZip_Error__to_import_tools_from_a_zip_you_must_specify_a_path___tool_add_zip_must_be_followed_by_an_existing_path_ { get { return ResourceManager.GetString("CommandLine_ImportToolsFromZip_Error__to_import_tools_from_a_zip_you_must_specify" + "_a_path___tool_add_zip_must_be_followed_by_an_existing_path_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Installed tool {0}. /// </summary> public static string CommandLine_ImportToolsFromZip_Installed_tool__0_ { get { return ResourceManager.GetString("CommandLine_ImportToolsFromZip_Installed_tool__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Installing tools from {0}. /// </summary> public static string CommandLine_ImportToolsFromZip_Installing_tools_from__0_ { get { return ResourceManager.GetString("CommandLine_ImportToolsFromZip_Installing_tools_from__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding {0} spectra to the library {1}. /// </summary> public static string CommandLine_ImportTransitionList_Adding__0__spectra_to_the_library__1_ { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Adding__0__spectra_to_the_library__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: (line {0}, column {1}) {2}.
/// </summary> public static string CommandLine_ImportTransitionList_Error___line__0___column__1____2_ { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Error___line__0___column__1____2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Imported assay library {0} lacks ion abundance values.. /// </summary> public static string CommandLine_ImportTransitionList_Error__Imported_assay_library__0__lacks_ion_abundance_values_ { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Error__Imported_assay_library__0__lacks_ion_abun" + "dance_values_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Imported assay library {0} lacks iRT and ion abundance values.. /// </summary> public static string CommandLine_ImportTransitionList_Error__Imported_assay_library__0__lacks_iRT_and_ion_abundance_values_ { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Error__Imported_assay_library__0__lacks_iRT_and_" + "ion_abundance_values_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Imported assay library {0} lacks iRT values.. /// </summary> public static string CommandLine_ImportTransitionList_Error__Imported_assay_library__0__lacks_iRT_values_ { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Error__Imported_assay_library__0__lacks_iRT_valu" + "es_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The name {0} specified with {1} was not found in the imported assay library.. /// </summary> public static string CommandLine_ImportTransitionList_Error__The_name__0__specified_with__1__was_not_found_in_the_imported_assay_library_ { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Error__The_name__0__specified_with__1__was_not_f" + "ound_in_the_imported_assay_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: There is an existing library with the same name {0} as the document library to be created.. /// </summary> public static string CommandLine_ImportTransitionList_Error__There_is_an_existing_library_with_the_same_name__0__as_the_document_library_to_be_created_ { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Error__There_is_an_existing_library_with_the_sam" + "e_name__0__as_the_document_library_to_be_created_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: To create the iRT database &apos;{0}&apos; for this assay library, you must specify the iRT standards using either of the arguments {1} or {2}. /// </summary> public static string CommandLine_ImportTransitionList_Error__To_create_the_iRT_database___0___for_this_assay_library__you_must_specify_the_iRT_standards_using_either_of_the_arguments__1__or__2_ { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Error__To_create_the_iRT_database___0___for_this" + "_assay_library__you_must_specify_the_iRT_standards_using_either_of_the_arguments" + "__1__or__2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Finishing up import. /// </summary> public static string CommandLine_ImportTransitionList_Finishing_up_import { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Finishing_up_import", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Importing {0} iRT values into the iRT calculator {1}. 
/// </summary> public static string CommandLine_ImportTransitionList_Importing__0__iRT_values_into_the_iRT_calculator__1_ { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Importing__0__iRT_values_into_the_iRT_calculator" + "__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Importing iRT transition list {0}. /// </summary> public static string CommandLine_ImportTransitionList_Importing_iRT_transition_list__0_ { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Importing_iRT_transition_list__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Importing transition list {0}.... /// </summary> public static string CommandLine_ImportTransitionList_Importing_transiton_list__0____ { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Importing_transiton_list__0____", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: (line {0}, column {1}) {2}. /// </summary> public static string CommandLine_ImportTransitionList_Warning___line__0___column__1____2_ { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Warning___line__0___column__1____2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: The document is missing iRT standards. /// </summary> public static string CommandLine_ImportTransitionList_Warning__The_document_is_missing_iRT_standards { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Warning__The_document_is_missing_iRT_standards", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning: The iRT calculator already contains {0} with the value {1}. Ignoring {2}. /// </summary> public static string CommandLine_ImportTransitionList_Warning__The_iRT_calculator_already_contains__0__with_the_value__1___Ignoring__2_ { get { return ResourceManager.GetString("CommandLine_ImportTransitionList_Warning__The_iRT_calculator_already_contains__0_" + "_with_the_value__1___Ignoring__2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Added: {0}. /// </summary> public static string CommandLine_LogDocumentDelta_Added___0_ { get { return ResourceManager.GetString("CommandLine_LogDocumentDelta_Added___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Removed: {0}. /// </summary> public static string CommandLine_LogDocumentDelta_Removed___0_ { get { return ResourceManager.GetString("CommandLine_LogDocumentDelta_Removed___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Document unchanged. /// </summary> public static string CommandLine_LogNewEntries_Document_unchanged { get { return ResourceManager.GetString("CommandLine_LogNewEntries_Document_unchanged", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Replicate &apos;{0}&apos; already exists in the document, using &apos;{1}&apos; instead.. /// </summary> public static string CommandLine_MakeReplicateNamesUnique_Replicate___0___already_exists_in_the_document__using___1___instead_ { get { return ResourceManager.GetString("CommandLine_MakeReplicateNamesUnique_Replicate___0___already_exists_in_the_docume" + "nt__using___1___instead_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The Skyline file {0} does not exist..
/// </summary> public static string CommandLine_OpenSkyFile_Error__The_Skyline_file__0__does_not_exist_ { get { return ResourceManager.GetString("CommandLine_OpenSkyFile_Error__The_Skyline_file__0__does_not_exist_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: There was an error opening the file {0}. /// </summary> public static string CommandLine_OpenSkyFile_Error__There_was_an_error_opening_the_file__0_ { get { return ResourceManager.GetString("CommandLine_OpenSkyFile_Error__There_was_an_error_opening_the_file__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File {0} opened.. /// </summary> public static string CommandLine_OpenSkyFile_File__0__opened_ { get { return ResourceManager.GetString("CommandLine_OpenSkyFile_File__0__opened_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Opening file.... /// </summary> public static string CommandLine_OpenSkyFile_Opening_file___ { get { return ResourceManager.GetString("CommandLine_OpenSkyFile_Opening_file___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Choose one of {0}. /// </summary> public static string CommandLine_RefineDocument_Choose_one_of__0_ { get { return ResourceManager.GetString("CommandLine_RefineDocument_Choose_one_of__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The label type &apos;{0}&apos; was not found in the document.. /// </summary> public static string CommandLine_RefineDocument_Error__The_label_type___0___was_not_found_in_the_document_ { get { return ResourceManager.GetString("CommandLine_RefineDocument_Error__The_label_type___0___was_not_found_in_the_docum" + "ent_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Refining document.... /// </summary> public static string CommandLine_RefineDocument_Refining_document___ { get { return ResourceManager.GetString("CommandLine_RefineDocument_Refining_document___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failed to reintegrate peaks successfully.. /// </summary> public static string CommandLine_Reintegrate_Error__Failed_to_reintegrate_peaks_successfully_ { get { return ResourceManager.GetString("CommandLine_Reintegrate_Error__Failed_to_reintegrate_peaks_successfully_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The current peak scoring model is incompatible with one or more peptides in the document. Please train a new model.. /// </summary> public static string CommandLine_Reintegrate_Error__The_current_peak_scoring_model_is_incompatible_with_one_or_more_peptides_in_the_document__Please_train_a_new_model_ { get { return ResourceManager.GetString("CommandLine_Reintegrate_Error__The_current_peak_scoring_model_is_incompatible_wit" + "h_one_or_more_peptides_in_the_document__Please_train_a_new_model_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Unknown peak scoring model &apos;{0}&apos;. /// </summary> public static string CommandLine_ReintegratePeaks_Error__Unknown_peak_scoring_model___0__ { get { return ResourceManager.GetString("CommandLine_ReintegratePeaks_Error__Unknown_peak_scoring_model___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: You must first import results into the document before reintegrating.. 
/// </summary> public static string CommandLine_ReintegratePeaks_Error__You_must_first_import_results_into_the_document_before_reintegrating_ { get { return ResourceManager.GetString("CommandLine_ReintegratePeaks_Error__You_must_first_import_results_into_the_docume" + "nt_before_reintegrating_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} -&gt; {1} Note: The file has already been imported. Ignoring.... /// </summary> public static string CommandLine_RemoveImportedFiles__0______1___Note__The_file_has_already_been_imported__Ignoring___ { get { return ResourceManager.GetString("CommandLine_RemoveImportedFiles__0______1___Note__The_file_has_already_been_impor" + "ted__Ignoring___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Removed {0}.. /// </summary> public static string CommandLine_RemoveResults_Removed__0__ { get { return ResourceManager.GetString("CommandLine_RemoveResults_Removed__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Removing all results. /// </summary> public static string CommandLine_RemoveResults_Removing_all_results { get { return ResourceManager.GetString("CommandLine_RemoveResults_Removing_all_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Removing results before . /// </summary> public static string CommandLine_RemoveResults_Removing_results_before_ { get { return ResourceManager.GetString("CommandLine_RemoveResults_Removing_results_before_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failed importing the file {0}. {1}. /// </summary> public static string CommandLine_Run_Error__Failed_importing_the_file__0____1_ { get { return ResourceManager.GetString("CommandLine_Run_Error__Failed_importing_the_file__0____1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failed to get optimization function {0}. {1}. /// </summary> public static string CommandLine_Run_Error__Failed_to_get_optimization_function__0____1_ { get { return ResourceManager.GetString("CommandLine_Run_Error__Failed_to_get_optimization_function__0____1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failed to open log file {0}. /// </summary> public static string CommandLine_Run_Error__Failed_to_open_log_file__0_ { get { return ResourceManager.GetString("CommandLine_Run_Error__Failed_to_open_log_file__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: You cannot simultaneously export a transition list and a method. Neither will be exported. Please change the command line parameters.. /// </summary> public static string CommandLine_Run_Error__You_cannot_simultaneously_export_a_transition_list_and_a_method___Neither_will_be_exported__ { get { return ResourceManager.GetString("CommandLine_Run_Error__You_cannot_simultaneously_export_a_transition_list_and_a_m" + "ethod___Neither_will_be_exported__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exiting.... /// </summary> public static string CommandLine_Run_Exiting___ { get { return ResourceManager.GetString("CommandLine_Run_Exiting___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No new results added. Skipping Panorama import.. 
/// </summary> public static string CommandLine_Run_No_new_results_added__Skipping_Panorama_import_ { get { return ResourceManager.GetString("CommandLine_Run_No_new_results_added__Skipping_Panorama_import_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Not setting library.. /// </summary> public static string CommandLine_Run_Not_setting_library_ { get { return ResourceManager.GetString("CommandLine_Run_Not_setting_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Writing to log file {0}. /// </summary> public static string CommandLine_Run_Writing_to_log_file__0_ { get { return ResourceManager.GetString("CommandLine_Run_Writing_to_log_file__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: {0} does not exist. --batch-commands failed.. /// </summary> public static string CommandLine_RunBatchCommands_Error___0__does_not_exist____batch_commands_failed_ { get { return ResourceManager.GetString("CommandLine_RunBatchCommands_Error___0__does_not_exist____batch_commands_failed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: failed to open file {0} --batch-commands command failed.. /// </summary> public static string CommandLine_RunBatchCommands_Error__failed_to_open_file__0____batch_commands_command_failed_ { get { return ResourceManager.GetString("CommandLine_RunBatchCommands_Error__failed_to_open_file__0____batch_commands_comm" + "and_failed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The file could not be saved to {0}. Check that the directory exists and is not read-only.. /// </summary> public static string CommandLine_SaveFile_Error__The_file_could_not_be_saved_to__0____Check_that_the_directory_exists_and_is_not_read_only_ { get { return ResourceManager.GetString("CommandLine_SaveFile_Error__The_file_could_not_be_saved_to__0____Check_that_the_d" + "irectory_exists_and_is_not_read_only_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File {0} saved.. /// </summary> public static string CommandLine_SaveFile_File__0__saved_ { get { return ResourceManager.GetString("CommandLine_SaveFile_File__0__saved_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Saving file.... /// </summary> public static string CommandLine_SaveFile_Saving_file___ { get { return ResourceManager.GetString("CommandLine_SaveFile_Saving_file___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failed saving to the user configuration file.. /// </summary> public static string CommandLine_SaveSettings_Error__Failed_saving_to_the_user_configuration_file_ { get { return ResourceManager.GetString("CommandLine_SaveSettings_Error__Failed_saving_to_the_user_configuration_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failed attempting to change the transition filter settings.. /// </summary> public static string CommandLine_SetFilterSettings_Error__Failed_attempting_to_change_the_transition_filter_settings_ { get { return ResourceManager.GetString("CommandLine_SetFilterSettings_Error__Failed_attempting_to_change_the_transition_f" + "ilter_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Changing full-scan extraction to +/- {0} minutes from MS/MS IDs.. 
/// </summary> public static string CommandLine_SetFullScanSettings_Changing_full_scan_extraction_to______0__minutes_from_MS_MS_IDs_ { get { return ResourceManager.GetString("CommandLine_SetFullScanSettings_Changing_full_scan_extraction_to______0__minutes_" + "from_MS_MS_IDs_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Changing full-scan extraction to +/- {0} minutes from predicted value.. /// </summary> public static string CommandLine_SetFullScanSettings_Changing_full_scan_extraction_to______0__minutes_from_predicted_value_ { get { return ResourceManager.GetString("CommandLine_SetFullScanSettings_Changing_full_scan_extraction_to______0__minutes_" + "from_predicted_value_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Changing full scan precursor mass accuracy to {0} ppm.. /// </summary> public static string CommandLine_SetFullScanSettings_Changing_full_scan_precursor_mass_accuracy_to__0__ppm_ { get { return ResourceManager.GetString("CommandLine_SetFullScanSettings_Changing_full_scan_precursor_mass_accuracy_to__0_" + "_ppm_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Changing full-scan precursor resolution to {0}.. /// </summary> public static string CommandLine_SetFullScanSettings_Changing_full_scan_precursor_resolution_to__0__ { get { return ResourceManager.GetString("CommandLine_SetFullScanSettings_Changing_full_scan_precursor_resolution_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Changing full-scan precursor resolving power to {0}.. /// </summary> public static string CommandLine_SetFullScanSettings_Changing_full_scan_precursor_resolving_power_to__0__ { get { return ResourceManager.GetString("CommandLine_SetFullScanSettings_Changing_full_scan_precursor_resolving_power_to__" + "0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Changing full-scan precursor resolving power to {0} at {1}.. /// </summary> public static string CommandLine_SetFullScanSettings_Changing_full_scan_precursor_resolving_power_to__0__at__1__ { get { return ResourceManager.GetString("CommandLine_SetFullScanSettings_Changing_full_scan_precursor_resolving_power_to__" + "0__at__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Changing full scan product mass accuracy to {0} ppm.. /// </summary> public static string CommandLine_SetFullScanSettings_Changing_full_scan_product_mass_accuracy_to__0__ppm_ { get { return ResourceManager.GetString("CommandLine_SetFullScanSettings_Changing_full_scan_product_mass_accuracy_to__0__p" + "pm_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Changing full-scan product resolution to {0}.. /// </summary> public static string CommandLine_SetFullScanSettings_Changing_full_scan_product_resolution_to__0__ { get { return ResourceManager.GetString("CommandLine_SetFullScanSettings_Changing_full_scan_product_resolution_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Changing full-scan product resolving power to {0}.. /// </summary> public static string CommandLine_SetFullScanSettings_Changing_full_scan_product_resolving_power_to__0__ { get { return ResourceManager.GetString("CommandLine_SetFullScanSettings_Changing_full_scan_product_resolving_power_to__0_" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Changing full-scan product resolving power to {0} at {1}.. 
/// </summary> public static string CommandLine_SetFullScanSettings_Changing_full_scan_product_resolving_power_to__0__at__1__ { get { return ResourceManager.GetString("CommandLine_SetFullScanSettings_Changing_full_scan_product_resolving_power_to__0_" + "_at__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failed attempting to change the transition full-scan settings.. /// </summary> public static string CommandLine_SetFullScanSettings_Error__Failed_attempting_to_change_the_transiton_full_scan_settings_ { get { return ResourceManager.GetString("CommandLine_SetFullScanSettings_Error__Failed_attempting_to_change_the_transiton_" + "full_scan_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Changing ion mobility spectral library resolving power to {0}.. /// </summary> public static string CommandLine_SetImsSettings_Changing_ion_mobility_spectral_library_resolving_power_to__0__ { get { return ResourceManager.GetString("CommandLine_SetImsSettings_Changing_ion_mobility_spectral_library_resolving_power" + "_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Enabling extraction based on spectral library ion mobility values.. /// </summary> public static string CommandLine_SetImsSettings_Enabling_extraction_based_on_spectral_library_ion_mobility_values_ { get { return ResourceManager.GetString("CommandLine_SetImsSettings_Enabling_extraction_based_on_spectral_library_ion_mobi" + "lity_values_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failed attempting to change the ion mobility settings.. /// </summary> public static string CommandLine_SetImsSettings_Error__Failed_attempting_to_change_the_ion_mobility_settings_ { get { return ResourceManager.GetString("CommandLine_SetImsSettings_Error__Failed_attempting_to_change_the_ion_mobility_se" + "ttings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Cannot set library name without path.. /// </summary> public static string CommandLine_SetLibrary_Error__Cannot_set_library_name_without_path_ { get { return ResourceManager.GetString("CommandLine_SetLibrary_Error__Cannot_set_library_name_without_path_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The file {0} appears to be a redundant library.. /// </summary> public static string CommandLine_SetLibrary_Error__The_file__0__appears_to_be_a_redundant_library_ { get { return ResourceManager.GetString("CommandLine_SetLibrary_Error__The_file__0__appears_to_be_a_redundant_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The file {0} does not exist.. /// </summary> public static string CommandLine_SetLibrary_Error__The_file__0__does_not_exist_ { get { return ResourceManager.GetString("CommandLine_SetLibrary_Error__The_file__0__does_not_exist_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The file {0} is not a supported spectral library file format.. /// </summary> public static string CommandLine_SetLibrary_Error__The_file__0__is_not_a_supported_spectral_library_file_format_ { get { return ResourceManager.GetString("CommandLine_SetLibrary_Error__The_file__0__is_not_a_supported_spectral_library_fi" + "le_format_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The library you are trying to add conflicts with a library already in the file..
/// </summary> public static string CommandLine_SetLibrary_Error__The_library_you_are_trying_to_add_conflicts_with_a_library_already_in_the_file_ { get { return ResourceManager.GetString("CommandLine_SetLibrary_Error__The_library_you_are_trying_to_add_conflicts_with_a_" + "library_already_in_the_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failed attempting to change the transition prediction settings.. /// </summary> public static string CommandLine_SetPredictTranSettings_Error__Failed_attempting_to_change_the_transition_prediction_settings_ { get { return ResourceManager.GetString("CommandLine_SetPredictTranSettings_Error__Failed_attempting_to_change_the_transit" + "ion_prediction_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Use the argument --import-search-prefer-embedded-spectra to force the library build to use embedded spectra, or place the original spectrum files next to the input files (with a supported file extension: {0}) and rerun.. /// </summary> public static string CommandLine_ShowLibraryMissingExternalSpectraError_DescriptionWithSupportedExtensions__0__ { get { return ResourceManager.GetString("CommandLine_ShowLibraryMissingExternalSpectraError_DescriptionWithSupportedExtens" + "ions__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error. /// </summary> public static string CommandLineTest_ConsoleAddFastaTest_Error { get { return ResourceManager.GetString("CommandLineTest_ConsoleAddFastaTest_Error", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning. /// </summary> public static string CommandLineTest_ConsoleAddFastaTest_Warning { get { return ResourceManager.GetString("CommandLineTest_ConsoleAddFastaTest_Warning", resourceCulture); } } /// <summary> /// Looks up a localized string similar to successfully.. /// </summary> public static string CommandLineTest_ConsolePathCoverage_successfully_ { get { return ResourceManager.GetString("CommandLineTest_ConsolePathCoverage_successfully_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Message: . /// </summary> public static string CommandProgressMonitor_UpdateProgressInternal_Message__ { get { return ResourceManager.GetString("CommandProgressMonitor_UpdateProgressInternal_Message__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Waiting.... /// </summary> public static string CommandWaitBroker_UpdateProgress_Waiting___ { get { return ResourceManager.GetString("CommandWaitBroker_UpdateProgress_Waiting___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Done. /// </summary> public static string CommandWaitBroker_Wait_Done { get { return ResourceManager.GetString("CommandWaitBroker_Wait_Done", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Comment { get { object obj = ResourceManager.GetObject("Comment", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Always. /// </summary> public static string CompactFormatOption_ALWAYS_Always { get { return ResourceManager.GetString("CompactFormatOption_ALWAYS_Always", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Never. 
/// </summary> public static string CompactFormatOption_NEVER_Never { get { return ResourceManager.GetString("CompactFormatOption_NEVER_Never", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Only for large files. /// </summary> public static string CompactFormatOption_ONLY_FOR_LARGE_FILES_Only_for_large_files { get { return ResourceManager.GetString("CompactFormatOption_ONLY_FOR_LARGE_FILES_Only_for_large_files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &lt;Compare...&gt;. /// </summary> public static string CompareElement_CompareElement__Compare____ { get { return ResourceManager.GetString("CompareElement_CompareElement__Compare____", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Documents have different number of transition groups, {0} vs {1}.. /// </summary> public static string ComparePeakBoundaries_ComputeMatches_Documents_have_different_number_of_transition_groups___0__vs__1__ { get { return ResourceManager.GetString("ComparePeakBoundaries_ComputeMatches_Documents_have_different_number_of_transitio" + "n_groups___0__vs__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Number of results in transition group {0} does not match between the two documents. /// </summary> public static string ComparePeakBoundaries_ComputeMatches_Number_of_results_in_transition_group__0__does_not_match_between_the_two_documents { get { return ResourceManager.GetString("ComparePeakBoundaries_ComputeMatches_Number_of_results_in_transition_group__0__do" + "es_not_match_between_the_two_documents", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing q value for peptide {0} of file {1}. /// </summary> public static string ComparePeakBoundaries_GenerateComparison_Missing_q_value_for_peptide__0__of_file__1_ { get { return ResourceManager.GetString("ComparePeakBoundaries_GenerateComparison_Missing_q_value_for_peptide__0__of_file_" + "_1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Model or File:. /// </summary> public static string ComparePeakBoundariesList_Label__Model_or_File_ { get { return ResourceManager.GetString("ComparePeakBoundariesList_Label__Model_or_File_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Peak Boundary Comparisons. /// </summary> public static string ComparePeakBoundariesList_Title_Edit_Peak_Boundary_Comparisons { get { return ResourceManager.GetString("ComparePeakBoundariesList_Title_Edit_Peak_Boundary_Comparisons", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Observed FPR:. /// </summary> public static string ComparePeakPickingDlg_checkObserved_CheckedChanged_Observed_FPR_ { get { return ResourceManager.GetString("ComparePeakPickingDlg_checkObserved_CheckedChanged_Observed_FPR_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Q value cutoff:. /// </summary> public static string ComparePeakPickingDlg_checkObserved_CheckedChanged_Q_value_cutoff_ { get { return ResourceManager.GetString("ComparePeakPickingDlg_checkObserved_CheckedChanged_Q_value_cutoff_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to find the peptide {0} with charge state {1}. 
/// </summary> public static string ComparePeakPickingDlg_ClickGridViewItem_Unable_to_find_the_peptide__0__with_charge_state__1_ { get { return ResourceManager.GetString("ComparePeakPickingDlg_ClickGridViewItem_Unable_to_find_the_peptide__0__with_charg" + "e_state__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fraction of Manual ID&apos;s. /// </summary> public static string ComparePeakPickingDlg_ComparePeakPickingDlg_Fraction_of_Manual_ID_s { get { return ResourceManager.GetString("ComparePeakPickingDlg_ComparePeakPickingDlg_Fraction_of_Manual_ID_s", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fraction of Peak Groups. /// </summary> public static string ComparePeakPickingDlg_ComparePeakPickingDlg_Fraction_of_Peak_Groups { get { return ResourceManager.GetString("ComparePeakPickingDlg_ComparePeakPickingDlg_Fraction_of_Peak_Groups", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Total Correct Peaks. /// </summary> public static string ComparePeakPickingDlg_ComparePeakPickingDlg_Total_Correct_Peaks { get { return ResourceManager.GetString("ComparePeakPickingDlg_ComparePeakPickingDlg_Total_Correct_Peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add models/files for comparison to see a q-Q plot. /// </summary> public static string ComparePeakPickingDlg_InitializeGraphPanes_Add_models_files_for_comparison_to_see_a_q_Q_plot { get { return ResourceManager.GetString("ComparePeakPickingDlg_InitializeGraphPanes_Add_models_files_for_comparison_to_see" + "_a_q_Q_plot", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add models/files for comparison to see a ROC plot.. /// </summary> public static string ComparePeakPickingDlg_InitializeGraphPanes_Add_models_files_for_comparison_to_see_a_ROC_plot_ { get { return ResourceManager.GetString("ComparePeakPickingDlg_InitializeGraphPanes_Add_models_files_for_comparison_to_see" + "_a_ROC_plot_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add models/files for comparison to see an analysis of runs. /// </summary> public static string ComparePeakPickingDlg_InitializeGraphPanes_Add_models_files_for_comparison_to_see_an_analysis_of_runs { get { return ResourceManager.GetString("ComparePeakPickingDlg_InitializeGraphPanes_Add_models_files_for_comparison_to_see" + "_an_analysis_of_runs", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Expected False Positive Rate. /// </summary> public static string ComparePeakPickingDlg_InitializeGraphPanes_Expected_False_Positive_Rate { get { return ResourceManager.GetString("ComparePeakPickingDlg_InitializeGraphPanes_Expected_False_Positive_Rate", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Observed False Positive Rate. /// </summary> public static string ComparePeakPickingDlg_InitializeGraphPanes_Observed_False_Positive_Rate { get { return ResourceManager.GetString("ComparePeakPickingDlg_InitializeGraphPanes_Observed_False_Positive_Rate", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Replicate Name. /// </summary> public static string ComparePeakPickingDlg_InitializeGraphPanes_Replicate_Name { get { return ResourceManager.GetString("ComparePeakPickingDlg_InitializeGraphPanes_Replicate_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Refresh {0}. 
/// </summary> public static string ComparePeakPickingDlg_RefreshDocument_Refresh__0_ { get { return ResourceManager.GetString("ComparePeakPickingDlg_RefreshDocument_Refresh__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Save Model Comparison Data. /// </summary> public static string ComparePeakPickingDlg_SaveData_Save_Model_Comparison_Data { get { return ResourceManager.GetString("ComparePeakPickingDlg_SaveData_Save_Model_Comparison_Data", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Either the ROC or Q-q plot tab must be selected to save a graph.. /// </summary> public static string ComparePeakPickingDlg_SaveGraph_Either_the_ROC_or_Q_q_plot_tab_must_be_selected_to_save_a_graph_ { get { return ResourceManager.GetString("ComparePeakPickingDlg_SaveGraph_Either_the_ROC_or_Q_q_plot_tab_must_be_selected_t" + "o_save_a_graph_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Save Model Comparison Graph. /// </summary> public static string ComparePeakPickingDlg_SaveGraph_Save_Model_Comparison_Graph { get { return ResourceManager.GetString("ComparePeakPickingDlg_SaveGraph_Save_Model_Comparison_Graph", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} observed false positive rate. /// </summary> public static string ComparePeakPickingDlg_UpdateGraph__0__observed_false_positive_rate { get { return ResourceManager.GetString("ComparePeakPickingDlg_UpdateGraph__0__observed_false_positive_rate", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} significance threshold. /// </summary> public static string ComparePeakPickingDlg_UpdateGraph__0__significance_threshold { get { return ResourceManager.GetString("ComparePeakPickingDlg_UpdateGraph__0__significance_threshold", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Equality. /// </summary> public static string ComparePeakPickingDlg_UpdateGraph_Equality { get { return ResourceManager.GetString("ComparePeakPickingDlg_UpdateGraph_Equality", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Q-Q Comparison. /// </summary> public static string ComparePeakPickingDlg_UpdateGraph_Q_Q_Comparison { get { return ResourceManager.GetString("ComparePeakPickingDlg_UpdateGraph_Q_Q_Comparison", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Replicate Comparison (Observed FPR &lt; {0}). /// </summary> public static string ComparePeakPickingDlg_UpdateGraph_Replicate_Comparison__Observed_FPR____0__ { get { return ResourceManager.GetString("ComparePeakPickingDlg_UpdateGraph_Replicate_Comparison__Observed_FPR____0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Replicate Comparison (q value &lt; {0}). /// </summary> public static string ComparePeakPickingDlg_UpdateGraph_Replicate_Comparison__q_value____0__ { get { return ResourceManager.GetString("ComparePeakPickingDlg_UpdateGraph_Replicate_Comparison__q_value____0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to ROC Plot Comparison. /// </summary> public static string ComparePeakPickingDlg_UpdateGraph_ROC_Plot_Comparison { get { return ResourceManager.GetString("ComparePeakPickingDlg_UpdateGraph_ROC_Plot_Comparison", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Compensation &amp;Voltage Parameters:. 
/// </summary> public static string CompensationVoltageList_Label_Compensation__Voltage_Parameters_ { get { return ResourceManager.GetString("CompensationVoltageList_Label_Compensation__Voltage_Parameters_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Compensation Voltage Parameter Sets. /// </summary> public static string CompensationVoltageList_Title_Edit_Compensation_Voltage_Parameter_Sets { get { return ResourceManager.GetString("CompensationVoltageList_Title_Edit_Compensation_Voltage_Parameter_Sets", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cancel. /// </summary> public static string ConfigureToolsDlg_AddFromFile_Cancel { get { return ResourceManager.GetString("ConfigureToolsDlg_AddFromFile_Cancel", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You must save changes before installing tools. Would you like to save changes?. /// </summary> public static string ConfigureToolsDlg_AddFromFile_You_must_save_changes_before_installing_tools__Would_you_like_to_save_changes_ { get { return ResourceManager.GetString("ConfigureToolsDlg_AddFromFile_You_must_save_changes_before_installing_tools__Woul" + "d_you_like_to_save_changes_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Zip Files. /// </summary> public static string ConfigureToolsDlg_AddFromFile_Zip_Files { get { return ResourceManager.GetString("ConfigureToolsDlg_AddFromFile_Zip_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Contacting the server. /// </summary> public static string ConfigureToolsDlg_AddFromWeb_Contacting_the_server { get { return ResourceManager.GetString("ConfigureToolsDlg_AddFromWeb_Contacting_the_server", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unknown error connecting to the tool store. /// </summary> public static string ConfigureToolsDlg_AddFromWeb_Unknown_error_connecting_to_the_tool_store { get { return ResourceManager.GetString("ConfigureToolsDlg_AddFromWeb_Unknown_error_connecting_to_the_tool_store", resourceCulture); } } /// <summary> /// Looks up a localized string similar to All Executables. /// </summary> public static string ConfigureToolsDlg_btnFindCommand_Click_All_Executables { get { return ResourceManager.GetString("ConfigureToolsDlg_btnFindCommand_Click_All_Executables", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Batch Files. /// </summary> public static string ConfigureToolsDlg_btnFindCommand_Click_Batch_Files { get { return ResourceManager.GetString("ConfigureToolsDlg_btnFindCommand_Click_Batch_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Command Files. /// </summary> public static string ConfigureToolsDlg_btnFindCommand_Click_Command_Files { get { return ResourceManager.GetString("ConfigureToolsDlg_btnFindCommand_Click_Command_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Information Files. /// </summary> public static string ConfigureToolsDlg_btnFindCommand_Click_Information_Files { get { return ResourceManager.GetString("ConfigureToolsDlg_btnFindCommand_Click_Information_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Perl Scripts. 
/// </summary> public static string ConfigureToolsDlg_btnFindCommand_Click_Perl_Scripts { get { return ResourceManager.GetString("ConfigureToolsDlg_btnFindCommand_Click_Perl_Scripts", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Python Scripts. /// </summary> public static string ConfigureToolsDlg_btnFindCommand_Click_Python_Scripts { get { return ResourceManager.GetString("ConfigureToolsDlg_btnFindCommand_Click_Python_Scripts", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you wish to Save changes?. /// </summary> public static string ConfigureToolsDlg_Cancel_Do_you_wish_to_Save_changes_ { get { return ResourceManager.GetString("ConfigureToolsDlg_Cancel_Do_you_wish_to_Save_changes_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Note: if you would like the command to launch a link, make sure to include http:// or https://. /// </summary> public static string ConfigureToolsDlg_CheckPassTool__Note__if_you_would_like_the_command_to_launch_a_link__make_sure_to_include_http____or_https___ { get { return ResourceManager.GetString("ConfigureToolsDlg_CheckPassTool__Note__if_you_would_like_the_command_to_launch_a_" + "link__make_sure_to_include_http____or_https___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The command for {0} may not exist in that location. Would you like to edit it?. /// </summary> public static string ConfigureToolsDlg_CheckPassTool__The_command_for__0__may_not_exist_in_that_location__Would_you_like_to_edit_it__ { get { return ResourceManager.GetString("ConfigureToolsDlg_CheckPassTool__The_command_for__0__may_not_exist_in_that_locati" + "on__Would_you_like_to_edit_it__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to If you would like the command to launch a link, make sure to include http:// or https://. /// </summary> public static string ConfigureToolsDlg_CheckPassTool_if_you_would_like_the_command_to_launch_a_link__make_sure_to_include_http____or_https___ { get { return ResourceManager.GetString("ConfigureToolsDlg_CheckPassTool_if_you_would_like_the_command_to_launch_a_link__m" + "ake_sure_to_include_http____or_https___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Supported Types: {1}. /// </summary> public static string ConfigureToolsDlg_CheckPassTool_Supported_Types___1_ { get { return ResourceManager.GetString("ConfigureToolsDlg_CheckPassTool_Supported_Types___1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The command cannot be blank. Please enter a valid command for {0}. /// </summary> public static string ConfigureToolsDlg_CheckPassTool_The_command_cannot_be_blank__please_enter_a_valid_command_for__0_ { get { return ResourceManager.GetString("ConfigureToolsDlg_CheckPassTool_The_command_cannot_be_blank__please_enter_a_valid" + "_command_for__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The command for {0} must be of a supported type.. /// </summary> public static string ConfigureToolsDlg_CheckPassTool_The_command_for__0__must_be_of_a_supported_type { get { return ResourceManager.GetString("ConfigureToolsDlg_CheckPassTool_The_command_for__0__must_be_of_a_supported_type", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You must enter a valid title for the tool.. 
/// </summary> public static string ConfigureToolsDlg_CheckPassTool_You_must_enter_a_valid_title_for_the_tool { get { return ResourceManager.GetString("ConfigureToolsDlg_CheckPassTool_You_must_enter_a_valid_title_for_the_tool", resourceCulture); } } /// <summary> /// Looks up a localized string similar to $(ToolDir) is not a valid macro for a tool that was not installed and therefore does not have a Tool Directory.. /// </summary> public static string ConfigureToolsDlg_CheckPassToolInternal_ { get { return ResourceManager.GetString("ConfigureToolsDlg_CheckPassToolInternal_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to $(ToolDir) is not a valid macro for a tool that was not installed and therefore does not have a Tool Directory.. /// </summary> public static string ConfigureToolsDlg_CheckPassToolInternal__ToolDir__is_not_a_valid_macro_for_a_tool_that_was_not_installed_and_therefore_does_not_have_a_Tool_Directory_ { get { return ResourceManager.GetString("ConfigureToolsDlg_CheckPassToolInternal__ToolDir__is_not_a_valid_macro_for_a_tool" + "_that_was_not_installed_and_therefore_does_not_have_a_Tool_Directory_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please select a report or remove {0} from arguments.. /// </summary> public static string ConfigureToolsDlg_CheckPassToolInternal_Please_select_a_report_or_remove__0__from_arguments_ { get { return ResourceManager.GetString("ConfigureToolsDlg_CheckPassToolInternal_Please_select_a_report_or_remove__0__from" + "_arguments_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please specify a valid URL.. /// </summary> public static string ConfigureToolsDlg_CheckPassToolInternal_Please_specify_a_valid_URL_ { get { return ResourceManager.GetString("ConfigureToolsDlg_CheckPassToolInternal_Please_specify_a_valid_URL_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Tool titles must be unique, please enter a unique title for this tool.. /// </summary> public static string ConfigureToolsDlg_CheckPassToolInternal_Tool_titles_must_be_unique__please_enter_a_unique_title_for_this_tool_ { get { return ResourceManager.GetString("ConfigureToolsDlg_CheckPassToolInternal_Tool_titles_must_be_unique__please_enter_" + "a_unique_title_for_this_tool_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You have provided {0} as an argument but have not selected a report.. /// </summary> public static string ConfigureToolsDlg_CheckPassToolInternal_You_have_provided__0__as_an_argument_but_have_not_selected_a_report_ { get { return ResourceManager.GetString("ConfigureToolsDlg_CheckPassToolInternal_You_have_provided__0__as_an_argument_but_" + "have_not_selected_a_report_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A file named {0} already exists that isn&apos;t identical to the one for tool {1}. /// </summary> public static string ConfigureToolsDlg_CopyinFile_A_file_named_0_already_exists_that_isn_t_identical_to_the_one_for_tool__1 { get { return ResourceManager.GetString("ConfigureToolsDlg_CopyinFile_A_file_named_0_already_exists_that_isn_t_identical_t" + "o_the_one_for_tool__1", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing the file {0}. Tool ({1}) Import Failed. 
/// </summary> public static string ConfigureToolsDlg_CopyinFile_Missing_the_file_0_Tool_1_Import_Failed { get { return ResourceManager.GetString("ConfigureToolsDlg_CopyinFile_Missing_the_file_0_Tool_1_Import_Failed", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Not importing this tool.... /// </summary> public static string ConfigureToolsDlg_CopyinFile_Not_importing_this_tool { get { return ResourceManager.GetString("ConfigureToolsDlg_CopyinFile_Not_importing_this_tool", resourceCulture); } } /// <summary> /// Looks up a localized string similar to [New Tool{0}]. /// </summary> public static string ConfigureToolsDlg_GetTitle__New_Tool_0__ { get { return ResourceManager.GetString("ConfigureToolsDlg_GetTitle__New_Tool_0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error connecting to the Tool Store: {0}. /// </summary> public static string ConfigureToolsDlg_GetZipFromWeb_Error_connecting_to_the_Tool_Store___0_ { get { return ResourceManager.GetString("ConfigureToolsDlg_GetZipFromWeb_Error_connecting_to_the_Tool_Store___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An annotation with the following name already exists:. /// </summary> public static string ConfigureToolsDlg_OverwriteAnnotations_An_annotation_with_the_following_name_already_exists_ { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteAnnotations_An_annotation_with_the_following_name_alre" + "ady_exists_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Annotations with the following names already exist:. /// </summary> public static string ConfigureToolsDlg_OverwriteAnnotations_Annotations_with_the_following_names_already_exist_ { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteAnnotations_Annotations_with_the_following_names_alrea" + "dy_exist_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you want to overwrite or keep the existing annotations?. /// </summary> public static string ConfigureToolsDlg_OverwriteAnnotations_Do_you_want_to_overwrite_or_keep_the_existing_annotations_ { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteAnnotations_Do_you_want_to_overwrite_or_keep_the_exist" + "ing_annotations_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Keep Existing. /// </summary> public static string ConfigureToolsDlg_OverwriteAnnotations_Keep_Existing { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteAnnotations_Keep_Existing", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you wish to overwrite or install in parallel?. /// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_Do_you_wish_to_overwrite_or_install_in_parallel_ { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_Do_you_wish_to_overwrite_or_install_in_pa" + "rallel_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you wish to overwrite with the older version {0} or install in parallel?. 
/// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_Do_you_wish_to_overwrite_with_the_older_version__0__or_install_in_parallel_ { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_Do_you_wish_to_overwrite_with_the_older_v" + "ersion__0__or_install_in_parallel_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you wish to reinstall or install in parallel?. /// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_Do_you_wish_to_reinstall_or_install_in_parallel_ { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_Do_you_wish_to_reinstall_or_install_in_pa" + "rallel_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you wish to upgrade to {0} or install in parallel?. /// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_Do_you_wish_to_upgrade_to__0__or_install_in_parallel_ { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_Do_you_wish_to_upgrade_to__0__or_install_" + "in_parallel_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to In Parallel. /// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_In_Parallel { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_In_Parallel", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Overwrite. /// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_Overwrite { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_Overwrite", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reinstall. /// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_Reinstall { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_Reinstall", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The tool {0} is already installed.. /// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_The_tool__0__is_already_installed_ { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_The_tool__0__is_already_installed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The tool {0} is currently installed.. /// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_The_tool__0__is_currently_installed_ { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_The_tool__0__is_currently_installed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The tool {0} is in conflict with the new installation. /// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_The_tool__0__is_in_conflict_with_the_new_installation { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_The_tool__0__is_in_conflict_with_the_new_" + "installation", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This installation would modify the following reports. 
/// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_This_installation_would_modify_the_following_reports { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_This_installation_would_modify_the_follow" + "ing_reports", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This installation would modify the report titled {0}. /// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_This_installation_would_modify_the_report_titled__0_ { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_This_installation_would_modify_the_report" + "_titled__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This is an older installation v{0} of the tool {1}. /// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_This_is_an_older_installation_v_0__of_the_tool__1_ { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_This_is_an_older_installation_v_0__of_the" + "_tool__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Upgrade. /// </summary> public static string ConfigureToolsDlg_OverwriteOrInParallel_Upgrade { get { return ResourceManager.GetString("ConfigureToolsDlg_OverwriteOrInParallel_Upgrade", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Arguments collected at run time. /// </summary> public static string ConfigureToolsDlg_PopulateMacroDropdown_Arguments_collected_at_run_time { get { return ResourceManager.GetString("ConfigureToolsDlg_PopulateMacroDropdown_Arguments_collected_at_run_time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File path to a temporary report. /// </summary> public static string ConfigureToolsDlg_PopulateMacroDropdown_File_path_to_a_temporary_report { get { return ResourceManager.GetString("ConfigureToolsDlg_PopulateMacroDropdown_File_path_to_a_temporary_report", resourceCulture); } } /// <summary> /// Looks up a localized string similar to N/A. /// </summary> public static string ConfigureToolsDlg_PopulateMacroDropdown_N_A { get { return ResourceManager.GetString("ConfigureToolsDlg_PopulateMacroDropdown_N_A", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Command:. /// </summary> public static string ConfigureToolsDlg_textCommand_TextChanged__Command_ { get { return ResourceManager.GetString("ConfigureToolsDlg_textCommand_TextChanged__Command_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Query params:. /// </summary> public static string ConfigureToolsDlg_textCommand_TextChanged__Query_params_ { get { return ResourceManager.GetString("ConfigureToolsDlg_textCommand_TextChanged__Query_params_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A&amp;rguments:. /// </summary> public static string ConfigureToolsDlg_textCommand_TextChanged_A_rguments_ { get { return ResourceManager.GetString("ConfigureToolsDlg_textCommand_TextChanged_A_rguments_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to U&amp;RL:. /// </summary> public static string ConfigureToolsDlg_textCommand_TextChanged_U_RL_ { get { return ResourceManager.GetString("ConfigureToolsDlg_textCommand_TextChanged_U_RL_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you want to continue?. 
/// </summary> public static string ConfigureToolsDlg_UnpackZipTool_Do_you_want_to_continue_ { get { return ResourceManager.GetString("ConfigureToolsDlg_UnpackZipTool_Do_you_want_to_continue_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error deleting the directory of the existing tool. Please close anything using that directory and try the overwriting import again later. /// </summary> public static string ConfigureToolsDlg_UnpackZipTool_Error_deleting_the_directory_of_the_existing_tool__Please_close_anything_using_that_directory_and_try_the_overwriting_import_again_later { get { return ResourceManager.GetString("ConfigureToolsDlg_UnpackZipTool_Error_deleting_the_directory_of_the_existing_tool" + "__Please_close_anything_using_that_directory_and_try_the_overwriting_import_agai" + "n_later", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error unpacking zipped tools. /// </summary> public static string ConfigureToolsDlg_unpackZipTool_Error_unpacking_zipped_tools { get { return ResourceManager.GetString("ConfigureToolsDlg_unpackZipTool_Error_unpacking_zipped_tools", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed attempting to extract the tool from {0}. /// </summary> public static string ConfigureToolsDlg_UnpackZipTool_Failed_attempting_to_extract_the_tool_from__0_ { get { return ResourceManager.GetString("ConfigureToolsDlg_UnpackZipTool_Failed_attempting_to_extract_the_tool_from__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to process file {0}. The tool described failed to import.. /// </summary> public static string ConfigureToolsDlg_unpackZipTool_Failed_to_process_file_0_The_tool_described_failed_to_import { get { return ResourceManager.GetString("ConfigureToolsDlg_unpackZipTool_Failed_to_process_file_0_The_tool_described_faile" + "d_to_import", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to read file {0}. The tool described failed to import.. /// </summary> public static string ConfigureToolsDlg_unpackZipTool_Failed_to_read_file_0_The_tool_described_failed_to_import { get { return ResourceManager.GetString("ConfigureToolsDlg_unpackZipTool_Failed_to_read_file_0_The_tool_described_failed_t" + "o_import", resourceCulture); } } /// <summary> /// Looks up a localized string similar to In Parallel. /// </summary> public static string ConfigureToolsDlg_UnpackZipTool_In_Parallel { get { return ResourceManager.GetString("ConfigureToolsDlg_UnpackZipTool_In_Parallel", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid file selected. No tools added.. /// </summary> public static string ConfigureToolsDlg_unpackZipTool_Invalid_file_selected__No_tools_added_ { get { return ResourceManager.GetString("ConfigureToolsDlg_unpackZipTool_Invalid_file_selected__No_tools_added_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid Tool Description in file {0}.. /// </summary> public static string ConfigureToolsDlg_unpackZipTool_Invalid_Tool_Description_in_file__0__ { get { return ResourceManager.GetString("ConfigureToolsDlg_unpackZipTool_Invalid_Tool_Description_in_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Overwrite. 
/// </summary> public static string ConfigureToolsDlg_UnpackZipTool_Overwrite { get { return ResourceManager.GetString("ConfigureToolsDlg_UnpackZipTool_Overwrite", resourceCulture); } } /// <summary> /// Looks up a localized string similar to skipping that tool.. /// </summary> public static string ConfigureToolsDlg_unpackZipTool_skipping_that_tool_ { get { return ResourceManager.GetString("ConfigureToolsDlg_unpackZipTool_skipping_that_tool_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is a naming conflict. You already installed a tool from a zip folder with the name {0}. Would you like to overwrite or install in parallel?. /// </summary> public static string ConfigureToolsDlg_UnpackZipTool_There_is_a_naming_conflict__You_already_installed_a_tool_from_a_zip_folder_with_the_name__0___Would_you_like_to_overwrite_or_install_in_parallel_ { get { return ResourceManager.GetString("ConfigureToolsDlg_UnpackZipTool_There_is_a_naming_conflict__You_already_installed" + "_a_tool_from_a_zip_folder_with_the_name__0___Would_you_like_to_overwrite_or_inst" + "all_in_parallel_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is a naming conflict in unpacking the zip. Tool importing canceled!. /// </summary> public static string ConfigureToolsDlg_unpackZipTool_There_is_a_naming_conflict_in_unpacking_the_zip__Tool_importing_canceled_ { get { return ResourceManager.GetString("ConfigureToolsDlg_unpackZipTool_There_is_a_naming_conflict_in_unpacking_the_zip__" + "Tool_importing_canceled_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Title and Command are required. /// </summary> public static string ConfigureToolsDlg_unpackZipTool_Title_and_Command_are_required { get { return ResourceManager.GetString("ConfigureToolsDlg_unpackZipTool_Title_and_Command_are_required", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning, overwriting will delete the following tools:. /// </summary> public static string ConfigureToolsDlg_UnpackZipTool_Warning__overwriting_will_delete_the_following_tools_ { get { return ResourceManager.GetString("ConfigureToolsDlg_UnpackZipTool_Warning__overwriting_will_delete_the_following_to" + "ols_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Warning, overwriting will delete the tool {0}. Do you want to continue?. /// </summary> public static string ConfigureToolsDlg_UnpackZipTool_Warning__overwriting_will_delete_the_tool__0___Do_you_want_to_continue_ { get { return ResourceManager.GetString("ConfigureToolsDlg_UnpackZipTool_Warning__overwriting_will_delete_the_tool__0___Do" + "_you_want_to_continue_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Copy { get { object obj = ResourceManager.GetObject("Copy", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Copy_Bitmap { get { object obj = ResourceManager.GetObject("Copy_Bitmap", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Copy. 
/// </summary> public static string CopyEmfToolStripMenuItem_AddToContextMenu_Copy { get { return ResourceManager.GetString("CopyEmfToolStripMenuItem_AddToContextMenu_Copy", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Metafile image copied to clipboard. /// </summary> public static string CopyEmfToolStripMenuItem_CopyEmf_Metafile_image_copied_to_clipboard { get { return ResourceManager.GetString("CopyEmfToolStripMenuItem_CopyEmf_Metafile_image_copied_to_clipboard", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to copy metafile image to the clipboard.. /// </summary> public static string CopyEmfToolStripMenuItem_CopyEmf_Unable_to_copy_metafile_image_to_the_clipboard { get { return ResourceManager.GetString("CopyEmfToolStripMenuItem_CopyEmf_Unable_to_copy_metafile_image_to_the_clipboard", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Copy Metafile. /// </summary> public static string CopyEmfToolStripMenuItem_CopyEmfToolStripMenuItem_Copy_Metafile { get { return ResourceManager.GetString("CopyEmfToolStripMenuItem_CopyEmfToolStripMenuItem_Copy_Metafile", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed setting data to clipboard.. /// </summary> public static string CopyGraphDataToolStripMenuItem_CopyGraphData_Failed_setting_data_to_clipboard { get { return ResourceManager.GetString("CopyGraphDataToolStripMenuItem_CopyGraphData_Failed_setting_data_to_clipboard", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Copy Data. /// </summary> public static string CopyGraphDataToolStripMenuItem_CopyGraphDataToolStripMenuItem_Copy_Data { get { return ResourceManager.GetString("CopyGraphDataToolStripMenuItem_CopyGraphDataToolStripMenuItem_Copy_Data", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Could not read the pasted transition list. Transition list must be in separated columns and cannot contain blank lines.. /// </summary> public static string CopyPasteTest_DoTest_Could_not_read_the_pasted_transition_list___Transition_list_must_be_in_separated_columns_and_cannot_contain_blank_lines_ { get { return ResourceManager.GetString("CopyPasteTest_DoTest_Could_not_read_the_pasted_transition_list___Transition_list_" + "must_be_in_separated_columns_and_cannot_contain_blank_lines_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Could not open web Browser to show link:. /// </summary> public static string Could_not_open_web_Browser_to_show_link_ { get { return ResourceManager.GetString("Could_not_open_web_Browser_to_show_link_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The specified file is not a valid iRT database.. /// </summary> public static string CreateIrtCalculatorDlg_BrowseDb_The_specified_file_is_not_a_valid_iRT_database_ { get { return ResourceManager.GetString("CreateIrtCalculatorDlg_BrowseDb_The_specified_file_is_not_a_valid_iRT_database_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Import Transition List (iRT standards). /// </summary> public static string CreateIrtCalculatorDlg_ImportTextFile_Import_Transition_List__iRT_standards_ { get { return ResourceManager.GetString("CreateIrtCalculatorDlg_ImportTextFile_Import_Transition_List__iRT_standards_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A calculator with that name already exists. 
Do you want to replace it?. /// </summary> public static string CreateIrtCalculatorDlg_OkDialog_A_calculator_with_that_name_already_exists___Do_you_want_to_replace_it_ { get { return ResourceManager.GetString("CreateIrtCalculatorDlg_OkDialog_A_calculator_with_that_name_already_exists___Do_y" + "ou_want_to_replace_it_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Calculator name cannot be empty.. /// </summary> public static string CreateIrtCalculatorDlg_OkDialog_Calculator_name_cannot_be_empty { get { return ResourceManager.GetString("CreateIrtCalculatorDlg_OkDialog_Calculator_name_cannot_be_empty", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot read the database file {0}.. /// </summary> public static string CreateIrtCalculatorDlg_OkDialog_Cannot_read_the_database_file__0_ { get { return ResourceManager.GetString("CreateIrtCalculatorDlg_OkDialog_Cannot_read_the_database_file__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error reading iRT standards transition list: {0}. /// </summary> public static string CreateIrtCalculatorDlg_OkDialog_Error_reading_iRT_standards_transition_list___0_ { get { return ResourceManager.GetString("CreateIrtCalculatorDlg_OkDialog_Error_reading_iRT_standards_transition_list___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to open the database file: {0}. /// </summary> public static string CreateIrtCalculatorDlg_OkDialog_Failed_to_open_the_database_file___0_ { get { return ResourceManager.GetString("CreateIrtCalculatorDlg_OkDialog_Failed_to_open_the_database_file___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to iRT database field must contain a path to a valid file.. /// </summary> public static string CreateIrtCalculatorDlg_OkDialog_iRT_database_field_must_contain_a_path_to_a_valid_file_ { get { return ResourceManager.GetString("CreateIrtCalculatorDlg_OkDialog_iRT_database_field_must_contain_a_path_to_a_valid" + "_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to iRT database field must not be empty.. /// </summary> public static string CreateIrtCalculatorDlg_OkDialog_iRT_database_field_must_not_be_empty_ { get { return ResourceManager.GetString("CreateIrtCalculatorDlg_OkDialog_iRT_database_field_must_not_be_empty_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please select a protein containing the list of standard peptides for the iRT calculator.. /// </summary> public static string CreateIrtCalculatorDlg_OkDialog_Please_select_a_protein_containing_the_list_of_standard_peptides_for_the_iRT_calculator_ { get { return ResourceManager.GetString("CreateIrtCalculatorDlg_OkDialog_Please_select_a_protein_containing_the_list_of_st" + "andard_peptides_for_the_iRT_calculator_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transition list field must contain a path to a valid file.. /// </summary> public static string CreateIrtCalculatorDlg_OkDialog_Transition_list_field_must_contain_a_path_to_a_valid_file_ { get { return ResourceManager.GetString("CreateIrtCalculatorDlg_OkDialog_Transition_list_field_must_contain_a_path_to_a_va" + "lid_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Expected &apos;{0}&apos;. 
/// </summary> public static string CrosslinkSequenceParser_Expected_Expected___0__ { get { return ResourceManager.GetString("CrosslinkSequenceParser_Expected_Expected___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to parse &apos;{0}&apos; as a number. /// </summary> public static string CrosslinkSequenceParser_ParseCrosslink_Unable_to_parse___0___as_a_number { get { return ResourceManager.GetString("CrosslinkSequenceParser_ParseCrosslink_Unable_to_parse___0___as_a_number", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid peptide sequence. /// </summary> public static string CrosslinkSequenceParser_ParseCrosslinkLibraryKey_Invalid_peptide_sequence { get { return ResourceManager.GetString("CrosslinkSequenceParser_ParseCrosslinkLibraryKey_Invalid_peptide_sequence", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ion. /// </summary> public static string CustomIon_DisplayName_Ion { get { return ResourceManager.GetString("CustomIon_DisplayName_Ion", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Molecule. /// </summary> public static string CustomMolecule_DisplayName_Molecule { get { return ResourceManager.GetString("CustomMolecule_DisplayName_Molecule", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Custom molecules must specify a formula or valid monoisotopic and average masses.. /// </summary> public static string CustomMolecule_Validate_Custom_molecules_must_specify_a_formula_or_valid_monoisotopic_and_average_masses_ { get { return ResourceManager.GetString("CustomMolecule_Validate_Custom_molecules_must_specify_a_formula_or_valid_monoisot" + "opic_and_average_masses_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The mass {0} of the custom molecule exceeds the maximum of {1}.. /// </summary> public static string CustomMolecule_Validate_The_mass__0__of_the_custom_molecule_exceeeds_the_maximum_of__1__ { get { return ResourceManager.GetString("CustomMolecule_Validate_The_mass__0__of_the_custom_molecule_exceeeds_the_maximum_" + "of__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The mass {0} of the custom molecule is less than the minimum of {1}.. /// </summary> public static string CustomMolecule_Validate_The_mass__0__of_the_custom_molecule_is_less_than_the_minimum_of__1__ { get { return ResourceManager.GetString("CustomMolecule_Validate_The_mass__0__of_the_custom_molecule_is_less_than_the_mini" + "mum_of__1__", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Cut { get { object obj = ResourceManager.GetObject("Cut", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Dash { get { object obj = ResourceManager.GetObject("Dash", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to {0} points. /// </summary> public static string Data_ToString__0__points { get { return ResourceManager.GetString("Data_ToString__0__points", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The type {0} must have a column of type {1}. 
/// </summary> public static string Database_GetJoinColumn_The_type__0__must_have_a_column_of_type__1_ { get { return ResourceManager.GetString("Database_GetJoinColumn_The_type__0__must_have_a_column_of_type__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot join tables of same type.. /// </summary> public static string Database_Join_Cannot_join_tables_of_same_type { get { return ResourceManager.GetString("Database_Join_Cannot_join_tables_of_same_type", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The database for the calculator {0} could not be opened. Check that the file {1} was not moved or deleted.. /// </summary> public static string DatabaseNotConnectedException_DatabaseNotConnectedException_The_database_for_the_calculator__0__could_not_be_opened__Check_that_the_file__1__was_not_moved_or_deleted { get { return ResourceManager.GetString("DatabaseNotConnectedException_DatabaseNotConnectedException_The_database_for_the_" + "calculator__0__could_not_be_opened__Check_that_the_file__1__was_not_moved_or_del" + "eted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Show Heat Map. /// </summary> public static string DataboundGridControl_DataboundGridControl_Show_Heat_Map { get { return ResourceManager.GetString("DataboundGridControl_DataboundGridControl_Show_Heat_Map", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Show PCA Plot. /// </summary> public static string DataboundGridControl_DataboundGridControl_Show_PCA_Plot { get { return ResourceManager.GetString("DataboundGridControl_DataboundGridControl_Show_PCA_Plot", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred while displaying the data rows:. /// </summary> public static string DataboundGridControl_DisplayError_An_error_occured_while_displaying_the_data_rows_ { get { return ResourceManager.GetString("DataboundGridControl_DisplayError_An_error_occured_while_displaying_the_data_rows" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you want to continue to see these error messages?. /// </summary> public static string DataboundGridControl_DisplayError_Do_you_want_to_continue_to_see_these_error_messages_ { get { return ResourceManager.GetString("DataboundGridControl_DisplayError_Do_you_want_to_continue_to_see_these_error_mess" + "ages_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error setting value:. /// </summary> public static string DataboundGridControl_DoFillDown_Error_setting_value_ { get { return ResourceManager.GetString("DataboundGridControl_DoFillDown_Error_setting_value_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Filling {0}/{1} rows. /// </summary> public static string DataboundGridControl_DoFillDown_Filling__0___1__rows { get { return ResourceManager.GetString("DataboundGridControl_DoFillDown_Filling__0___1__rows", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fill Down. /// </summary> public static string DataboundGridControl_FillDown_Fill_Down { get { return ResourceManager.GetString("DataboundGridControl_FillDown_Fill_Down", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred while performing clustering..
/// </summary> public static string DataboundGridControl_GetClusteredResults_An_error_occured_while_performing_clustering_ { get { return ResourceManager.GetString("DataboundGridControl_GetClusteredResults_An_error_occured_while_performing_cluste" + "ring_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to choose a set of columns to use for hierarchical clustering.. /// </summary> public static string DataboundGridControl_GetClusteredResults_Unable_to_choose_a_set_of_columns_to_use_for_hierarchical_clustering_ { get { return ResourceManager.GetString("DataboundGridControl_GetClusteredResults_Unable_to_choose_a_set_of_columns_to_use" + "_for_hierarchical_clustering_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cleared {0}/{1} rows. /// </summary> public static string DataGridViewPasteHandler_ClearCells_Cleared__0___1__rows { get { return ResourceManager.GetString("DataGridViewPasteHandler_ClearCells_Cleared__0___1__rows", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Clear cells. /// </summary> public static string DataGridViewPasteHandler_DataGridViewOnKeyDown_Clear_cells { get { return ResourceManager.GetString("DataGridViewPasteHandler_DataGridViewOnKeyDown_Clear_cells", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Paste. /// </summary> public static string DataGridViewPasteHandler_DataGridViewOnKeyDown_Paste { get { return ResourceManager.GetString("DataGridViewPasteHandler_DataGridViewOnKeyDown_Paste", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Updating document settings to match edits.. /// </summary> public static string DataGridViewPasteHandler_EndDeferSettingsChangesOnDocument_Updating_settings { get { return ResourceManager.GetString("DataGridViewPasteHandler_EndDeferSettingsChangesOnDocument_Updating_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Pasting row {0}. /// </summary> public static string DataGridViewPasteHandler_Paste_Pasting_row__0_ { get { return ResourceManager.GetString("DataGridViewPasteHandler_Paste_Pasting_row__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error converting &apos;{0}&apos; to required type: {1}. /// </summary> public static string DataGridViewPasteHandler_TryConvertValue_Error_converting___0___to_required_type___1_ { get { return ResourceManager.GetString("DataGridViewPasteHandler_TryConvertValue_Error_converting___0___to_required_type_" + "__1_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap DataProcessing { get { object obj = ResourceManager.GetObject("DataProcessing", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to The URI {0} is not well formed.. /// </summary> public static string DataSettings_ChangePanoramaPublishUri_The_URI__0__is_not_well_formed_ { get { return ResourceManager.GetString("DataSettings_ChangePanoramaPublishUri_The_URI__0__is_not_well_formed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to read sample information from the file {0}.. 
/// </summary> public static string DataSourceUtil_GetWiffSubPaths_An_error_occurred_attempting_to_read_sample_information_from_the_file__0__ { get { return ResourceManager.GetString("DataSourceUtil_GetWiffSubPaths_An_error_occurred_attempting_to_read_sample_inform" + "ation_from_the_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file may be corrupted, missing, or the correct libraries may not be installed.. /// </summary> public static string DataSourceUtil_GetWiffSubPaths_The_file_may_be_corrupted_missing_or_the_correct_libraries_may_not_be_installed { get { return ResourceManager.GetString("DataSourceUtil_GetWiffSubPaths_The_file_may_be_corrupted_missing_or_the_correct_l" + "ibraries_may_not_be_installed", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding iRT values for imported peptides. /// </summary> public static string DbIrtPeptide_FindNonConflicts_Adding_iRT_values_for_imported_peptides { get { return ResourceManager.GetString("DbIrtPeptide_FindNonConflicts_Adding_iRT_values_for_imported_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Optimization type out of range. /// </summary> public static string DbOptimization_DbOptimization_Optimization_type_out_of_range { get { return ResourceManager.GetString("DbOptimization_DbOptimization_Optimization_type_out_of_range", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Enzymes file {0} not found. /// </summary> public static string DdaSearch_MSAmandaSearchWrapper_enzymes_file__0__not_found { get { return ResourceManager.GetString("DdaSearch_MSAmandaSearchWrapper_enzymes_file__0__not_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Instruments file {0} not found. /// </summary> public static string DdaSearch_MSAmandaSearchWrapper_Instruments_file_not_found { get { return ResourceManager.GetString("DdaSearch_MSAmandaSearchWrapper_Instruments_file_not_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Obo files (psi-ms.obo and unimod.obo) not found. /// </summary> public static string DdaSearch_MSAmandaSearchWrapper_Obo_files_not_found { get { return ResourceManager.GetString("DdaSearch_MSAmandaSearchWrapper_Obo_files_not_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unimod file {0} not found. /// </summary> public static string DdaSearch_MSAmandaSearchWrapper_unimod_file__0__not_found { get { return ResourceManager.GetString("DdaSearch_MSAmandaSearchWrapper_unimod_file__0__not_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Search failed: {0}. /// </summary> public static string DdaSearch_Search_failed__0 { get { return ResourceManager.GetString("DdaSearch_Search_failed__0", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Search is being canceled.. /// </summary> public static string DdaSearch_Search_is_canceled { get { return ResourceManager.GetString("DdaSearch_Search_is_canceled", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fragment ions must be selected. /// </summary> public static string DdaSearch_SearchSettingsControl_Fragment_ions_must_be_selected { get { return ResourceManager.GetString("DdaSearch_SearchSettingsControl_Fragment_ions_must_be_selected", resourceCulture); } } /// <summary> /// Looks up a localized string similar to MS1 Tolerance incorrect. 
/// </summary> public static string DdaSearch_SearchSettingsControl_MS1_Tolerance_incorrect { get { return ResourceManager.GetString("DdaSearch_SearchSettingsControl_MS1_Tolerance_incorrect", resourceCulture); } } /// <summary> /// Looks up a localized string similar to MS2 Tolerance incorrect. /// </summary> public static string DdaSearch_SearchSettingsControl_MS2_Tolerance_incorrect { get { return ResourceManager.GetString("DdaSearch_SearchSettingsControl_MS2_Tolerance_incorrect", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Search canceled.. /// </summary> public static string DDASearchControl_SearchProgress_Search_canceled { get { return ResourceManager.GetString("DDASearchControl_SearchProgress_Search_canceled", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Search done.. /// </summary> public static string DDASearchControl_SearchProgress_Search_done { get { return ResourceManager.GetString("DDASearchControl_SearchProgress_Search_done", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Search failed.. /// </summary> public static string DDASearchControl_SearchProgress_Search_failed { get { return ResourceManager.GetString("DDASearchControl_SearchProgress_Search_failed", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Starting search.... /// </summary> public static string DDASearchControl_SearchProgress_Starting_search { get { return ResourceManager.GetString("DDASearchControl_SearchProgress_Starting_search", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Declustering Potential Regressions:. /// </summary> public static string DeclusterPotentialList_Label_Declustering_Potential_Regressions { get { return ResourceManager.GetString("DeclusterPotentialList_Label_Declustering_Potential_Regressions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Declustering Potential Regressions. /// </summary> public static string DeclusterPotentialList_Title_Edit_Declustering_Potential_Regressions { get { return ResourceManager.GetString("DeclusterPotentialList_Title_Edit_Declustering_Potential_Regressions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fast Overlap (Experimental). /// </summary> public static string DeconvolutionMethod_FAST_OVERLAP_Fast_Overlap { get { return ResourceManager.GetString("DeconvolutionMethod_FAST_OVERLAP_Fast_Overlap", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Overlap and MSX. /// </summary> public static string DeconvolutionMethod_MSX_OVERLAP_Overlap_and_MSX { get { return ResourceManager.GetString("DeconvolutionMethod_MSX_OVERLAP_Overlap_and_MSX", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Random Mass Shift. /// </summary> public static string DecoyGeneration_ADD_RANDOM_Random_Mass_Shift { get { return ResourceManager.GetString("DecoyGeneration_ADD_RANDOM_Random_Mass_Shift", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reverse Sequence. /// </summary> public static string DecoyGeneration_REVERSE_SEQUENCE_Reverse_Sequence { get { return ResourceManager.GetString("DecoyGeneration_REVERSE_SEQUENCE_Reverse_Sequence", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Shuffle Sequence. 
/// </summary> public static string DecoyGeneration_SHUFFLE_SEQUENCE_Shuffle_Sequence { get { return ResourceManager.GetString("DecoyGeneration_SHUFFLE_SEQUENCE_Shuffle_Sequence", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Choose a type for this annotation to apply to.. /// </summary> public static string DefineAnnotationDlg_OkDialog_Choose_a_type_for_this_annotation_to_apply_to_ { get { return ResourceManager.GetString("DefineAnnotationDlg_OkDialog_Choose_a_type_for_this_annotation_to_apply_to_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Choose a value for this annotation.. /// </summary> public static string DefineAnnotationDlg_OkDialog_Choose_a_value_for_this_annotation_ { get { return ResourceManager.GetString("DefineAnnotationDlg_OkDialog_Choose_a_value_for_this_annotation_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Choose at least one type for this annotation to apply to.. /// </summary> public static string DefineAnnotationDlg_OkDialog_Choose_at_least_one_type_for_this_annotation_to_apply_to { get { return ResourceManager.GetString("DefineAnnotationDlg_OkDialog_Choose_at_least_one_type_for_this_annotation_to_appl" + "y_to", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is already an annotation defined named &apos;{0}&apos;.. /// </summary> public static string DefineAnnotationDlg_OkDialog_There_is_already_an_annotation_defined_named__0__ { get { return ResourceManager.GetString("DefineAnnotationDlg_OkDialog_There_is_already_an_annotation_defined_named__0__", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Delete { get { object obj = ResourceManager.GetObject("Delete", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Delete Molecules.... /// </summary> public static string DeletePeptides_GetMenuItemText_Delete_Molecules___ { get { return ResourceManager.GetString("DeletePeptides_GetMenuItemText_Delete_Molecules___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Delete Peptides.... /// </summary> public static string DeletePeptides_MenuItemText_Delete_Peptides___ { get { return ResourceManager.GetString("DeletePeptides_MenuItemText_Delete_Peptides___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Delete Precursors.... /// </summary> public static string DeletePrecursors_MenuItemText_Delete_Precursors___ { get { return ResourceManager.GetString("DeletePrecursors_MenuItemText_Delete_Precursors___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Delete Molecule Lists.... /// </summary> public static string DeleteProteins_MenuItemText_Delete_Molecule_Lists___ { get { return ResourceManager.GetString("DeleteProteins_MenuItemText_Delete_Molecule_Lists___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Delete Proteins.... /// </summary> public static string DeleteProteins_MenuItemText_Delete_Proteins___ { get { return ResourceManager.GetString("DeleteProteins_MenuItemText_Delete_Proteins___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Delete Transitions.... 
/// </summary> public static string DeleteTransitions_MenuItemText_Delete_Transitions___ { get { return ResourceManager.GetString("DeleteTransitions_MenuItemText_Delete_Transitions___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The Isolation Scheme in Full Scan Settings is Inconsistently Specified.. /// </summary> public static string Demultiplexer_GetDeconvRegionsForMz_TheIsolationSchemeIsInconsistentlySpecified { get { return ResourceManager.GetString("Demultiplexer_GetDeconvRegionsForMz_TheIsolationSchemeIsInconsistentlySpecified", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Count. /// </summary> public static string DetectionHistogramPane_Tooltip_Count { get { return ResourceManager.GetString("DetectionHistogramPane_Tooltip_Count", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Replicate Count. /// </summary> public static string DetectionHistogramPane_Tooltip_ReplicateCount { get { return ResourceManager.GetString("DetectionHistogramPane_Tooltip_ReplicateCount", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Replicate Count. /// </summary> public static string DetectionHistogramPane_XAxis_Name { get { return ResourceManager.GetString("DetectionHistogramPane_XAxis_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Frequency. /// </summary> public static string DetectionHistogramPane_YAxis_Name { get { return ResourceManager.GetString("DetectionHistogramPane_YAxis_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptide. /// </summary> public static string DetectionPlot_TargetType_Peptide { get { return ResourceManager.GetString("DetectionPlot_TargetType_Peptide", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor. /// </summary> public static string DetectionPlot_TargetType_Precursor { get { return ResourceManager.GetString("DetectionPlot_TargetType_Precursor", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Count. /// </summary> public static string DetectionPlot_YScale_One { get { return ResourceManager.GetString("DetectionPlot_YScale_One", resourceCulture); } } /// <summary> /// Looks up a localized string similar to % of all {0}s. /// </summary> public static string DetectionPlot_YScale_Percent { get { return ResourceManager.GetString("DetectionPlot_YScale_Percent", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &quot;Data retrieved successfully.&quot;. /// </summary> public static string DetectionPlotData_DataRetrieved_Label { get { return ResourceManager.GetString("DetectionPlotData_DataRetrieved_Label", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid Q-Value. Cannot be 0 or 1.. /// </summary> public static string DetectionPlotData_InvalidQValue_Label { get { return ResourceManager.GetString("DetectionPlotData_InvalidQValue_Label", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No data loaded.. /// </summary> public static string DetectionPlotData_NoDataLoaded_Label { get { return ResourceManager.GetString("DetectionPlotData_NoDataLoaded_Label", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Document has no Q-Values. Train your mProphet model.. 
/// </summary> public static string DetectionPlotData_NoQValuesInDocument_Label { get { return ResourceManager.GetString("DetectionPlotData_NoQValuesInDocument_Label", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Document has no peptides or chromatograms.. /// </summary> public static string DetectionPlotData_NoResults_Label { get { return ResourceManager.GetString("DetectionPlotData_NoResults_Label", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Use properties dialog or modify the document to update the plot.. /// </summary> public static string DetectionPlotData_UsePropertiesDialog_Label { get { return ResourceManager.GetString("DetectionPlotData_UsePropertiesDialog_Label", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &quot;Waiting for the document to load.&quot;. /// </summary> public static string DetectionPlotData_WaitingForDocumentLoad_Label { get { return ResourceManager.GetString("DetectionPlotData_WaitingForDocumentLoad_Label", resourceCulture); } } /// <summary> /// Looks up a localized string similar to all runs. /// </summary> public static string DetectionPlotPane_AllRunsLine_Name { get { return ResourceManager.GetString("DetectionPlotPane_AllRunsLine_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to at least {0} (of {1}) - {2:#,##0}. /// </summary> public static string DetectionPlotPane_AtLeastLine_Name { get { return ResourceManager.GetString("DetectionPlotPane_AtLeastLine_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to cumulative. /// </summary> public static string DetectionPlotPane_CumulativeLine_Name { get { return ResourceManager.GetString("DetectionPlotPane_CumulativeLine_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No data available for this plot.. /// </summary> public static string DetectionPlotPane_EmptyPlot_Label { get { return ResourceManager.GetString("DetectionPlotPane_EmptyPlot_Label", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Operation canceled.. /// </summary> public static string DetectionPlotPane_EmptyPlotCanceled_Label { get { return ResourceManager.GetString("DetectionPlotPane_EmptyPlotCanceled_Label", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error when retrieving plot data.. /// </summary> public static string DetectionPlotPane_EmptyPlotError_Label { get { return ResourceManager.GetString("DetectionPlotPane_EmptyPlotError_Label", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mean: {0:#,##0}. /// </summary> public static string DetectionPlotPane_Label_Mean { get { return ResourceManager.GetString("DetectionPlotPane_Label_Mean", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Stddev:{1:#,##0}. /// </summary> public static string DetectionPlotPane_Label_Stddev { get { return ResourceManager.GetString("DetectionPlotPane_Label_Stddev", resourceCulture); } } /// <summary> /// Looks up a localized string similar to All Count. /// </summary> public static string DetectionPlotPane_Tooltip_AllCount { get { return ResourceManager.GetString("DetectionPlotPane_Tooltip_AllCount", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Count. 
/// </summary> public static string DetectionPlotPane_Tooltip_Count { get { return ResourceManager.GetString("DetectionPlotPane_Tooltip_Count", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cumulative Count. /// </summary> public static string DetectionPlotPane_Tooltip_CumulativeCount { get { return ResourceManager.GetString("DetectionPlotPane_Tooltip_CumulativeCount", resourceCulture); } } /// <summary> /// Looks up a localized string similar to -log10 of Q-Value Median. /// </summary> public static string DetectionPlotPane_Tooltip_QMedian { get { return ResourceManager.GetString("DetectionPlotPane_Tooltip_QMedian", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Replicate. /// </summary> public static string DetectionPlotPane_Tooltip_Replicate { get { return ResourceManager.GetString("DetectionPlotPane_Tooltip_Replicate", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Calculating (Esc to cancel).... /// </summary> public static string DetectionPlotPane_WaitingForData_Label { get { return ResourceManager.GetString("DetectionPlotPane_WaitingForData_Label", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Replicate. /// </summary> public static string DetectionPlotPane_XAxis_Name { get { return ResourceManager.GetString("DetectionPlotPane_XAxis_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Detections ({0}). /// </summary> public static string DetectionPlotPane_YAxis_Name { get { return ResourceManager.GetString("DetectionPlotPane_YAxis_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to At least {0} replicates. /// </summary> public static string DetectionToolbarProperties_AtLeastNReplicates { get { return ResourceManager.GetString("DetectionToolbarProperties_AtLeastNReplicates", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Extraction Windows. /// </summary> public static string DiaIsolationWindowsGraphForm_checkBox1_CheckedChanged_Extraction_Windows { get { return ResourceManager.GetString("DiaIsolationWindowsGraphForm_checkBox1_CheckedChanged_Extraction_Windows", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cycle. /// </summary> public static string DiaIsolationWindowsGraphForm_DiaIsolationWindowsGraphForm_Cycle { get { return ResourceManager.GetString("DiaIsolationWindowsGraphForm_DiaIsolationWindowsGraphForm_Cycle", resourceCulture); } } /// <summary> /// Looks up a localized string similar to m/z. /// </summary> public static string DiaIsolationWindowsGraphForm_DiaIsolationWindowsGraphForm_m_z { get { return ResourceManager.GetString("DiaIsolationWindowsGraphForm_DiaIsolationWindowsGraphForm_m_z", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Measurement Windows. /// </summary> public static string DiaIsolationWindowsGraphForm_DiaIsolationWindowsGraphForm_Measurement_Windows { get { return ResourceManager.GetString("DiaIsolationWindowsGraphForm_DiaIsolationWindowsGraphForm_Measurement_Windows", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Digesting {0} proteome with {1}. 
/// </summary> public static string DigestHelper_Digest_Digesting__0__proteome_with__1__ { get { return ResourceManager.GetString("DigestHelper_Digest_Digesting__0__proteome_with__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to access internet to resolve protein details.. /// </summary> public static string DigestHelper_LookupProteinMetadata_Unable_to_access_internet_to_resolve_protein_details_ { get { return ResourceManager.GetString("DigestHelper_LookupProteinMetadata_Unable_to_access_internet_to_resolve_protein_d" + "etails_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to maximum missed cleavages. /// </summary> public static string DigestSettings_Validate_maximum_missed_cleavages { get { return ResourceManager.GetString("DigestSettings_Validate_maximum_missed_cleavages", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value {1} for {0} must be between {2} and {3}.. /// </summary> public static string DigestSettings_ValidateIntRange_The_value__1__for__0__must_be_between__2__and__3__ { get { return ResourceManager.GetString("DigestSettings_ValidateIntRange_The_value__1__for__0__must_be_between__2__and__3_" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Source directory does not exist or could not be found: . /// </summary> public static string DirectoryEx_DirectoryCopy_Source_directory_does_not_exist_or_could_not_be_found__ { get { return ResourceManager.GetString("DirectoryEx_DirectoryCopy_Source_directory_does_not_exist_or_could_not_be_found__", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap directoryicon { get { object obj = ResourceManager.GetObject("directoryicon", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to N/A. /// </summary> public static string DisplayEquation_N_A { get { return ResourceManager.GetString("DisplayEquation_N_A", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Column. /// </summary> public static string DisplayGraphsTypeExtension_LOCALIZED_VALUES_Column { get { return ResourceManager.GetString("DisplayGraphsTypeExtension_LOCALIZED_VALUES_Column", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Row. /// </summary> public static string DisplayGraphsTypeExtension_LOCALIZED_VALUES_Row { get { return ResourceManager.GetString("DisplayGraphsTypeExtension_LOCALIZED_VALUES_Row", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Tiled. /// </summary> public static string DisplayGraphsTypeExtension_LOCALIZED_VALUES_Tiled { get { return ResourceManager.GetString("DisplayGraphsTypeExtension_LOCALIZED_VALUES_Tiled", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Full Name. /// </summary> public static string DisplayModificationOption_FULL_NAME_Full_Name { get { return ResourceManager.GetString("DisplayModificationOption_FULL_NAME_Full_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mass Difference. /// </summary> public static string DisplayModificationOption_MASS_DELTA_Mass_Difference { get { return ResourceManager.GetString("DisplayModificationOption_MASS_DELTA_Mass_Difference", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Not Shown.
/// </summary> public static string DisplayModificationOption_NOT_SHOWN_Not_Shown { get { return ResourceManager.GetString("DisplayModificationOption_NOT_SHOWN_Not_Shown", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Three Letter Code. /// </summary> public static string DisplayModificationOption_THREE_LETTER_CODE_Three_Letter_Code { get { return ResourceManager.GetString("DisplayModificationOption_THREE_LETTER_CODE_Three_Letter_Code", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unimod ID. /// </summary> public static string DisplayModificationOption_UNIMOD_ID_Unimod_ID { get { return ResourceManager.GetString("DisplayModificationOption_UNIMOD_ID_Unimod_ID", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Index {0} exceeds length {1}. /// </summary> public static string DocNodeParent_GetPathTo_Index__0__exceeds_length__1__ { get { return ResourceManager.GetString("DocNodeParent_GetPathTo_Index__0__exceeds_length__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Node reported {0} descendants at depth {1}, but found only {2}.. /// </summary> public static string DocNodeParent_GetPathTo_Node_reported__0__descendants_at_depth__1__but_found_only__2__ { get { return ResourceManager.GetString("DocNodeParent_GetPathTo_Node_reported__0__descendants_at_depth__1__but_found_only" + "__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Msx. /// </summary> public static string DoconvolutionMethod_MSX_Msx { get { return ResourceManager.GetString("DoconvolutionMethod_MSX_Msx", resourceCulture); } } /// <summary> /// Looks up a localized string similar to None. /// </summary> public static string DoconvolutionMethod_NONE_None { get { return ResourceManager.GetString("DoconvolutionMethod_NONE_None", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Overlap. /// </summary> public static string DoconvolutionMethod_OVERLAP_Overlap { get { return ResourceManager.GetString("DoconvolutionMethod_OVERLAP_Overlap", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Annotation &apos;{0}&apos; does not apply to element &apos;{1}&apos;.. /// </summary> public static string DocumentAnnotations_AnnotationDoesNotApplyException_Annotation___0___does_not_apply_to_element___1___ { get { return ResourceManager.GetString("DocumentAnnotations_AnnotationDoesNotApplyException_Annotation___0___does_not_app" + "ly_to_element___1___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The element &apos;{0}&apos; cannot have annotations.. /// </summary> public static string DocumentAnnotations_AnnotationsNotSupported_The_element___0___cannot_have_annotations_ { get { return ResourceManager.GetString("DocumentAnnotations_AnnotationsNotSupported_The_element___0___cannot_have_annotat" + "ions_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Could not find element &apos;{0}&apos;.. /// </summary> public static string DocumentAnnotations_ElementNotFoundException_Could_not_find_element___0___ { get { return ResourceManager.GetString("DocumentAnnotations_ElementNotFoundException_Could_not_find_element___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Updating calculated annotations. 
/// </summary> public static string DocumentAnnotationUpdater_UpdateAnnotations_Updating_calculated_annotations { get { return ResourceManager.GetString("DocumentAnnotationUpdater_UpdateAnnotations_Updating_calculated_annotations", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Report. /// </summary> public static string DocumentGridViewContext_CreateViewEditor_Edit_Report { get { return ResourceManager.GetString("DocumentGridViewContext_CreateViewEditor_Edit_Report", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to delete the {0} &apos;{1}&apos;?. /// </summary> public static string DocumentGridViewContext_Delete_Are_you_sure_you_want_to_delete_the__0____1___ { get { return ResourceManager.GetString("DocumentGridViewContext_Delete_Are_you_sure_you_want_to_delete_the__0____1___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to delete these {0} {1}?. /// </summary> public static string DocumentGridViewContext_Delete_Are_you_sure_you_want_to_delete_these__0___1__ { get { return ResourceManager.GetString("DocumentGridViewContext_Delete_Are_you_sure_you_want_to_delete_these__0___1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Preview: {0}. /// </summary> public static string DocumentGridViewContext_Preview_Preview___0_ { get { return ResourceManager.GetString("DocumentGridViewContext_Preview_Preview___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Preview New Report. /// </summary> public static string DocumentGridViewContext_Preview_Preview_New_Report { get { return ResourceManager.GetString("DocumentGridViewContext_Preview_Preview_New_Report", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to delete those items from those lists?. /// </summary> public static string DocumentSettingsDlg_OkDialog_Are_you_sure_you_want_to_delete_those_items_from_those_lists_ { get { return ResourceManager.GetString("DocumentSettingsDlg_OkDialog_Are_you_sure_you_want_to_delete_those_items_from_tho" + "se_lists_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to List &apos;{0}&apos; with {1} items. /// </summary> public static string DocumentSettingsDlg_OkDialog_List___0___with__1__items { get { return ResourceManager.GetString("DocumentSettingsDlg_OkDialog_List___0___with__1__items", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following lists have items in them which will be deleted when you remove the lists from your document:. /// </summary> public static string DocumentSettingsDlg_OkDialog_The_following_lists_have_items_in_them_which_will_be_deleted_when_you_remove_the_lists_from_your_document_ { get { return ResourceManager.GetString("DocumentSettingsDlg_OkDialog_The_following_lists_have_items_in_them_which_will_be" + "_deleted_when_you_remove_the_lists_from_your_document_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The list &apos;{0}&apos; has {1} items in it. If you remove that list from your document, those items will be deleted. Are you sure you want to delete those items from that list?. 
/// </summary> public static string DocumentSettingsDlg_OkDialog_The_list___0___has__1__items_in_it__If_you_remove_that_list_from_your_document__those_items_will_be_deleted__Are_you_sure_you_want_to_delete_those_items_from_that_list_ { get { return ResourceManager.GetString("DocumentSettingsDlg_OkDialog_The_list___0___has__1__items_in_it__If_you_remove_th" + "at_list_from_your_document__those_items_will_be_deleted__Are_you_sure_you_want_t" + "o_delete_those_items_from_that_list_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred while applying the rule set &apos;{0}&apos;. Do you want to continue with the change to the Document Settings?. /// </summary> public static string DocumentSettingsDlg_ValidateMetadataRules_An_error_occurred_while_applying_the_rule___0____Do_you_want_to_continue_with_the_change_to_the_Document_Settings_ { get { return ResourceManager.GetString("DocumentSettingsDlg_ValidateMetadataRules_An_error_occurred_while_applying_the_ru" + "le___0____Do_you_want_to_continue_with_the_change_to_the_Document_Settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to convert crosslinks in {0} to document format {1}.. /// </summary> public static string DocumentWriter_WritePeptideXml_Unable_to_convert_crosslinks_in__0__to_document_format__1__ { get { return ResourceManager.GetString("DocumentWriter_WritePeptideXml_Unable_to_convert_crosslinks_in__0__to_document_fo" + "rmat__1__", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap down_pro32 { get { object obj = ResourceManager.GetObject("down_pro32", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Resolving power must be greater than 0.. /// </summary> public static string DriftTimePredictor_Validate_Resolving_power_must_be_greater_than_0_ { get { return ResourceManager.GetString("DriftTimePredictor_Validate_Resolving_power_must_be_greater_than_0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fixed window width must be non-negative.. /// </summary> public static string DriftTimeWindowWidthCalculator_Validate_Fixed_window_width_must_be_non_negative_ { get { return ResourceManager.GetString("DriftTimeWindowWidthCalculator_Validate_Fixed_window_width_must_be_non_negative_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peak width must be non-negative.. /// </summary> public static string DriftTimeWindowWidthCalculator_Validate_Peak_width_must_be_non_negative_ { get { return ResourceManager.GetString("DriftTimeWindowWidthCalculator_Validate_Peak_width_must_be_non_negative_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap DropImage { get { object obj = ResourceManager.GetObject("DropImage", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Line {0} has {1} fields when {2} expected.. /// </summary> public static string DsvFileReader_ReadLine_Line__0__has__1__fields_when__2__expected_ { get { return ResourceManager.GetString("DsvFileReader_ReadLine_Line__0__has__1__fields_when__2__expected_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. 
/// </summary> public static System.Drawing.Bitmap Edit_Redo { get { object obj = ResourceManager.GetObject("Edit_Redo", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Edit_Undo { get { object obj = ResourceManager.GetObject("Edit_Undo", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Edit_Undo_Multiple { get { object obj = ResourceManager.GetObject("Edit_Undo_Multiple", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Measured results must be completely loaded before they can be used to create a collision energy regression.. /// </summary> public static string EditCEDlg_GetRegressionDatas_Measured_results_must_be_completely_loaded_before_they_can_be_used_to_create_a_collision_energy_regression { get { return ResourceManager.GetString("EditCEDlg_GetRegressionDatas_Measured_results_must_be_completely_loaded_before_th" + "ey_can_be_used_to_create_a_collision_energy_regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Collision energy regressions require at least one regression function.. /// </summary> public static string EditCEDlg_OkDialog_Collision_energy_regressions_require_at_least_one_regression_function { get { return ResourceManager.GetString("EditCEDlg_OkDialog_Collision_energy_regressions_require_at_least_one_regression_f" + "unction", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The collision energy regression &apos;{0}&apos; already exists.. /// </summary> public static string EditCEDlg_OkDialog_The_collision_energy_regression__0__already_exists { get { return ResourceManager.GetString("EditCEDlg_OkDialog_The_collision_energy_regression__0__already_exists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Collision Energy. /// </summary> public static string EditCEDlg_ShowGraph_Collision_Energy { get { return ResourceManager.GetString("EditCEDlg_ShowGraph_Collision_Energy", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Collision Energy Regression Charge {0}. /// </summary> public static string EditCEDlg_ShowGraph_Collision_Energy_Regression_Charge__0__ { get { return ResourceManager.GetString("EditCEDlg_ShowGraph_Collision_Energy_Regression_Charge__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor m/z. /// </summary> public static string EditCEDlg_ShowGraph_Precursor_m_z { get { return ResourceManager.GetString("EditCEDlg_ShowGraph_Precursor_m_z", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Insufficient data found to calculate a new regression.. /// </summary> public static string EditCEDlg_UseCurrentData_Insufficient_data_found_to_calculate_a_new_regression { get { return ResourceManager.GetString("EditCEDlg_UseCurrentData_Insufficient_data_found_to_calculate_a_new_regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A value is required.. 
/// </summary> public static string EditCEDlg_ValidateCell_A_value_is_required { get { return ResourceManager.GetString("EditCEDlg_ValidateCell_A_value_is_required", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The entry {0} is not valid.. /// </summary> public static string EditCEDlg_ValidateCell_The_entry__0__is_not_valid { get { return ResourceManager.GetString("EditCEDlg_ValidateCell_The_entry__0__is_not_valid", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The entry &apos;{0}&apos; is not a valid charge. Precursor charges must be between 1 and 5.. /// </summary> public static string EditCEDlg_ValidateCharge_The_entry__0__is_not_a_valid_charge_Precursor_charges_must_be_between_1_and_5 { get { return ResourceManager.GetString("EditCEDlg_ValidateCharge_The_entry__0__is_not_a_valid_charge_Precursor_charges_mu" + "st_be_between_1_and_5", resourceCulture); } } /// <summary> /// Looks up a localized string similar to On line {0}, {1}. /// </summary> public static string EditCEDlg_ValidateRegressionCellValues_On_line__0__1__ { get { return ResourceManager.GetString("EditCEDlg_ValidateRegressionCellValues_On_line__0__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to the value {0} is not a valid charge. Charges must be integer values between 1 and 5.. /// </summary> public static string EditCEDlg_ValidateRegressionCellValues_the_value__0__is_not_a_valid_charge_Charges_must_be_integer_values_between_1_and_5 { get { return ResourceManager.GetString("EditCEDlg_ValidateRegressionCellValues_the_value__0__is_not_a_valid_charge_Charge" + "s_must_be_integer_values_between_1_and_5", resourceCulture); } } /// <summary> /// Looks up a localized string similar to the value {0} is not a valid intercept.. /// </summary> public static string EditCEDlg_ValidateRegressionCellValues_the_value__0__is_not_a_valid_intercept { get { return ResourceManager.GetString("EditCEDlg_ValidateRegressionCellValues_the_value__0__is_not_a_valid_intercept", resourceCulture); } } /// <summary> /// Looks up a localized string similar to the value {0} is not a valid slope.. /// </summary> public static string EditCEDlg_ValidateRegressionCellValues_the_value__0__is_not_a_valid_slope { get { return ResourceManager.GetString("EditCEDlg_ValidateRegressionCellValues_the_value__0__is_not_a_valid_slope", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Maximum compensation voltage cannot be less than minimum compensation volatage.. /// </summary> public static string EditCoVDlg_btnOk_Click_Maximum_compensation_voltage_cannot_be_less_than_minimum_compensation_volatage_ { get { return ResourceManager.GetString("EditCoVDlg_btnOk_Click_Maximum_compensation_voltage_cannot_be_less_than_minimum_c" + "ompensation_volatage_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The compensation voltage parameters &apos;{0}&apos; already exist.. /// </summary> public static string EditCoVDlg_btnOk_Click_The_compensation_voltage_parameters___0___already_exist_ { get { return ResourceManager.GetString("EditCoVDlg_btnOk_Click_The_compensation_voltage_parameters___0___already_exist_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Monoisotopic m/z:. 
/// </summary> public static string EditCustomMoleculeDlg_EditCustomMoleculeDlg__Monoisotopic_m_z_ { get { return ResourceManager.GetString("EditCustomMoleculeDlg_EditCustomMoleculeDlg__Monoisotopic_m_z_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Monoisotopic mass:. /// </summary> public static string EditCustomMoleculeDlg_EditCustomMoleculeDlg__Monoisotopic_mass_ { get { return ResourceManager.GetString("EditCustomMoleculeDlg_EditCustomMoleculeDlg__Monoisotopic_mass_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A&amp;verage m/z:. /// </summary> public static string EditCustomMoleculeDlg_EditCustomMoleculeDlg_A_verage_m_z_ { get { return ResourceManager.GetString("EditCustomMoleculeDlg_EditCustomMoleculeDlg_A_verage_m_z_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A&amp;verage mass:. /// </summary> public static string EditCustomMoleculeDlg_EditCustomMoleculeDlg_A_verage_mass_ { get { return ResourceManager.GetString("EditCustomMoleculeDlg_EditCustomMoleculeDlg_A_verage_mass_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Addu&amp;ct for {0}:. /// </summary> public static string EditCustomMoleculeDlg_EditCustomMoleculeDlg_Addu_ct_for__0__ { get { return ResourceManager.GetString("EditCustomMoleculeDlg_EditCustomMoleculeDlg_Addu_ct_for__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Chemi&amp;cal formula:. /// </summary> public static string EditCustomMoleculeDlg_EditCustomMoleculeDlg_Chemi_cal_formula_ { get { return ResourceManager.GetString("EditCustomMoleculeDlg_EditCustomMoleculeDlg_Chemi_cal_formula_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A precursor with that adduct and label type already exists.. /// </summary> public static string EditCustomMoleculeDlg_OkDialog_A_precursor_with_that_adduct_and_label_type_already_exists_ { get { return ResourceManager.GetString("EditCustomMoleculeDlg_OkDialog_A_precursor_with_that_adduct_and_label_type_alread" + "y_exists_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A similar transition already exists.. /// </summary> public static string EditCustomMoleculeDlg_OkDialog_A_similar_transition_already_exists_ { get { return ResourceManager.GetString("EditCustomMoleculeDlg_OkDialog_A_similar_transition_already_exists_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Custom molecules must have a mass greater than or equal to {0}.. /// </summary> public static string EditCustomMoleculeDlg_OkDialog_Custom_molecules_must_have_a_mass_greater_than_or_equal_to__0__ { get { return ResourceManager.GetString("EditCustomMoleculeDlg_OkDialog_Custom_molecules_must_have_a_mass_greater_than_or_" + "equal_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Custom molecules must have a mass less than or equal to {0}.. /// </summary> public static string EditCustomMoleculeDlg_OkDialog_Custom_molecules_must_have_a_mass_less_than_or_equal_to__0__ { get { return ResourceManager.GetString("EditCustomMoleculeDlg_OkDialog_Custom_molecules_must_have_a_mass_less_than_or_equ" + "al_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please specify the ion mobility units.. 
/// </summary> public static string EditCustomMoleculeDlg_OkDialog_Please_specify_the_ion_mobility_units_ { get { return ResourceManager.GetString("EditCustomMoleculeDlg_OkDialog_Please_specify_the_ion_mobility_units_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The color scheme &apos;{0}&apos; already exists.. /// </summary> public static string EditCustomThemeDlg_buttonSave_Click_The_color_scheme___0___already_exists_ { get { return ResourceManager.GetString("EditCustomThemeDlg_buttonSave_Click_The_color_scheme___0___already_exists_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Colors must be entered in HEX or RGB format.. /// </summary> public static string EditCustomThemeDlg_dataGridViewColors_DataError_Colors_must_be_entered_in_HEX_or_RGB_format_ { get { return ResourceManager.GetString("EditCustomThemeDlg_dataGridViewColors_DataError_Colors_must_be_entered_in_HEX_or_" + "RGB_format_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to parse the color &apos;{0}&apos;. Use HEX or RGB format.. /// </summary> public static string EditCustomThemeDlg_DoPaste_Unable_to_parse_the_color___0____Use_HEX_or_RGB_format_ { get { return ResourceManager.GetString("EditCustomThemeDlg_DoPaste_Unable_to_parse_the_color___0____Use_HEX_or_RGB_format" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Set {0} to &apos;{1}&apos;. /// </summary> public static string EditDescription_GetUndoText_Set__0__to___1__ { get { return ResourceManager.GetString("EditDescription_GetUndoText_Set__0__to___1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Measured results must be completely loaded before they can be used to create a declustering potential regression.. /// </summary> public static string EditDPDlg_GetRegressionData_Measured_results_must_be_completely_loaded_before_they_can_be_used_to_create_a_declustering_potential_regression { get { return ResourceManager.GetString("EditDPDlg_GetRegressionData_Measured_results_must_be_completely_loaded_before_the" + "y_can_be_used_to_create_a_declustering_potential_regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The declustering potential regression &apos;{0}&apos; already exists.. /// </summary> public static string EditDPDlg_OkDialog_The_declustering_potential_regression__0__already_exists { get { return ResourceManager.GetString("EditDPDlg_OkDialog_The_declustering_potential_regression__0__already_exists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Declustering Potential. /// </summary> public static string EditDPDlg_ShowGraph_Declustering_Potential { get { return ResourceManager.GetString("EditDPDlg_ShowGraph_Declustering_Potential", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Declustering Potential Regression. /// </summary> public static string EditDPDlg_ShowGraph_Declustering_Potential_Regression { get { return ResourceManager.GetString("EditDPDlg_ShowGraph_Declustering_Potential_Regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor m/z. /// </summary> public static string EditDPDlg_ShowGraph_Precursor_m_z { get { return ResourceManager.GetString("EditDPDlg_ShowGraph_Precursor_m_z", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Insufficient data found to calculate a new regression.. 
/// </summary> public static string EditDPDlg_UseCurrentData_Insufficient_data_found_to_calculate_a_new_regression { get { return ResourceManager.GetString("EditDPDlg_UseCurrentData_Insufficient_data_found_to_calculate_a_new_regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The enzyme &apos;{0}&apos; already exists.. /// </summary> public static string EditEnzymeDlg_OnClosing_The_enzyme__0__already_exists { get { return ResourceManager.GetString("EditEnzymeDlg_OnClosing_The_enzyme__0__already_exists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must contain at least one amino acid.. /// </summary> public static string EditEnzymeDlg_ValidateAATextBox__0__must_contain_at_least_one_amino_acid { get { return ResourceManager.GetString("EditEnzymeDlg_ValidateAATextBox__0__must_contain_at_least_one_amino_acid", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The character &apos;{0}&apos; is not a valid amino acid.. /// </summary> public static string EditEnzymeDlg_ValidateAATextBox_The_character__0__is_not_a_valid_amino_acid { get { return ResourceManager.GetString("EditEnzymeDlg_ValidateAATextBox_The_character__0__is_not_a_valid_amino_acid", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must contain a valid regular expression.. /// </summary> public static string EditExclusionDlg_OkDialog__0__must_contain_a_valid_regular_expression_ { get { return ResourceManager.GetString("EditExclusionDlg_OkDialog__0__must_contain_a_valid_regular_expression_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The peptide exclusion &apos;{0}&apos; already exists.. /// </summary> public static string EditExclusionDlg_OkDialog_The_peptide_exclusion__0__already_exists { get { return ResourceManager.GetString("EditExclusionDlg_OkDialog_The_peptide_exclusion__0__already_exists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The text &apos;{0}&apos; is not a valid regular expression.. /// </summary> public static string EditExclusionDlg_OkDialog_The_text__0__is_not_a_valid_regular_expression { get { return ResourceManager.GetString("EditExclusionDlg_OkDialog_The_text__0__is_not_a_valid_regular_expression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Monoisotopic loss:. /// </summary> public static string EditFragmentLossDlg_EditFragmentLossDlg__Monoisotopic_loss_ { get { return ResourceManager.GetString("EditFragmentLossDlg_EditFragmentLossDlg__Monoisotopic_loss_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A&amp;verage loss:. /// </summary> public static string EditFragmentLossDlg_EditFragmentLossDlg_A_verage_loss_ { get { return ResourceManager.GetString("EditFragmentLossDlg_EditFragmentLossDlg_A_verage_loss_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loss &amp;chemical formula:. /// </summary> public static string EditFragmentLossDlg_EditFragmentLossDlg_Loss__chemical_formula_ { get { return ResourceManager.GetString("EditFragmentLossDlg_EditFragmentLossDlg_Loss__chemical_formula_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Neutral loss masses must be greater than or equal to {0}.. 
/// </summary> public static string EditFragmentLossDlg_OkDialog_Neutral_loss_masses_must_be_greater_than_or_equal_to__0__ { get { return ResourceManager.GetString("EditFragmentLossDlg_OkDialog_Neutral_loss_masses_must_be_greater_than_or_equal_to" + "__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Neutral loss masses must be less than or equal to {0}.. /// </summary> public static string EditFragmentLossDlg_OkDialog_Neutral_loss_masses_must_be_less_than_or_equal_to__0__ { get { return ResourceManager.GetString("EditFragmentLossDlg_OkDialog_Neutral_loss_masses_must_be_less_than_or_equal_to__0" + "__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please specify a formula or constant masses.. /// </summary> public static string EditFragmentLossDlg_OkDialog_Please_specify_a_formula_or_constant_masses { get { return ResourceManager.GetString("EditFragmentLossDlg_OkDialog_Please_specify_a_formula_or_constant_masses", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The loss &apos;{0}&apos; already exists. /// </summary> public static string EditFragmentLossDlg_OkDialog_The_loss__0__already_exists { get { return ResourceManager.GetString("EditFragmentLossDlg_OkDialog_The_loss__0__already_exists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to open a new ion mobility library file? Any changes to the current library will be lost.. /// </summary> public static string EditIonMobilityLibraryDlg_btnBrowseDb_Click_Are_you_sure_you_want_to_open_a_new_ion_mobility_library_file___Any_changes_to_the_current_library_will_be_lost_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_btnBrowseDb_Click_Are_you_sure_you_want_to_open_a_new_i" + "on_mobility_library_file___Any_changes_to_the_current_library_will_be_lost_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Open Ion Mobility Library. /// </summary> public static string EditIonMobilityLibraryDlg_btnBrowseDb_Click_Open_Ion_Mobility_Library { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_btnBrowseDb_Click_Open_Ion_Mobility_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to create a new ion mobility library file? Any changes to the current library will be lost.. /// </summary> public static string EditIonMobilityLibraryDlg_btnCreateDb_Click_Are_you_sure_you_want_to_create_a_new_ion_mobility_library_file___Any_changes_to_the_current_library_will_be_lost_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_btnCreateDb_Click_Are_you_sure_you_want_to_create_a_new" + "_ion_mobility_library_file___Any_changes_to_the_current_library_will_be_lost_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Create Ion Mobility Library. /// </summary> public static string EditIonMobilityLibraryDlg_btnCreateDb_Click_Create_Ion_Mobility_Library { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_btnCreateDb_Click_Create_Ion_Mobility_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The ion mobility library file {0} could not be created.. 
/// </summary> public static string EditIonMobilityLibraryDlg_CreateDatabase_The_ion_mobility_library_file__0__could_not_be_created { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_CreateDatabase_The_ion_mobility_library_file__0__could_" + "not_be_created", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adduct. /// </summary> public static string EditIonMobilityLibraryDlg_EditIonMobilityLibraryDlg_Adduct { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_EditIonMobilityLibraryDlg_Adduct", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Molecule. /// </summary> public static string EditIonMobilityLibraryDlg_EditIonMobilityLibraryDlg_Molecule { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_EditIonMobilityLibraryDlg_Molecule", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Finding ion mobility values for peaks. /// </summary> public static string EditIonMobilityLibraryDlg_GetDriftTimesFromResults_Finding_ion_mobility_values_for_peaks { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_GetDriftTimesFromResults_Finding_ion_mobility_values_fo" + "r_peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An ion mobility library with the name {0} already exists. Do you want to overwrite it?. /// </summary> public static string EditIonMobilityLibraryDlg_OkDialog_An_ion_mobility_library_with_the_name__0__already_exists__Do_you_want_to_overwrite_it_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_OkDialog_An_ion_mobility_library_with_the_name__0__alre" + "ady_exists__Do_you_want_to_overwrite_it_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Click the Create button to create a new library or the Open button to open an existing library file.. /// </summary> public static string EditIonMobilityLibraryDlg_OkDialog_Click_the_Create_button_to_create_a_new_library_or_the_Open_button_to_open_an_existing_library_file_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_OkDialog_Click_the_Create_button_to_create_a_new_librar" + "y_or_the_Open_button_to_open_an_existing_library_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you want to change it?. /// </summary> public static string EditIonMobilityLibraryDlg_OkDialog_Do_you_want_to_change_it_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_OkDialog_Do_you_want_to_change_it_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure updating peptides in the ion mobility library. The library may be out of synch.. /// </summary> public static string EditIonMobilityLibraryDlg_OkDialog_Failure_updating_peptides_in_the_ion_mobility_library__The_library_may_be_out_of_synch_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_OkDialog_Failure_updating_peptides_in_the_ion_mobility_" + "library__The_library_may_be_out_of_synch_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please choose a file for the ion mobility library.. 
/// </summary> public static string EditIonMobilityLibraryDlg_OkDialog_Please_choose_a_file_for_the_ion_mobility_library { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_OkDialog_Please_choose_a_file_for_the_ion_mobility_libr" + "ary", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please enter a name for the ion mobility library.. /// </summary> public static string EditIonMobilityLibraryDlg_OkDialog_Please_enter_a_name_for_the_ion_mobility_library_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_OkDialog_Please_enter_a_name_for_the_ion_mobility_libra" + "ry_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please use a full path to a file for the ion mobility library.. /// </summary> public static string EditIonMobilityLibraryDlg_OkDialog_Please_use_a_full_path_to_a_file_for_the_ion_mobility_library_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_OkDialog_Please_use_a_full_path_to_a_file_for_the_ion_m" + "obility_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} does not exist. Click the Create button to create a new ion mobility library or click the Open button to find the missing file.. /// </summary> public static string EditIonMobilityLibraryDlg_OpenDatabase_The_file__0__does_not_exist__Click_the_Create_button_to_create_a_new_ion_mobility_library_or_click_the_Open_button_to_find_the_missing_file_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_OpenDatabase_The_file__0__does_not_exist__Click_the_Cre" + "ate_button_to_create_a_new_ion_mobility_library_or_click_the_Open_button_to_find" + "_the_missing_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Peptide. /// </summary> public static string EditIonMobilityLibraryDlg_UpdateNumPeptides__0__Peptide { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_UpdateNumPeptides__0__Peptide", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Peptides. /// </summary> public static string EditIonMobilityLibraryDlg_UpdateNumPeptides__0__Peptides { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_UpdateNumPeptides__0__Peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Precursor Ion. /// </summary> public static string EditIonMobilityLibraryDlg_UpdateNumPrecursorIons__0__Precursor_Ion { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_UpdateNumPrecursorIons__0__Precursor_Ion", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Precursor Ions. /// </summary> public static string EditIonMobilityLibraryDlg_UpdateNumPrecursorIons__0__Precursor_Ions { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_UpdateNumPrecursorIons__0__Precursor_Ions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A value is required.. /// </summary> public static string EditIonMobilityLibraryDlg_ValidateCell_A_value_is_required_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_ValidateCell_A_value_is_required_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The entry {0} is not valid.. 
/// </summary> public static string EditIonMobilityLibraryDlg_ValidateCell_The_entry__0__is_not_valid_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_ValidateCell_The_entry__0__is_not_valid_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value {0} is not a valid modified peptide sequence.. /// </summary> public static string EditIonMobilityLibraryDlg_ValidatePeptideList_The_value__0__is_not_a_valid_modified_peptide_sequence_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_ValidatePeptideList_The_value__0__is_not_a_valid_modifi" + "ed_peptide_sequence_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to On line {0} {1}. /// </summary> public static string EditIonMobilityLibraryDlg_ValidateRegressionCellValues_On_line__0___1_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_ValidateRegressionCellValues_On_line__0___1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to the value {0} is not a valid charge. Charges must be integer values between 1 and {1}.. /// </summary> public static string EditIonMobilityLibraryDlg_ValidateRegressionCellValues_the_value__0__is_not_a_valid_charge__Charges_must_be_integer_values_between_1_and__1__ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_ValidateRegressionCellValues_the_value__0__is_not_a_val" + "id_charge__Charges_must_be_integer_values_between_1_and__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to the value {0} is not a valid intercept.. /// </summary> public static string EditIonMobilityLibraryDlg_ValidateRegressionCellValues_the_value__0__is_not_a_valid_intercept_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_ValidateRegressionCellValues_the_value__0__is_not_a_val" + "id_intercept_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to the value {0} is not a valid slope.. /// </summary> public static string EditIonMobilityLibraryDlg_ValidateRegressionCellValues_the_value__0__is_not_a_valid_slope_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_ValidateRegressionCellValues_the_value__0__is_not_a_val" + "id_slope_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Resolving power must be greater than 0.. /// </summary> public static string EditIonMobilityLibraryDlg_ValidateResolvingPower_Resolving_power_must_be_greater_than_0_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_ValidateResolvingPower_Resolving_power_must_be_greater_" + "than_0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following ions have multiple ion mobility values. Skyline supports multiple conformers, so this may be intentional.. /// </summary> public static string EditIonMobilityLibraryDlg_ValidateUniquePrecursors_The_following_ions_have_multiple_ion_mobility_values__Skyline_supports_multiple_conformers__so_this_may_be_intentional_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_ValidateUniquePrecursors_The_following_ions_have_multip" + "le_ion_mobility_values__Skyline_supports_multiple_conformers__so_this_may_be_int" + "entional_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The ion {0} has multiple ion mobility values. Skyline supports multiple conformers, so this may be intentional.. 
/// </summary> public static string EditIonMobilityLibraryDlg_ValidateUniquePrecursors_The_ion__0__has_multiple_ion_mobility_values__Skyline_supports_multiple_conformers__so_this_may_be_intentional_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_ValidateUniquePrecursors_The_ion__0__has_multiple_ion_m" + "obility_values__Skyline_supports_multiple_conformers__so_this_may_be_intentional" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This list contains {0} ions with multiple ion mobility values. Skyline supports multiple conformers, so this may be intentional.. /// </summary> public static string EditIonMobilityLibraryDlg_ValidateUniquePrecursors_This_list_contains__0__ions_with_multiple_ion_mobility_values__Skyline_supports_multiple_conformers__so_this_may_be_intentional_ { get { return ResourceManager.GetString("EditIonMobilityLibraryDlg_ValidateUniquePrecursors_This_list_contains__0__ions_wi" + "th_multiple_ion_mobility_values__Skyline_supports_multiple_conformers__so_this_m" + "ay_be_intentional_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to open a new database file? Any changes to the current calculator will be lost.. /// </summary> public static string EditIrtCalcDlg_btnBrowseDb_Click_Are_you_sure_you_want_to_open_a_new_database_file_Any_changes_to_the_current_calculator_will_be_lost { get { return ResourceManager.GetString("EditIrtCalcDlg_btnBrowseDb_Click_Are_you_sure_you_want_to_open_a_new_database_fil" + "e_Any_changes_to_the_current_calculator_will_be_lost", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Open iRT Database. /// </summary> public static string EditIrtCalcDlg_btnBrowseDb_Click_Open_iRT_Database { get { return ResourceManager.GetString("EditIrtCalcDlg_btnBrowseDb_Click_Open_iRT_Database", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to create a new database file? Any changes to the current calculator will be lost.. /// </summary> public static string EditIrtCalcDlg_btnCreateDb_Click_Are_you_sure_you_want_to_create_a_new_database_file_Any_changes_to_the_current_calculator_will_be_lost { get { return ResourceManager.GetString("EditIrtCalcDlg_btnCreateDb_Click_Are_you_sure_you_want_to_create_a_new_database_f" + "ile_Any_changes_to_the_current_calculator_will_be_lost", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Create iRT Database. /// </summary> public static string EditIrtCalcDlg_btnCreateDb_Click_Create_iRT_Database { get { return ResourceManager.GetString("EditIrtCalcDlg_btnCreateDb_Click_Create_iRT_Database", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Removed peptides on iRT standard protein. /// </summary> public static string EditIrtCalcDlg_ChangeStandardPeptides_Removed_peptides_on_iRT_standard_protein { get { return ResourceManager.GetString("EditIrtCalcDlg_ChangeStandardPeptides_Removed_peptides_on_iRT_standard_protein", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The list of standard peptides must contain only recognized iRT-C18 standards to switch to a predefined set of iRT-C18 standards.. 
/// </summary> public static string EditIrtCalcDlg_comboStandards_SelectedIndexChanged_The_list_of_standard_peptides_must_contain_only_recognized_iRT_C18_standards_to_switch_to_a_predefined_set_of_iRT_C18_standards_ { get { return ResourceManager.GetString("EditIrtCalcDlg_comboStandards_SelectedIndexChanged_The_list_of_standard_peptides_" + "must_contain_only_recognized_iRT_C18_standards_to_switch_to_a_predefined_set_of_" + "iRT_C18_standards_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} could not be created.. /// </summary> public static string EditIrtCalcDlg_CreateDatabase_The_file__0__could_not_be_created { get { return ResourceManager.GetString("EditIrtCalcDlg_CreateDatabase_The_file__0__could_not_be_created", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A calculator with the name {0} already exists. Do you want to overwrite it?. /// </summary> public static string EditIrtCalcDlg_OkDialog_A_calculator_with_the_name__0__already_exists_Do_you_want_to_overwrite_it { get { return ResourceManager.GetString("EditIrtCalcDlg_OkDialog_A_calculator_with_the_name__0__already_exists_Do_you_want" + "_to_overwrite_it", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Chromatogram libraries cannot be modified. You must save this iRT calculator as a new file.. /// </summary> public static string EditIrtCalcDlg_OkDialog_Chromatogram_libraries_cannot_be_modified__You_must_save_this_iRT_calculator_as_a_new_file_ { get { return ResourceManager.GetString("EditIrtCalcDlg_OkDialog_Chromatogram_libraries_cannot_be_modified__You_must_save_" + "this_iRT_calculator_as_a_new_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Click the Create button to create a new database or the Open button to open an existing database file.. /// </summary> public static string EditIrtCalcDlg_OkDialog_Click_the_Create_button_to_create_a_new_database_or_the_Open_button_to_open_an_existing_database_file { get { return ResourceManager.GetString("EditIrtCalcDlg_OkDialog_Click_the_Create_button_to_create_a_new_database_or_the_O" + "pen_button_to_open_an_existing_database_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure updating peptides in the iRT database. The database may be out of synch.. /// </summary> public static string EditIrtCalcDlg_OkDialog_Failure_updating_peptides_in_the_iRT_database___The_database_may_be_out_of_synch_ { get { return ResourceManager.GetString("EditIrtCalcDlg_OkDialog_Failure_updating_peptides_in_the_iRT_database___The_datab" + "ase_may_be_out_of_synch_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to library. /// </summary> public static string EditIrtCalcDlg_OkDialog_library_table_name { get { return ResourceManager.GetString("EditIrtCalcDlg_OkDialog_library_table_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please choose a database file for the iRT calculator.. /// </summary> public static string EditIrtCalcDlg_OkDialog_Please_choose_a_database_file_for_the_iRT_calculator { get { return ResourceManager.GetString("EditIrtCalcDlg_OkDialog_Please_choose_a_database_file_for_the_iRT_calculator", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please enter a name for the iRT calculator.. 
/// </summary> public static string EditIrtCalcDlg_OkDialog_Please_enter_a_name_for_the_iRT_calculator { get { return ResourceManager.GetString("EditIrtCalcDlg_OkDialog_Please_enter_a_name_for_the_iRT_calculator", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please enter at least {0} standard peptides.. /// </summary> public static string EditIrtCalcDlg_OkDialog_Please_enter_at_least__0__standard_peptides { get { return ResourceManager.GetString("EditIrtCalcDlg_OkDialog_Please_enter_at_least__0__standard_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please use a full path to a database file for the iRT calculator.. /// </summary> public static string EditIrtCalcDlg_OkDialog_Please_use_a_full_path_to_a_database_file_for_the_iRT_calculator { get { return ResourceManager.GetString("EditIrtCalcDlg_OkDialog_Please_use_a_full_path_to_a_database_file_for_the_iRT_cal" + "culator", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Spectral libraries cannot be modified. You must save this iRT calculator as a new file.. /// </summary> public static string EditIrtCalcDlg_OkDialog_Spectral_libraries_cannot_be_modified__You_must_save_this_iRT_calculator_as_a_new_file_ { get { return ResourceManager.GetString("EditIrtCalcDlg_OkDialog_Spectral_libraries_cannot_be_modified__You_must_save_this" + "_iRT_calculator_as_a_new_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to standard. /// </summary> public static string EditIrtCalcDlg_OkDialog_standard_table_name { get { return ResourceManager.GetString("EditIrtCalcDlg_OkDialog_standard_table_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Using fewer than {0} standard peptides is not recommended. Are you sure you want to continue with only {1}?. /// </summary> public static string EditIrtCalcDlg_OkDialog_Using_fewer_than__0__standard_peptides_is_not_recommended_Are_you_sure_you_want_to_continue_with_only__1__ { get { return ResourceManager.GetString("EditIrtCalcDlg_OkDialog_Using_fewer_than__0__standard_peptides_is_not_recommended" + "_Are_you_sure_you_want_to_continue_with_only__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} does not exist. Click the Create button to create a new database or the Open button to find the missing file.. /// </summary> public static string EditIrtCalcDlg_OpenDatabase_The_file__0__does_not_exist__Click_the_Create_button_to_create_a_new_database_or_the_Open_button_to_find_the_missing_file_ { get { return ResourceManager.GetString("EditIrtCalcDlg_OpenDatabase_The_file__0__does_not_exist__Click_the_Create_button_" + "to_create_a_new_database_or_the_Open_button_to_find_the_missing_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Could not get a minimum or maximum standard peptide.. /// </summary> public static string EditIrtCalcDlg_RecalibrateStandards_Could_not_get_a_minimum_or_maximum_standard_peptide_ { get { return ResourceManager.GetString("EditIrtCalcDlg_RecalibrateStandards_Could_not_get_a_minimum_or_maximum_standard_p" + "eptide_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Peptide. 
/// </summary> public static string EditIrtCalcDlg_UpdateNumPeptides__0__Peptide { get { return ResourceManager.GetString("EditIrtCalcDlg_UpdateNumPeptides__0__Peptide", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Peptides. /// </summary> public static string EditIrtCalcDlg_UpdateNumPeptides__0__Peptides { get { return ResourceManager.GetString("EditIrtCalcDlg_UpdateNumPeptides__0__Peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cali&amp;brate.... /// </summary> public static string EditIrtCalcDlg_UpdateNumPeptides_Calibrate { get { return ResourceManager.GetString("EditIrtCalcDlg_UpdateNumPeptides_Calibrate", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Recali&amp;brate.... /// </summary> public static string EditIrtCalcDlg_UpdateNumPeptides_Recalibrate { get { return ResourceManager.GetString("EditIrtCalcDlg_UpdateNumPeptides_Recalibrate", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Standard peptide ({1} required). /// </summary> public static string EditIrtCalcDlg_UpdateNumStandards__0__Standard_peptide___1__required_ { get { return ResourceManager.GetString("EditIrtCalcDlg_UpdateNumStandards__0__Standard_peptide___1__required_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Standard peptides ({1} required). /// </summary> public static string EditIrtCalcDlg_UpdateNumStandards__0__Standard_peptides___1__required_ { get { return ResourceManager.GetString("EditIrtCalcDlg_UpdateNumStandards__0__Standard_peptides___1__required_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The peptide {0} appears in the {1} table more than once.. /// </summary> public static string EditIrtCalcDlg_ValidatePeptideList_The_peptide__0__appears_in_the__1__table_more_than_once { get { return ResourceManager.GetString("EditIrtCalcDlg_ValidatePeptideList_The_peptide__0__appears_in_the__1__table_more_" + "than_once", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value {0} is not a valid modified peptide sequence.. /// </summary> public static string EditIrtCalcDlg_ValidatePeptideList_The_value__0__is_not_a_valid_modified_peptide_sequence { get { return ResourceManager.GetString("EditIrtCalcDlg_ValidatePeptideList_The_value__0__is_not_a_valid_modified_peptide_" + "sequence", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Margin. /// </summary> public static string EditIsolationSchemeDlg_comboMargins_SelectedIndexChanged_Margin { get { return ResourceManager.GetString("EditIsolationSchemeDlg_comboMargins_SelectedIndexChanged_Margin", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Start margin. /// </summary> public static string EditIsolationSchemeDlg_comboMargins_SelectedIndexChanged_Start_margin { get { return ResourceManager.GetString("EditIsolationSchemeDlg_comboMargins_SelectedIndexChanged_Start_margin", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Overlap requires an even number of windows.. /// </summary> public static string EditIsolationSchemeDlg_GetIsolationWindows_Overlap_requires_an_even_number_of_windows_ { get { return ResourceManager.GetString("EditIsolationSchemeDlg_GetIsolationWindows_Overlap_requires_an_even_number_of_win" + "dows_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed reading isolation scheme.. 
/// </summary> public static string EditIsolationSchemeDlg_ImportRangesFromFiles_Failed_reading_isolation_scheme_ { get { return ResourceManager.GetString("EditIsolationSchemeDlg_ImportRangesFromFiles_Failed_reading_isolation_scheme_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reading isolation scheme.... /// </summary> public static string EditIsolationSchemeDlg_ImportRangesFromFiles_Reading_isolation_scheme___ { get { return ResourceManager.GetString("EditIsolationSchemeDlg_ImportRangesFromFiles_Reading_isolation_scheme___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Specify {0} for isolation window.. /// </summary> public static string EditIsolationSchemeDlg_OkDialog_Specify__0__for_isolation_window { get { return ResourceManager.GetString("EditIsolationSchemeDlg_OkDialog_Specify__0__for_isolation_window", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Specify Start and End values for at least one isolation window.. /// </summary> public static string EditIsolationSchemeDlg_OkDialog_Specify_Start_and_End_values_for_at_least_one_isolation_window { get { return ResourceManager.GetString("EditIsolationSchemeDlg_OkDialog_Specify_Start_and_End_values_for_at_least_one_iso" + "lation_window", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The isolation scheme named &apos;{0}&apos; already exists.. /// </summary> public static string EditIsolationSchemeDlg_OkDialog_The_isolation_scheme_named__0__already_exists { get { return ResourceManager.GetString("EditIsolationSchemeDlg_OkDialog_The_isolation_scheme_named__0__already_exists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The selected target is not unique.. /// </summary> public static string EditIsolationSchemeDlg_OkDialog_The_selected_target_is_not_unique { get { return ResourceManager.GetString("EditIsolationSchemeDlg_OkDialog_The_selected_target_is_not_unique", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There are gaps in a single cycle of your extraction windows. Are you sure you want to continue?. /// </summary> public static string EditIsolationSchemeDlg_OkDialog_There_are_gaps_in_a_single_cycle_of_your_extraction_windows__Do_you_want_to_continue_ { get { return ResourceManager.GetString("EditIsolationSchemeDlg_OkDialog_There_are_gaps_in_a_single_cycle_of_your_extracti" + "on_windows__Do_you_want_to_continue_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Multiplex graphing is not supported.. /// </summary> public static string EditIsolationSchemeDlg_OpenGraph_Graphing_multiplexing_is_not_supported_ { get { return ResourceManager.GetString("EditIsolationSchemeDlg_OpenGraph_Graphing_multiplexing_is_not_supported_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing isolation range for the isolation target {0} m/z in the file {1}. /// </summary> public static string EditIsolationSchemeDlg_ReadIsolationRanges_Missing_isolation_range_for_the_isolation_target__0__m_z_in_the_file__1_ { get { return ResourceManager.GetString("EditIsolationSchemeDlg_ReadIsolationRanges_Missing_isolation_range_for_the_isolat" + "ion_target__0__m_z_in_the_file__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No repeating isolation scheme found in {0}. 
/// </summary> public static string EditIsolationSchemeDlg_ReadIsolationRanges_No_repeating_isolation_scheme_found_in__0_ { get { return ResourceManager.GetString("EditIsolationSchemeDlg_ReadIsolationRanges_No_repeating_isolation_scheme_found_in" + "__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation &amp;width:. /// </summary> public static string EditIsolationSchemeDlg_UpdateIsolationWidths_Isolation_width { get { return ResourceManager.GetString("EditIsolationSchemeDlg_UpdateIsolationWidths_Isolation_width", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation &amp;widths:. /// </summary> public static string EditIsolationSchemeDlg_UpdateIsolationWidths_Isolation_widths { get { return ResourceManager.GetString("EditIsolationSchemeDlg_UpdateIsolationWidths_Isolation_widths", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There are overlaps in a single cycle of your extraction windows. Are you sure you want to continue?. /// </summary> public static string EditIsolationSchemeDlgOkDialogThereAreOverlapsContinue { get { return ResourceManager.GetString("EditIsolationSchemeDlgOkDialogThereAreOverlapsContinue", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The isotope enrichments named &apos;{0}&apos; already exist.. /// </summary> public static string EditIsotopeEnrichmentDlg_OkDialog_The_isotope_enrichments_named__0__already_exist { get { return ResourceManager.GetString("EditIsotopeEnrichmentDlg_OkDialog_The_isotope_enrichments_named__0__already_exist" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The entry {0} is not valid.. /// </summary> public static string EditIsotopeEnrichmentDlg_ValidateCell_The_entry__0__is_not_valid { get { return ResourceManager.GetString("EditIsotopeEnrichmentDlg_ValidateCell_The_entry__0__is_not_valid", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The enrichment {0} must be between {1} and {2}.. /// </summary> public static string EditIsotopeEnrichmentDlg_ValidateEnrichment_The_enrichment__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("EditIsotopeEnrichmentDlg_ValidateEnrichment_The_enrichment__0__must_be_between__1" + "__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The label name &apos;{0}&apos; may not be used more than once.. /// </summary> public static string EditLabelTypeListDlg_OkDialog_The_label_name__0__may_not_be_used_more_than_once { get { return ResourceManager.GetString("EditLabelTypeListDlg_OkDialog_The_label_name__0__may_not_be_used_more_than_once", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The label names &apos;{0}&apos; and &apos;{1}&apos; conflict. Use more unique names.. /// </summary> public static string EditLabelTypeListDlg_OkDialog_The_label_names__0__and__1__conflict_Use_more_unique_names { get { return ResourceManager.GetString("EditLabelTypeListDlg_OkDialog_The_label_names__0__and__1__conflict_Use_more_uniqu" + "e_names", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The name &apos;{0}&apos; conflicts with the default light isotope label type.. 
/// </summary> public static string EditLabelTypeListDlg_OkDialog_The_name__0__conflicts_with_the_default_light_isotope_label_type { get { return ResourceManager.GetString("EditLabelTypeListDlg_OkDialog_The_name__0__conflicts_with_the_default_light_isoto" + "pe_label_type", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Legacy Libraries. /// </summary> public static string EditLibraryDlg_GetLibraryPath_Legacy_Libraries { get { return ResourceManager.GetString("EditLibraryDlg_GetLibraryPath_Legacy_Libraries", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Spectral Libraries. /// </summary> public static string EditLibraryDlg_GetLibraryPath_Spectral_Libraries { get { return ResourceManager.GetString("EditLibraryDlg_GetLibraryPath_Spectral_Libraries", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading chromatogram library. /// </summary> public static string EditLibraryDlg_OkDialog_Loading_chromatogram_library { get { return ResourceManager.GetString("EditLibraryDlg_OkDialog_Loading_chromatogram_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please choose a non-redundant library.. /// </summary> public static string EditLibraryDlg_OkDialog_Please_choose_a_non_redundant_library { get { return ResourceManager.GetString("EditLibraryDlg_OkDialog_Please_choose_a_non_redundant_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} appears to be a redundant library.. /// </summary> public static string EditLibraryDlg_OkDialog_The_file__0__appears_to_be_a_redundant_library { get { return ResourceManager.GetString("EditLibraryDlg_OkDialog_The_file__0__appears_to_be_a_redundant_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} does not exist.. /// </summary> public static string EditLibraryDlg_OkDialog_The_file__0__does_not_exist { get { return ResourceManager.GetString("EditLibraryDlg_OkDialog_The_file__0__does_not_exist", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} is not a supported spectral library file format.. /// </summary> public static string EditLibraryDlg_OkDialog_The_file__0__is_not_a_supported_spectral_library_file_format { get { return ResourceManager.GetString("EditLibraryDlg_OkDialog_The_file__0__is_not_a_supported_spectral_library_file_for" + "mat", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The library &apos;{0}&apos; already exists.. /// </summary> public static string EditLibraryDlg_OkDialog_The_library__0__already_exists { get { return ResourceManager.GetString("EditLibraryDlg_OkDialog_The_library__0__already_exists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The path {0} is a directory.. /// </summary> public static string EditLibraryDlg_OkDialog_The_path__0__is_a_directory { get { return ResourceManager.GetString("EditLibraryDlg_OkDialog_The_path__0__is_a_directory", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap EditLink { get { object obj = ResourceManager.GetObject("EditLink", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to The crosslinker &apos;{0}&apos; cannot attach to the amino acid &apos;{1}&apos;.. 
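        /// (Editor's illustrative note, not generator output: multi-slot format strings
        /// take their arguments positionally, e.g.
        /// <code>string.Format(EditLinkedPeptideDlg_TryMakeLinkedPeptide_The_crosslinker___0___cannot_attach_to_the_amino_acid___1___, crosslinkerName, aminoAcid)</code>,
        /// where both argument names are assumptions.)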
/// </summary> public static string EditLinkedPeptideDlg_TryMakeLinkedPeptide_The_crosslinker___0___cannot_attach_to_the_amino_acid___1___ { get { return ResourceManager.GetString("EditLinkedPeptideDlg_TryMakeLinkedPeptide_The_crosslinker___0___cannot_attach_to_" + "the_amino_acid___1___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Modifications. /// </summary> public static string EditLinkedPeptidesDlg_dataGridViewLinkedPeptides_CellToolTipTextNeeded_Edit_Modifications { get { return ResourceManager.GetString("EditLinkedPeptidesDlg_dataGridViewLinkedPeptides_CellToolTipTextNeeded_Edit_Modif" + "ications", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Both ends of this crosslink cannot be the same.. /// </summary> public static string EditLinkedPeptidesDlg_MakeCrosslink_Both_ends_of_this_crosslink_cannot_be_the_same_ { get { return ResourceManager.GetString("EditLinkedPeptidesDlg_MakeCrosslink_Both_ends_of_this_crosslink_cannot_be_the_sam" + "e_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid crosslinker. /// </summary> public static string EditLinkedPeptidesDlg_MakeCrosslink_Invalid_crosslinker { get { return ResourceManager.GetString("EditLinkedPeptidesDlg_MakeCrosslink_Invalid_crosslinker", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The crosslinker &apos;{0}&apos; cannot attach to this amino acid position.. /// </summary> public static string EditLinkedPeptidesDlg_MakeCrosslink_The_crosslinker___0___cannot_attach_to_this_amino_acid_position_ { get { return ResourceManager.GetString("EditLinkedPeptidesDlg_MakeCrosslink_The_crosslinker___0___cannot_attach_to_this_a" + "mino_acid_position_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This amino acid position cannot be blank.. /// </summary> public static string EditLinkedPeptidesDlg_MakeCrosslink_This_amino_acid_position_cannot_be_blank_ { get { return ResourceManager.GetString("EditLinkedPeptidesDlg_MakeCrosslink_This_amino_acid_position_cannot_be_blank_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This amino acid position in this peptide is already being used by another crosslink.. /// </summary> public static string EditLinkedPeptidesDlg_MakeCrosslink_This_amino_acid_position_in_this_peptide_is_already_being_used_by_another_crosslink_ { get { return ResourceManager.GetString("EditLinkedPeptidesDlg_MakeCrosslink_This_amino_acid_position_in_this_peptide_is_a" + "lready_being_used_by_another_crosslink_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This is not a valid amino acid position in this peptide.. /// </summary> public static string EditLinkedPeptidesDlg_MakeCrosslink_This_is_not_a_valid_amino_acid_position_in_this_peptide_ { get { return ResourceManager.GetString("EditLinkedPeptidesDlg_MakeCrosslink_This_is_not_a_valid_amino_acid_position_in_th" + "is_peptide_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This peptide is not valid.. /// </summary> public static string EditLinkedPeptidesDlg_MakeCrosslink_This_peptide_is_not_valid_ { get { return ResourceManager.GetString("EditLinkedPeptidesDlg_MakeCrosslink_This_peptide_is_not_valid_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Some crosslinked peptides are not connected.. 
/// </summary> public static string EditLinkedPeptidesDlg_OkDialog_Some_crosslinked_peptides_are_not_connected_ { get { return ResourceManager.GetString("EditLinkedPeptidesDlg_OkDialog_Some_crosslinked_peptides_are_not_connected_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This will reset the list to its default values. Continue?. /// </summary> public static string EditListDlg_btnReset_Click_This_will_reset_the_list_to_its_default_values_Continue { get { return ResourceManager.GetString("EditListDlg_btnReset_Click_This_will_reset_the_list_to_its_default_values_Continu" + "e", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Monoisotopic mass:. /// </summary> public static string EditMeasuredIonDlg_EditMeasuredIonDlg__Monoisotopic_mass_ { get { return ResourceManager.GetString("EditMeasuredIonDlg_EditMeasuredIonDlg__Monoisotopic_mass_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A&amp;verage mass:. /// </summary> public static string EditMeasuredIonDlg_EditMeasuredIonDlg_A_verage_mass_ { get { return ResourceManager.GetString("EditMeasuredIonDlg_EditMeasuredIonDlg_A_verage_mass_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ion &amp;chemical formula:. /// </summary> public static string EditMeasuredIonDlg_EditMeasuredIonDlg_Ion__chemical_formula_ { get { return ResourceManager.GetString("EditMeasuredIonDlg_EditMeasuredIonDlg_Ion__chemical_formula_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please specify a formula or constant masses.. /// </summary> public static string EditMeasuredIonDlg_OkDialog_Please_specify_a_formula_or_constant_masses { get { return ResourceManager.GetString("EditMeasuredIonDlg_OkDialog_Please_specify_a_formula_or_constant_masses", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reporter ion masses must be greater than or equal to {0}.. /// </summary> public static string EditMeasuredIonDlg_OkDialog_Reporter_ion_masses_must_be_greater_than_or_equal_to__0__ { get { return ResourceManager.GetString("EditMeasuredIonDlg_OkDialog_Reporter_ion_masses_must_be_greater_than_or_equal_to_" + "_0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reporter ion masses must be less than or equal to {0}.. /// </summary> public static string EditMeasuredIonDlg_OkDialog_Reporter_ion_masses_must_be_less_than_or_equal_to__0__ { get { return ResourceManager.GetString("EditMeasuredIonDlg_OkDialog_Reporter_ion_masses_must_be_less_than_or_equal_to__0_" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The special ion &apos;{0}&apos; already exists.. /// </summary> public static string EditMeasuredIonDlg_OkDialog_The_special_ion__0__already_exists { get { return ResourceManager.GetString("EditMeasuredIonDlg_OkDialog_The_special_ion__0__already_exists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must contain at least one amino acid.. /// </summary> public static string EditMeasuredIonDlg_ValidateAATextBox__0__must_contain_at_least_one_amino_acid { get { return ResourceManager.GetString("EditMeasuredIonDlg_ValidateAATextBox__0__must_contain_at_least_one_amino_acid", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The character &apos;{0}&apos; is not a valid amino acid.. 
/// </summary> public static string EditMeasuredIonDlg_ValidateAATextBox_The_character__0__is_not_a_valid_amino_acid { get { return ResourceManager.GetString("EditMeasuredIonDlg_ValidateAATextBox_The_character__0__is_not_a_valid_amino_acid", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to create a new optimization library file? Any changes to the current library will be lost.. /// </summary> public static string EditOptimizationLibraryDlg_btnCreate_Click_Are_you_sure_you_want_to_create_a_new_optimization_library_file__Any_changes_to_the_current_library_will_be_lost_ { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_btnCreate_Click_Are_you_sure_you_want_to_create_a_new_" + "optimization_library_file__Any_changes_to_the_current_library_will_be_lost_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Create Optimization Library. /// </summary> public static string EditOptimizationLibraryDlg_btnCreate_Click_Create_Optimization_Library { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_btnCreate_Click_Create_Optimization_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to open a new optimization library file? Any changes to the current library will be lost.. /// </summary> public static string EditOptimizationLibraryDlg_btnOpen_Click_Are_you_sure_you_want_to_open_a_new_optimization_library_file__Any_changes_to_the_current_library_will_be_lost_ { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_btnOpen_Click_Are_you_sure_you_want_to_open_a_new_opti" + "mization_library_file__Any_changes_to_the_current_library_will_be_lost_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Open Optimization Library. /// </summary> public static string EditOptimizationLibraryDlg_btnOpen_Click_Open_Optimization_Library { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_btnOpen_Click_Open_Optimization_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} could not be created.. /// </summary> public static string EditOptimizationLibraryDlg_CreateDatabase_The_file__0__could_not_be_created_ { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_CreateDatabase_The_file__0__could_not_be_created_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A library with the name {0} already exists. Do you want to overwrite it?. /// </summary> public static string EditOptimizationLibraryDlg_OkDialog_A_library_with_the_name__0__already_exists__Do_you_want_to_overwrite_it_ { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_OkDialog_A_library_with_the_name__0__already_exists__D" + "o_you_want_to_overwrite_it_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Click the Create button to create a new library or the Open button to open an existing library file.. /// </summary> public static string EditOptimizationLibraryDlg_OkDialog_Click_the_Create_button_to_create_a_new_library_or_the_Open_button_to_open_an_existing_library_file_ { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_OkDialog_Click_the_Create_button_to_create_a_new_libra" + "ry_or_the_Open_button_to_open_an_existing_library_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure updating optimizations in the optimization library. 
The database may be out of synch.. /// </summary> public static string EditOptimizationLibraryDlg_OkDialog_Failure_updating_optimizations_in_the_optimization_library__The_database_may_be_out_of_synch_ { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_OkDialog_Failure_updating_optimizations_in_the_optimiz" + "ation_library__The_database_may_be_out_of_synch_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to library. /// </summary> public static string EditOptimizationLibraryDlg_OkDialog_library { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_OkDialog_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please choose a library file for the optimization library.. /// </summary> public static string EditOptimizationLibraryDlg_OkDialog_Please_choose_a_library_file_for_the_optimization_library_ { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_OkDialog_Please_choose_a_library_file_for_the_optimiza" + "tion_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please enter a name for the optimization library.. /// </summary> public static string EditOptimizationLibraryDlg_OkDialog_Please_enter_a_name_for_the_optimization_library_ { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_OkDialog_Please_enter_a_name_for_the_optimization_libr" + "ary_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please use a full path to a library file for the optimization library.. /// </summary> public static string EditOptimizationLibraryDlg_OkDialog_Please_use_a_full_path_to_a_library_file_for_the_optimization_library_ { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_OkDialog_Please_use_a_full_path_to_a_library_file_for_" + "the_optimization_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} does not exist. Click the Create button to create a new library or the Open button to find the missing file.. /// </summary> public static string EditOptimizationLibraryDlg_OpenDatabase_The_file__0__does_not_exist__Click_the_Create_button_to_create_a_new_library_or_the_Open_button_to_find_the_missing_file_ { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_OpenDatabase_The_file__0__does_not_exist__Click_the_Cr" + "eate_button_to_create_a_new_library_or_the_Open_button_to_find_the_missing_file_" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} optimized collision energies. /// </summary> public static string EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_collision_energies { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_collision_energie" + "s", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} optimized collision energy. /// </summary> public static string EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_collision_energy { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_collision_energy", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} optimized compensation voltage. 
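        /// (Editor's illustrative note, not generator output: singular/plural resource
        /// pairs like this one and the one that follows are typically selected by count
        /// at the call site, e.g.
        /// <code>string.Format(count == 1 ? EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_compensation_voltage : EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_compensation_voltages, count)</code>;
        /// the selection logic shown is an assumption about the call site.)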
/// </summary> public static string EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_compensation_voltage { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_compensation_volt" + "age", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} optimized compensation voltages. /// </summary> public static string EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_compensation_voltages { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_compensation_volt" + "ages", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} optimized declustering potential. /// </summary> public static string EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_declustering_potential { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_declustering_pote" + "ntial", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} optimized declustering potentials. /// </summary> public static string EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_declustering_potentials { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_UpdateNumOptimizations__0__optimized_declustering_pote" + "ntials", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Collision Energy. /// </summary> public static string EditOptimizationLibraryDlg_UpdateValueHeader_Collision_Energy { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_UpdateValueHeader_Collision_Energy", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Declustering Potential. /// </summary> public static string EditOptimizationLibraryDlg_UpdateValueHeader_Declustering_Potential { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_UpdateValueHeader_Declustering_Potential", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The optimization with sequence {0}, charge {1}, fragment ion {2}, and product charge {3} appears in the {4} table more than once.. /// </summary> public static string EditOptimizationLibraryDlg_ValidateOptimizationList_The_optimization_with_sequence__0___charge__1___fragment_ion__2__and_product_charge__3__appears_in_the__4__table_more_than_once_ { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_ValidateOptimizationList_The_optimization_with_sequenc" + "e__0___charge__1___fragment_ion__2__and_product_charge__3__appears_in_the__4__ta" + "ble_more_than_once_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value {0} is not a valid modified peptide sequence.. /// </summary> public static string EditOptimizationLibraryDlg_ValidateOptimizationList_The_value__0__is_not_a_valid_modified_peptide_sequence_ { get { return ResourceManager.GetString("EditOptimizationLibraryDlg_ValidateOptimizationList_The_value__0__is_not_a_valid_" + "modified_peptide_sequence_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot train model without either decoys or second best peaks included.. 
/// </summary> public static string EditPeakScoringModelDlg_btnTrainModel_Click_Cannot_train_model_without_either_decoys_or_second_best_peaks_included_ { get { return ResourceManager.GetString("EditPeakScoringModelDlg_btnTrainModel_Click_Cannot_train_model_without_either_dec" + "oys_or_second_best_peaks_included_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed training the model:. /// </summary> public static string EditPeakScoringModelDlg_btnTrainModel_Click_Failed_training_the_model_ { get { return ResourceManager.GetString("EditPeakScoringModelDlg_btnTrainModel_Click_Failed_training_the_model_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Composite Score (Normalized). /// </summary> public static string EditPeakScoringModelDlg_EditPeakScoringModelDlg_Composite_Score__Normalized_ { get { return ResourceManager.GetString("EditPeakScoringModelDlg_EditPeakScoringModelDlg_Composite_Score__Normalized_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to P Value. /// </summary> public static string EditPeakScoringModelDlg_EditPeakScoringModelDlg_P_Value { get { return ResourceManager.GetString("EditPeakScoringModelDlg_EditPeakScoringModelDlg_P_Value", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Q value. /// </summary> public static string EditPeakScoringModelDlg_EditPeakScoringModelDlg_Q_value { get { return ResourceManager.GetString("EditPeakScoringModelDlg_EditPeakScoringModelDlg_Q_value", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Train a model to see composite score. /// </summary> public static string EditPeakScoringModelDlg_EditPeakScoringModelDlg_Train_a_model_to_see_composite_score { get { return ResourceManager.GetString("EditPeakScoringModelDlg_EditPeakScoringModelDlg_Train_a_model_to_see_composite_sc" + "ore", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Train a model to see P value distribution. /// </summary> public static string EditPeakScoringModelDlg_EditPeakScoringModelDlg_Train_a_model_to_see_P_value_distribution { get { return ResourceManager.GetString("EditPeakScoringModelDlg_EditPeakScoringModelDlg_Train_a_model_to_see_P_value_dist" + "ribution", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Train a model to see Q value distribution. /// </summary> public static string EditPeakScoringModelDlg_EditPeakScoringModelDlg_Train_a_model_to_see_Q_value_distribution { get { return ResourceManager.GetString("EditPeakScoringModelDlg_EditPeakScoringModelDlg_Train_a_model_to_see_Q_value_dist" + "ribution", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peak count. /// </summary> public static string EditPeakScoringModelDlg_InitGraphPane_Peak_count { get { return ResourceManager.GetString("EditPeakScoringModelDlg_InitGraphPane_Peak_count", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Score. /// </summary> public static string EditPeakScoringModelDlg_InitGraphPane_Score { get { return ResourceManager.GetString("EditPeakScoringModelDlg_InitGraphPane_Score", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The peak scoring model {0} already exists. 
/// </summary> public static string EditPeakScoringModelDlg_OkDialog_The_peak_scoring_model__0__already_exists { get { return ResourceManager.GetString("EditPeakScoringModelDlg_OkDialog_The_peak_scoring_model__0__already_exists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected Coefficient Sign. /// </summary> public static string EditPeakScoringModelDlg_OnDataBindingComplete_Unexpected_Coefficient_Sign { get { return ResourceManager.GetString("EditPeakScoringModelDlg_OnDataBindingComplete_Unexpected_Coefficient_Sign", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid model selection.. /// </summary> public static string EditPeakScoringModelDlg_SelectedModelItem_Invalid_Model_Selection { get { return ResourceManager.GetString("EditPeakScoringModelDlg_SelectedModelItem_Invalid_Model_Selection", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Calculating. /// </summary> public static string EditPeakScoringModelDlg_TrainModel_Calculating { get { return ResourceManager.GetString("EditPeakScoringModelDlg_TrainModel_Calculating", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Calculating score contributions. /// </summary> public static string EditPeakScoringModelDlg_TrainModel_Calculating_score_contributions { get { return ResourceManager.GetString("EditPeakScoringModelDlg_TrainModel_Calculating_score_contributions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There are no decoy peptides in the current document. Uncheck the Use Decoys Box.. /// </summary> public static string EditPeakScoringModelDlg_TrainModel_There_are_no_decoy_peptides_in_the_current_document__Uncheck_the_Use_Decoys_Box_ { get { return ResourceManager.GetString("EditPeakScoringModelDlg_TrainModel_There_are_no_decoy_peptides_in_the_current_doc" + "ument__Uncheck_the_Use_Decoys_Box_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Training. /// </summary> public static string EditPeakScoringModelDlg_TrainModel_Training { get { return ResourceManager.GetString("EditPeakScoringModelDlg_TrainModel_Training", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Scoring. /// </summary> public static string EditPeakScoringModelDlg_TrainModelClick_Scoring { get { return ResourceManager.GetString("EditPeakScoringModelDlg_TrainModelClick_Scoring", resourceCulture); } } /// <summary> /// Looks up a localized string similar to unknown. /// </summary> public static string EditPeakScoringModelDlg_UpdateCalculatorGraph_unknown { get { return ResourceManager.GetString("EditPeakScoringModelDlg_UpdateCalculatorGraph_unknown", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Combined normal distribution. /// </summary> public static string EditPeakScoringModelDlg_UpdateModelGraph_Combined_normal_distribution { get { return ResourceManager.GetString("EditPeakScoringModelDlg_UpdateModelGraph_Combined_normal_distribution", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Decoy normal distribution. /// </summary> public static string EditPeakScoringModelDlg_UpdateModelGraph_Decoy_normal_distribution { get { return ResourceManager.GetString("EditPeakScoringModelDlg_UpdateModelGraph_Decoy_normal_distribution", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Decoys. 
/// </summary> public static string EditPeakScoringModelDlg_UpdateModelGraph_Decoys { get { return ResourceManager.GetString("EditPeakScoringModelDlg_UpdateModelGraph_Decoys", resourceCulture); } } /// <summary> /// Looks up a localized string similar to P values of target peptides. /// </summary> public static string EditPeakScoringModelDlg_UpdateModelGraph_P_values_of_target_peptides { get { return ResourceManager.GetString("EditPeakScoringModelDlg_UpdateModelGraph_P_values_of_target_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Pi zero (expected nulls). /// </summary> public static string EditPeakScoringModelDlg_UpdateModelGraph_Pi_zero__expected_nulls_ { get { return ResourceManager.GetString("EditPeakScoringModelDlg_UpdateModelGraph_Pi_zero__expected_nulls_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Second best peaks. /// </summary> public static string EditPeakScoringModelDlg_UpdateModelGraph_Second_best_peaks { get { return ResourceManager.GetString("EditPeakScoringModelDlg_UpdateModelGraph_Second_best_peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Second best peaks normal distribution. /// </summary> public static string EditPeakScoringModelDlg_UpdateModelGraph_Second_best_peaks_normal_distribution { get { return ResourceManager.GetString("EditPeakScoringModelDlg_UpdateModelGraph_Second_best_peaks_normal_distribution", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Targets. /// </summary> public static string EditPeakScoringModelDlg_UpdateModelGraph_Targets { get { return ResourceManager.GetString("EditPeakScoringModelDlg_UpdateModelGraph_Targets", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Trained model is not applicable to current dataset.. /// </summary> public static string EditPeakScoringModelDlg_UpdateModelGraph_Trained_model_is_not_applicable_to_current_dataset_ { get { return ResourceManager.GetString("EditPeakScoringModelDlg_UpdateModelGraph_Trained_model_is_not_applicable_to_curre" + "nt_dataset_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Q values of target peptides. /// </summary> public static string EditPeakScoringModelDlg_UpdateQValueGraph_Q_values_of_target_peptides { get { return ResourceManager.GetString("EditPeakScoringModelDlg_UpdateQValueGraph_Q_values_of_target_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Discard. /// </summary> public static string EditPepModsDlg_EnsureLinkedPeptide_ButtonText_Discard { get { return ResourceManager.GetString("EditPepModsDlg_EnsureLinkedPeptide_ButtonText_Discard", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Crosslinks. /// </summary> public static string EditPepModsDlg_EnsureLinkedPeptide_ButtonText_Edit_Crosslinks { get { return ResourceManager.GetString("EditPepModsDlg_EnsureLinkedPeptide_ButtonText_Edit_Crosslinks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Removing the crosslink on this amino acid will result in a crosslinked peptide no longer being connected. Would you like to edit the crosslinks now or discard the disconnected peptides?. 
/// </summary> public static string EditPepModsDlg_EnsureLinkedPeptide_Discard_or_edit_disconnected_crosslinks { get { return ResourceManager.GetString("EditPepModsDlg_EnsureLinkedPeptide_Discard_or_edit_disconnected_crosslinks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isotope {0}:. /// </summary> public static string EditPepModsDlg_GetIsotopeLabelText_Isotope__0__ { get { return ResourceManager.GetString("EditPepModsDlg_GetIsotopeLabelText_Isotope__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Crosslink to {0}: {1} [{2}]. /// </summary> public static string EditPepModsDlg_GetTooltip_Crosslink_to__0____1____2__ { get { return ResourceManager.GetString("EditPepModsDlg_GetTooltip_Crosslink_to__0____1____2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid crosslink: {0}. /// </summary> public static string EditPepModsDlg_GetTooltip_Invalid_crosslink___0_ { get { return ResourceManager.GetString("EditPepModsDlg_GetTooltip_Invalid_crosslink___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Looplink: {0} [{1}]. /// </summary> public static string EditPepModsDlg_GetTooltip_Looplink___0____1__ { get { return ResourceManager.GetString("EditPepModsDlg_GetTooltip_Looplink___0____1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to One or more of the crosslinked peptides are no longer attached to this peptide. . /// </summary> public static string EditPepModsDlg_OkDialog_One_or_more_of_the_crosslinked_peptides_are_no_longer_attached_to_this_peptide__ { get { return ResourceManager.GetString("EditPepModsDlg_OkDialog_One_or_more_of_the_crosslinked_peptides_are_no_longer_att" + "ached_to_this_peptide__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error connecting to server: . /// </summary> public static string EditRemoteAccountDlg_TestSettings_Error_connecting_to_server__ { get { return ResourceManager.GetString("EditRemoteAccountDlg_TestSettings_Error_connecting_to_server__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Settings are correct. /// </summary> public static string EditRemoteAccountDlg_TestSettings_Settings_are_correct { get { return ResourceManager.GetString("EditRemoteAccountDlg_TestSettings_Settings_are_correct", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred while trying to authenticate.. /// </summary> public static string EditRemoteAccountDlg_TestUnifiAccount_An_error_occurred_while_trying_to_authenticate_ { get { return ResourceManager.GetString("EditRemoteAccountDlg_TestUnifiAccount_An_error_occurred_while_trying_to_authentic" + "ate_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An exception occurred while trying to fetch the directory listing.. /// </summary> public static string EditRemoteAccountDlg_TestUnifiAccount_An_exception_occurred_while_trying_to_fetch_the_directory_listing_ { get { return ResourceManager.GetString("EditRemoteAccountDlg_TestUnifiAccount_An_exception_occurred_while_trying_to_fetch" + "_the_directory_listing_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid server URL.. 
/// </summary> public static string EditRemoteAccountDlg_ValidateValues_Invalid_server_URL_ { get { return ResourceManager.GetString("EditRemoteAccountDlg_ValidateValues_Invalid_server_URL_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Server cannot be blank. /// </summary> public static string EditRemoteAccountDlg_ValidateValues_Server_cannot_be_blank { get { return ResourceManager.GetString("EditRemoteAccountDlg_ValidateValues_Server_cannot_be_blank", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Server URL must start with https:// or http://. /// </summary> public static string EditRemoteAccountDlg_ValidateValues_Server_URL_must_start_with_https____or_http___ { get { return ResourceManager.GetString("EditRemoteAccountDlg_ValidateValues_Server_URL_must_start_with_https____or_http__" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is already an account defined for the user {0} on the server {1}. /// </summary> public static string EditRemoteAccountDlg_ValidateValues_There_is_already_an_account_defined_for_the_user__0__on_the_server__1_ { get { return ResourceManager.GetString("EditRemoteAccountDlg_ValidateValues_There_is_already_an_account_defined_for_the_u" + "ser__0__on_the_server__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Username cannot be blank. /// </summary> public static string EditRemoteAccountDlg_ValidateValues_Username_cannot_be_blank { get { return ResourceManager.GetString("EditRemoteAccountDlg_ValidateValues_Username_cannot_be_blank", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must be greater than 0.. /// </summary> public static string EditRTDlg_OkDialog__0__must_be_greater_than_0 { get { return ResourceManager.GetString("EditRTDlg_OkDialog__0__must_be_greater_than_0", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retention time prediction requires a calculator algorithm.. /// </summary> public static string EditRTDlg_OkDialog_Retention_time_prediction_requires_a_calculator_algorithm { get { return ResourceManager.GetString("EditRTDlg_OkDialog_Retention_time_prediction_requires_a_calculator_algorithm", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The retention time regression &apos;{0}&apos; already exists.. /// </summary> public static string EditRTDlg_OkDialog_The_retention_time_regression__0__already_exists { get { return ResourceManager.GetString("EditRTDlg_OkDialog_The_retention_time_regression__0__already_exists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to ({0} peptides, R = {1}). /// </summary> public static string EditRTDlg_RecalcRegression__0__peptides_R__1__ { get { return ResourceManager.GetString("EditRTDlg_RecalcRegression__0__peptides_R__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to initialize the calculator {0}.. /// </summary> public static string EditRTDlg_ShowGraph_An_error_occurred_attempting_to_initialize_the_calculator__0__ { get { return ResourceManager.GetString("EditRTDlg_ShowGraph_An_error_occurred_attempting_to_initialize_the_calculator__0_" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Initializing. 
/// </summary> public static string EditRTDlg_ShowGraph_Initializing { get { return ResourceManager.GetString("EditRTDlg_ShowGraph_Initializing", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Initializing {0} calculator. /// </summary> public static string EditRTDlg_ShowGraph_Initializing__0__calculator { get { return ResourceManager.GetString("EditRTDlg_ShowGraph_Initializing__0__calculator", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Measured Time. /// </summary> public static string EditRTDlg_ShowGraph_Measured_Time { get { return ResourceManager.GetString("EditRTDlg_ShowGraph_Measured_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retention Times by Score. /// </summary> public static string EditRTDlg_ShowGraph_Retention_Times_by_Score { get { return ResourceManager.GetString("EditRTDlg_ShowGraph_Retention_Times_by_Score", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Calculate &lt;&lt;. /// </summary> public static string EditRTDlg_ShowPeptides_Calculate_Left { get { return ResourceManager.GetString("EditRTDlg_ShowPeptides_Calculate_Left", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Calculate &gt;&gt;. /// </summary> public static string EditRTDlg_ShowPeptides_Calculate_Right { get { return ResourceManager.GetString("EditRTDlg_ShowPeptides_Calculate_Right", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The {0} calculator cannot score any of the peptides.. /// </summary> public static string EditRTDlg_UpdateCalculator_The__0__calculator_cannot_score_any_of_the_peptides { get { return ResourceManager.GetString("EditRTDlg_UpdateCalculator_The__0__calculator_cannot_score_any_of_the_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The calculator cannot be used to score peptides. Please check its settings.. /// </summary> public static string EditRTDlg_UpdateCalculator_The_calculator_cannot_be_used_to_score_peptides_Please_check_its_settings { get { return ResourceManager.GetString("EditRTDlg_UpdateCalculator_The_calculator_cannot_be_used_to_score_peptides_Please" + "_check_its_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The server &apos;{0}&apos; already exists.. /// </summary> public static string EditServerDlg_OkDialog_The_server__0__already_exists_ { get { return ResourceManager.GetString("EditServerDlg_OkDialog_The_server__0__already_exists_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The server {0} is not a Panorama server.. /// </summary> public static string EditServerDlg_OkDialog_The_server__0__is_not_a_Panorama_server { get { return ResourceManager.GetString("EditServerDlg_OkDialog_The_server__0__is_not_a_Panorama_server", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The text &apos;{0}&apos; is not a valid server name.. /// </summary> public static string EditServerDlg_OkDialog_The_text__0__is_not_a_valid_server_name_ { get { return ResourceManager.GetString("EditServerDlg_OkDialog_The_text__0__is_not_a_valid_server_name_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The username and password could not be authenticated with the panorama server.. 
/// </summary> public static string EditServerDlg_OkDialog_The_username_and_password_could_not_be_authenticated_with_the_panorama_server { get { return ResourceManager.GetString("EditServerDlg_OkDialog_The_username_and_password_could_not_be_authenticated_with_" + "the_panorama_server", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unknown error connecting to the server {0}.. /// </summary> public static string EditServerDlg_OkDialog_Unknown_error_connecting_to_the_server__0__ { get { return ResourceManager.GetString("EditServerDlg_OkDialog_Unknown_error_connecting_to_the_server__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Verifying server information.. /// </summary> public static string EditServerDlg_OkDialog_Verifying_server_information { get { return ResourceManager.GetString("EditServerDlg_OkDialog_Verifying_server_information", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The server {0} does not exist.. /// </summary> public static string EditServerDlg_VerifyServerInformation_The_server__0__does_not_exist { get { return ResourceManager.GetString("EditServerDlg_VerifyServerInformation_The_server__0__does_not_exist", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Chemical formula:. /// </summary> public static string EditStaticModDlg_EditStaticModDlg_Chemical_formula_ { get { return ResourceManager.GetString("EditStaticModDlg_EditStaticModDlg_Chemical_formula_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Click &apos;Custom&apos; to use the name &apos;{0}&apos;.. /// </summary> public static string EditStaticModDlg_OkDialog_Click__Custom__to_use_the_name___0___ { get { return ResourceManager.GetString("EditStaticModDlg_OkDialog_Click__Custom__to_use_the_name___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Click &apos;Unimod&apos; to use the name &apos;{0}&apos;.. /// </summary> public static string EditStaticModDlg_OkDialog_Click__Unimod__to_use_the_name___0___ { get { return ResourceManager.GetString("EditStaticModDlg_OkDialog_Click__Unimod__to_use_the_name___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Continue?. /// </summary> public static string EditStaticModDlg_OkDialog_Continue { get { return ResourceManager.GetString("EditStaticModDlg_OkDialog_Continue", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Custom. /// </summary> public static string EditStaticModDlg_OkDialog_Custom { get { return ResourceManager.GetString("EditStaticModDlg_OkDialog_Custom", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Labeled atoms on terminal modification are not valid.. /// </summary> public static string EditStaticModDlg_OkDialog_Labeled_atoms_on_terminal_modification_are_not_valid { get { return ResourceManager.GetString("EditStaticModDlg_OkDialog_Labeled_atoms_on_terminal_modification_are_not_valid", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The modification &apos;{0}&apos; already exists.. /// </summary> public static string EditStaticModDlg_OkDialog_The_modification__0__already_exists { get { return ResourceManager.GetString("EditStaticModDlg_OkDialog_The_modification__0__already_exists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The variable checkbox only applies to precursor modification. 
Product ion losses are inherently variable.. /// </summary> public static string EditStaticModDlg_OkDialog_The_variable_checkbox_only_applies_to_precursor_modification_Product_ion_losses_are_inherently_variable { get { return ResourceManager.GetString("EditStaticModDlg_OkDialog_The_variable_checkbox_only_applies_to_precursor_modific" + "ation_Product_ion_losses_are_inherently_variable", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is a Unimod modification with the same settings.. /// </summary> public static string EditStaticModDlg_OkDialog_There_is_a_Unimod_modification_with_the_same_settings { get { return ResourceManager.GetString("EditStaticModDlg_OkDialog_There_is_a_Unimod_modification_with_the_same_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is an existing modification with the same settings:. /// </summary> public static string EditStaticModDlg_OkDialog_There_is_an_existing_modification_with_the_same_settings { get { return ResourceManager.GetString("EditStaticModDlg_OkDialog_There_is_an_existing_modification_with_the_same_setting" + "s", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This modification does not match the Unimod specifications for &apos;{0}&apos;.. /// </summary> public static string EditStaticModDlg_OkDialog_This_modification_does_not_match_the_Unimod_specifications_for___0___ { get { return ResourceManager.GetString("EditStaticModDlg_OkDialog_This_modification_does_not_match_the_Unimod_specificati" + "ons_for___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unimod. /// </summary> public static string EditStaticModDlg_OkDialog_Unimod { get { return ResourceManager.GetString("EditStaticModDlg_OkDialog_Unimod", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Use non-standard settings for this name?. /// </summary> public static string EditStaticModDlg_OkDialog_Use_non_standard_settings_for_this_name { get { return ResourceManager.GetString("EditStaticModDlg_OkDialog_Use_non_standard_settings_for_this_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Variable modifications must specify amino acid or terminus.. /// </summary> public static string EditStaticModDlg_OkDialog_Variable_modifications_must_specify_amino_acid_or_terminus { get { return ResourceManager.GetString("EditStaticModDlg_OkDialog_Variable_modifications_must_specify_amino_acid_or_termi" + "nus", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &apos;{0}&apos; is not a recognized Unimod name.. /// </summary> public static string EditStaticModDlg_SetModification___0___is_not_a_recognized_Unimod_name_ { get { return ResourceManager.GetString("EditStaticModDlg_SetModification___0___is_not_a_recognized_Unimod_name_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &lt;Show all...&gt;. /// </summary> public static string EditStaticModDlg_UpdateListAvailableMods_Show_all { get { return ResourceManager.GetString("EditStaticModDlg_UpdateListAvailableMods_Show_all", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &lt;Show common...&gt;. 
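        /// (Editor's illustrative note, not generator output: angle brackets and
        /// apostrophes in these generated summaries appear as XML entities (&amp;lt;,
        /// &amp;gt;, &amp;apos;) because the generator emits them inside XML documentation
        /// comments; the runtime resource value contains the literal characters.)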
/// </summary> public static string EditStaticModDlg_UpdateListAvailableMods_Show_common { get { return ResourceManager.GetString("EditStaticModDlg_UpdateListAvailableMods_Show_common", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} is not a valid element locator.. /// </summary> public static string ElementLocator_Parse__0__is_not_a_valid_element_locator_ { get { return ResourceManager.GetString("ElementLocator_Parse__0__is_not_a_valid_element_locator_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to End of text. /// </summary> public static string ElementLocator_ReadQuotedString_End_of_text { get { return ResourceManager.GetString("ElementLocator_ReadQuotedString_End_of_text", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} was unexpected in &apos;{1}&apos; at position {2}.. /// </summary> public static string ElementLocator_UnexpectedException__0__was_unexpected_in___1___at_position__2__ { get { return ResourceManager.GetString("ElementLocator_UnexpectedException__0__was_unexpected_in___1___at_position__2__", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap EmptyList { get { object obj = ResourceManager.GetObject("EmptyList", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to {0} new proteins. /// </summary> public static string EmptyProteinsDlg_EmptyProteinsDlg__0__new_proteins { get { return ResourceManager.GetString("EmptyProteinsDlg_EmptyProteinsDlg__0__new_proteins", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1 new protein. /// </summary> public static string EmptyProteinsDlg_EmptyProteinsDlg_1_new_protein { get { return ResourceManager.GetString("EmptyProteinsDlg_EmptyProteinsDlg_1_new_protein", resourceCulture); } } /// <summary> /// Looks up a localized string similar to EncyclopeDIA Libraries. /// </summary> public static string EncyclopediaLibrary_FILTER_ELIB_EncyclopeDIA_Libraries { get { return ResourceManager.GetString("EncyclopediaLibrary_FILTER_ELIB_EncyclopeDIA_Libraries", resourceCulture); } } /// <summary> /// Looks up a localized string similar to EncyclopeDIA Library. /// </summary> public static string EncyclopediaSpec_FILTER_ELIB_EncyclopeDIA_Library { get { return ResourceManager.GetString("EncyclopediaSpec_FILTER_ELIB_EncyclopeDIA_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Enzyme must have C-terminal cleavage to have C-terminal restrictions.. /// </summary> public static string Enzyme_Validate_Enzyme_must_have_C_terminal_cleavage_to_have_C_terminal_restrictions_ { get { return ResourceManager.GetString("Enzyme_Validate_Enzyme_must_have_C_terminal_cleavage_to_have_C_terminal_restricti" + "ons_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Enzyme must have N-terminal cleavage to have N-terminal restrictions.. /// </summary> public static string Enzyme_Validate_Enzyme_must_have_N_terminal_cleavage_to_have_N_terminal_restrictions_ { get { return ResourceManager.GetString("Enzyme_Validate_Enzyme_must_have_N_terminal_cleavage_to_have_N_terminal_restricti" + "ons_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Enzymes must have at least one cleavage point.. 
/// </summary> public static string Enzyme_Validate_Enzymes_must_have_at_least_one_cleavage_point { get { return ResourceManager.GetString("Enzyme_Validate_Enzymes_must_have_at_least_one_cleavage_point", resourceCulture); } } /// <summary> /// Looks up a localized string similar to En&amp;zymes:. /// </summary> public static string EnzymeList_Label_Enzymes { get { return ResourceManager.GetString("EnzymeList_Label_Enzymes", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Enzymes. /// </summary> public static string EnzymeList_Title_Edit_Enzymes { get { return ResourceManager.GetString("EnzymeList_Title_Edit_Enzymes", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Caused by ---&gt;. /// </summary> public static string ExceptionDialog_Caused_by_____ { get { return ResourceManager.GetString("ExceptionDialog_Caused_by_____", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Expand { get { object obj = ResourceManager.GetObject("Expand", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Invalid extended peptide format {0}. /// </summary> public static string ExPeptideRowReader_CalcTransitionInfo_Invalid_extended_peptide_format__0__ { get { return ResourceManager.GetString("ExPeptideRowReader_CalcTransitionInfo_Invalid_extended_peptide_format__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isotope labeled entry found without matching settings.. /// </summary> public static string ExPeptideRowReader_Create_Isotope_labeled_entry_found_without_matching_settings { get { return ResourceManager.GetString("ExPeptideRowReader_Create_Isotope_labeled_entry_found_without_matching_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check the Modifications tab in Transition Settings.. /// </summary> public static string ExPeptideRowReaderCreateCheck_the_Modifications_tab_in_Transition_Settings { get { return ResourceManager.GetString("ExPeptideRowReaderCreateCheck_the_Modifications_tab_in_Transition_Settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Modification type {0} not found.. /// </summary> public static string ExplicitMods_ChangeModifications_Modification_type__0__not_found { get { return ResourceManager.GetString("ExplicitMods_ChangeModifications_Modification_type__0__not_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to At least one chromatogram type must be selected.. /// </summary> public static string ExportChromatogramDlg_OkDialog_At_least_one_chromatogram_type_must_be_selected { get { return ResourceManager.GetString("ExportChromatogramDlg_OkDialog_At_least_one_chromatogram_type_must_be_selected", resourceCulture); } } /// <summary> /// Looks up a localized string similar to At least one file must be selected. /// </summary> public static string ExportChromatogramDlg_OkDialog_At_least_one_file_must_be_selected { get { return ResourceManager.GetString("ExportChromatogramDlg_OkDialog_At_least_one_file_must_be_selected", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Chromatogram Export Files. 
/// </summary> public static string ExportChromatogramDlg_OkDialog_Chromatogram_Export_Files { get { return ResourceManager.GetString("ExportChromatogramDlg_OkDialog_Chromatogram_Export_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Export Chromatograms. /// </summary> public static string ExportChromatogramDlg_OkDialog_Export_Chromatogram { get { return ResourceManager.GetString("ExportChromatogramDlg_OkDialog_Export_Chromatogram", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exporting Chromatograms. /// </summary> public static string ExportChromatogramDlg_OkDialog_Exporting_Chromatograms { get { return ResourceManager.GetString("ExportChromatogramDlg_OkDialog_Exporting_Chromatograms", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed attempting to save chromatograms to {0}.. /// </summary> public static string ExportChromatogramDlg_OkDialog_Failed_attempting_to_save_chromatograms_to__0__ { get { return ResourceManager.GetString("ExportChromatogramDlg_OkDialog_Failed_attempting_to_save_chromatograms_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to export.. /// </summary> public static string ExportDlgProperties_PerformLongExport_An_error_occurred_attempting_to_export { get { return ResourceManager.GetString("ExportDlgProperties_PerformLongExport_An_error_occurred_attempting_to_export", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exporting Methods. /// </summary> public static string ExportDlgProperties_PerformLongExport_Exporting_Methods { get { return ResourceManager.GetString("ExportDlgProperties_PerformLongExport_Exporting_Methods", resourceCulture); } } /// <summary> /// Looks up a localized string similar to IsolationList. /// </summary> public static string ExportFileTypeExtension_LOCALIZED_VALUES_IsolationList { get { return ResourceManager.GetString("ExportFileTypeExtension_LOCALIZED_VALUES_IsolationList", resourceCulture); } } /// <summary> /// Looks up a localized string similar to List. /// </summary> public static string ExportFileTypeExtension_LOCALIZED_VALUES_List { get { return ResourceManager.GetString("ExportFileTypeExtension_LOCALIZED_VALUES_List", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Method. /// </summary> public static string ExportFileTypeExtension_LOCALIZED_VALUES_Method { get { return ResourceManager.GetString("ExportFileTypeExtension_LOCALIZED_VALUES_Method", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invariant. /// </summary> public static string ExportLiveReportDlg_ExportLiveReportDlg_Invariant { get { return ResourceManager.GetString("ExportLiveReportDlg_ExportLiveReportDlg_Invariant", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Preview: . /// </summary> public static string ExportLiveReportDlg_ShowPreview_Preview__ { get { return ResourceManager.GetString("ExportLiveReportDlg_ShowPreview_Preview__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Method. /// </summary> public static string ExportMethodDlg_btnBrowseTemplate_Click__0__Method { get { return ResourceManager.GetString("ExportMethodDlg_btnBrowseTemplate_Click__0__Method", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Method Template. 
/// </summary> public static string ExportMethodDlg_btnBrowseTemplate_Click_Method_Template { get { return ResourceManager.GetString("ExportMethodDlg_btnBrowseTemplate_Click_Method_Template", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The chosen folder does not appear to contain a Bruker TOF method template. The folder is expected to have a .m extension, and contain the file submethods.xml.. /// </summary> public static string ExportMethodDlg_btnBrowseTemplate_Click_The_chosen_folder_does_not_appear_to_contain_a_Bruker_TOF_method_template___The_folder_is_expected_to_have_a__m_extension__and_contain_the_file_submethods_xml_ { get { return ResourceManager.GetString("ExportMethodDlg_btnBrowseTemplate_Click_The_chosen_folder_does_not_appear_to_cont" + "ain_a_Bruker_TOF_method_template___The_folder_is_expected_to_have_a__m_extension" + "__and_contain_the_file_submethods_xml_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The chosen folder does not appear to contain an Agilent QQQ method template. The folder is expected to have a .m extension, and contain the file qqqacqmethod.xsd.. /// </summary> public static string ExportMethodDlg_btnBrowseTemplate_Click_The_chosen_folder_does_not_appear_to_contain_an_Agilent_QQQ_method_template_The_folder_is_expected_to_have_a_m_extension_and_contain_the_file_qqqacqmethod_xsd { get { return ResourceManager.GetString("ExportMethodDlg_btnBrowseTemplate_Click_The_chosen_folder_does_not_appear_to_cont" + "ain_an_Agilent_QQQ_method_template_The_folder_is_expected_to_have_a_m_extension_" + "and_contain_the_file_qqqacqmethod_xsd", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred.. /// </summary> public static string ExportMethodDlg_btnGraph_Click_An_error_occurred_ { get { return ResourceManager.GetString("ExportMethodDlg_btnGraph_Click_An_error_occurred_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Grouping peptides by protein has not yet been implemented for scheduled methods.. /// </summary> public static string ExportMethodDlg_cbIgnoreProteins_CheckedChanged_Grouping_peptides_by_protein_has_not_yet_been_implemented_for_scheduled_methods_ { get { return ResourceManager.GetString("ExportMethodDlg_cbIgnoreProteins_CheckedChanged_Grouping_peptides_by_protein_has_" + "not_yet_been_implemented_for_scheduled_methods_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Export of DIA isolation lists is not yet supported for {0}.. /// </summary> public static string ExportMethodDlg_comboInstrument_SelectedIndexChanged_Export_of_DIA_isolation_lists_is_not_yet_supported_for__0__ { get { return ResourceManager.GetString("ExportMethodDlg_comboInstrument_SelectedIndexChanged_Export_of_DIA_isolation_list" + "s_is_not_yet_supported_for__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Export a method with extra transitions for finding an optimal value.. /// </summary> public static string ExportMethodDlg_comboOptimizing_SelectedIndexChanged_Export_a_method_with_extra_transitions_for_finding_an_optimal_value_ { get { return ResourceManager.GetString("ExportMethodDlg_comboOptimizing_SelectedIndexChanged_Export_a_method_with_extra_t" + "ransitions_for_finding_an_optimal_value_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Optimizing for {0} will produce an additional {1} transitions per transition.. 
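/// (Usage sketch: a two-placeholder string like this one is normally filled by the caller
/// with composite formatting. The class name Resources and the argument values below are
/// hypothetical, shown only to illustrate the call.)
/// string msg = string.Format(
///     Resources.ExportMethodDlg_comboOptimizing_SelectedIndexChanged_Optimizing_for__0__will_produce_an_additional__1__transitions_per_transition_,
///     "Collision Energy", 10);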
/// </summary> public static string ExportMethodDlg_comboOptimizing_SelectedIndexChanged_Optimizing_for__0__will_produce_an_additional__1__transitions_per_transition_ { get { return ResourceManager.GetString("ExportMethodDlg_comboOptimizing_SelectedIndexChanged_Optimizing_for__0__will_prod" + "uce_an_additional__1__transitions_per_transition_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check the calculator settings.. /// </summary> public static string ExportMethodDlg_comboTargetType_SelectedIndexChanged_Check_the_calculator_settings { get { return ResourceManager.GetString("ExportMethodDlg_comboTargetType_SelectedIndexChanged_Check_the_calculator_setting" + "s", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check to make sure the document contains times for all of the required standard peptides.. /// </summary> public static string ExportMethodDlg_comboTargetType_SelectedIndexChanged_Check_to_make_sure_the_document_contains_times_for_all_of_the_required_standard_peptides { get { return ResourceManager.GetString("ExportMethodDlg_comboTargetType_SelectedIndexChanged_Check_to_make_sure_the_docum" + "ent_contains_times_for_all_of_the_required_standard_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retention time prediction calculator is unable to score.. /// </summary> public static string ExportMethodDlg_comboTargetType_SelectedIndexChanged_Retention_time_prediction_calculator_is_unable_to_score { get { return ResourceManager.GetString("ExportMethodDlg_comboTargetType_SelectedIndexChanged_Retention_time_prediction_ca" + "lculator_is_unable_to_score", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retention time predictor is unable to auto-calculate a regression.. /// </summary> public static string ExportMethodDlg_comboTargetType_SelectedIndexChanged_Retention_time_predictor_is_unable_to_auto_calculate_a_regression { get { return ResourceManager.GetString("ExportMethodDlg_comboTargetType_SelectedIndexChanged_Retention_time_predictor_is_" + "unable_to_auto_calculate_a_regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Scheduled methods are not supported for the selected instrument.. /// </summary> public static string ExportMethodDlg_comboTargetType_SelectedIndexChanged_Sched_Not_Supported_Err_Text { get { return ResourceManager.GetString("ExportMethodDlg_comboTargetType_SelectedIndexChanged_Sched_Not_Supported_Err_Text" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Scheduled methods are not yet supported for DIA acquisition.. /// </summary> public static string ExportMethodDlg_comboTargetType_SelectedIndexChanged_Scheduled_methods_are_not_yet_supported_for_DIA_acquisition { get { return ResourceManager.GetString("ExportMethodDlg_comboTargetType_SelectedIndexChanged_Scheduled_methods_are_not_ye" + "t_supported_for_DIA_acquisition", resourceCulture); } } /// <summary> /// Looks up a localized string similar to To export a scheduled method, you must first choose a retention time predictor in Peptide Settings / Prediction.. 
/// </summary> public static string ExportMethodDlg_comboTargetType_SelectedIndexChanged_To_export_a_scheduled_list_you_must_first_choose_a_retention_time_predictor_in_Peptide_Settings_Prediction { get { return ResourceManager.GetString("ExportMethodDlg_comboTargetType_SelectedIndexChanged_To_export_a_scheduled_list_y" + "ou_must_first_choose_a_retention_time_predictor_in_Peptide_Settings_Prediction", resourceCulture); } } /// <summary> /// Looks up a localized string similar to To export a scheduled method, you must first choose a retention time predictor in Peptide Settings / Prediction, or import results for all peptides in the document.. /// </summary> public static string ExportMethodDlg_comboTargetType_SelectedIndexChanged_To_export_a_scheduled_list_you_must_first_choose_a_retention_time_predictor_in_Peptide_Settings_Prediction_or_import_results_for_all_peptides_in_the_document { get { return ResourceManager.GetString("ExportMethodDlg_comboTargetType_SelectedIndexChanged_To_export_a_scheduled_list_y" + "ou_must_first_choose_a_retention_time_predictor_in_Peptide_Settings_Prediction_o" + "r_import_results_for_all_peptides_in_the_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to To export a scheduled method, you must first import results for all peptides in the document.. /// </summary> public static string ExportMethodDlg_comboTargetType_SelectedIndexChanged_To_export_a_scheduled_list_you_must_first_import_results_for_all_peptides_in_the_document { get { return ResourceManager.GetString("ExportMethodDlg_comboTargetType_SelectedIndexChanged_To_export_a_scheduled_list_y" + "ou_must_first_import_results_for_all_peptides_in_the_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ma&amp;x concurrent precursors:. /// </summary> public static string ExportMethodDlg_CONCUR_PREC_TXT { get { return ResourceManager.GetString("ExportMethodDlg_CONCUR_PREC_TXT", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ma&amp;x concurrent transitions:. /// </summary> public static string ExportMethodDlg_CONCUR_TRANS_TXT { get { return ResourceManager.GetString("ExportMethodDlg_CONCUR_TRANS_TXT", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Dwell time (ms):. /// </summary> public static string ExportMethodDlg_DWELL_TIME_TXT { get { return ResourceManager.GetString("ExportMethodDlg_DWELL_TIME_TXT", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Export Isolation List. /// </summary> public static string ExportMethodDlg_ExportMethodDlg_Export_Isolation_List { get { return ResourceManager.GetString("ExportMethodDlg_ExportMethodDlg_Export_Isolation_List", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Export Transition List. /// </summary> public static string ExportMethodDlg_ExportMethodDlg_Export_Transition_List { get { return ResourceManager.GetString("ExportMethodDlg_ExportMethodDlg_Export_Transition_List", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A template file is required to export a method.. /// </summary> public static string ExportMethodDlg_OkDialog_A_template_file_is_required_to_export_a_method { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_A_template_file_is_required_to_export_a_method", resourceCulture); } } /// <summary> /// Looks up a localized string similar to All targets must have an ion mobility value. 
These can be set explicitly or contained in an ion mobility library or spectral library. The following ion mobility values are missing:. /// </summary> public static string ExportMethodDlg_OkDialog_All_targets_must_have_an_ion_mobility_value__These_can_be_set_explicitly_or_contained_in_an_ion_mobility_library_or_spectral_library__The_following_ion_mobility_values_are_missing_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_All_targets_must_have_an_ion_mobility_value__These_can_b" + "e_set_explicitly_or_contained_in_an_ion_mobility_library_or_spectral_library__Th" + "e_following_ion_mobility_values_are_missing_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to continue?. /// </summary> public static string ExportMethodDlg_OkDialog_Are_you_sure_you_want_to_continue { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Are_you_sure_you_want_to_continue", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to continue?. /// </summary> public static string ExportMethodDlg_OkDialog_Are_you_sure_you_want_to_continue_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Are_you_sure_you_want_to_continue_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Collision Energy:. /// </summary> public static string ExportMethodDlg_OkDialog_Collision_Energy { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Collision_Energy", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Compensation Voltage:. /// </summary> public static string ExportMethodDlg_OkDialog_Compensation_Voltage_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Compensation_Voltage_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Compensation voltage optimization should be run on one transition per precursor. The best transition could not be determined for the following precursors:. /// </summary> public static string ExportMethodDlg_OkDialog_Compensation_voltage_optimization_should_be_run_on_one_transition_per_peptide__and_the_best_transition_cannot_be_determined_for_the_following_precursors_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Compensation_voltage_optimization_should_be_run_on_one_t" + "ransition_per_peptide__and_the_best_transition_cannot_be_determined_for_the_foll" + "owing_precursors_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Decluster Potential:. /// </summary> public static string ExportMethodDlg_OkDialog_Declustering_Potential { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Declustering_Potential", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Export {0} Method. /// </summary> public static string ExportMethodDlg_OkDialog_Export__0__Method { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Export__0__Method", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Export of DIA method is not supported for {0}.. /// </summary> public static string ExportMethodDlg_OkDialog_Export_of_DIA_method_is_not_supported_for__0__ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Export_of_DIA_method_is_not_supported_for__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation List. 
/// </summary> public static string ExportMethodDlg_OkDialog_Isolation_List { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Isolation_List", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Method File. /// </summary> public static string ExportMethodDlg_OkDialog_Method_File { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Method_File", resourceCulture); } } /// <summary> /// Looks up a localized string similar to None. /// </summary> public static string ExportMethodDlg_OkDialog_None { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_None", resourceCulture); } } /// <summary> /// Looks up a localized string similar to OK. /// </summary> public static string ExportMethodDlg_OkDialog_OK { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_OK", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Orbitrap. /// </summary> public static string ExportMethodDlg_OkDialog_Orbitrap { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Orbitrap", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Provide transition ranking information through imported results, a spectral library, or choose only one target transition per precursor.. /// </summary> public static string ExportMethodDlg_OkDialog_Provide_transition_ranking_information_through_imported_results__a_spectral_library__or_choose_only_one_target_transition_per_precursor_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Provide_transition_ranking_information_through_imported_" + "results__a_spectral_library__or_choose_only_one_target_transition_per_precursor_" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The DIA isolation list must have prespecified windows.. /// </summary> public static string ExportMethodDlg_OkDialog_The_DIA_isolation_list_must_have_prespecified_windows_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_The_DIA_isolation_list_must_have_prespecified_windows_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document does not contain all of the retention time standard peptides.. /// </summary> public static string ExportMethodDlg_OkDialog_The_document_does_not_contain_all_of_the_retention_time_standard_peptides { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_The_document_does_not_contain_all_of_the_retention_time_" + "standard_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The folder {0} does not appear to contain a Bruker TOF method template. The folder is expected to have a .m extension, and contain the file submethods.xml.. /// </summary> public static string ExportMethodDlg_OkDialog_The_folder__0__does_not_appear_to_contain_a_Bruker_TOF_method_template___The_folder_is_expected_to_have_a__m_extension__and_contain_the_file_submethods_xml_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_The_folder__0__does_not_appear_to_contain_a_Bruker_TOF_m" + "ethod_template___The_folder_is_expected_to_have_a__m_extension__and_contain_the_" + "file_submethods_xml_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The folder {0} does not appear to contain an Agilent QQQ method template. The folder is expected to have a .m extension, and contain the file qqqacqmethod.xsd.. 
/// </summary> public static string ExportMethodDlg_OkDialog_The_folder__0__does_not_appear_to_contain_an_Agilent_QQQ_method_template_The_folder_is_expected_to_have_a_m_extension_and_contain_the_file_qqqacqmethod_xsd { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_The_folder__0__does_not_appear_to_contain_an_Agilent_QQQ" + "_method_template_The_folder_is_expected_to_have_a_m_extension_and_contain_the_fi" + "le_qqqacqmethod_xsd", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The precursor mass analyzer type is not set to {0} in Transition Settings (under the Full Scan tab).. /// </summary> public static string ExportMethodDlg_OkDialog_The_precursor_mass_analyzer_type_is_not_set_to__0__in_Transition_Settings_under_the_Full_Scan_tab { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_The_precursor_mass_analyzer_type_is_not_set_to__0__in_Tr" + "ansition_Settings_under_the_Full_Scan_tab", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The product mass analyzer type is not set to {0} in Transition Settings (under the Full Scan tab).. /// </summary> public static string ExportMethodDlg_OkDialog_The_product_mass_analyzer_type_is_not_set_to__0__in_Transition_Settings_under_the_Full_Scan_tab { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_The_product_mass_analyzer_type_is_not_set_to__0__in_Tran" + "sition_Settings_under_the_Full_Scan_tab", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The settings for this document do not match the instrument type {0}:. /// </summary> public static string ExportMethodDlg_OkDialog_The_settings_for_this_document_do_not_match_the_instrument_type__0__ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_The_settings_for_this_document_do_not_match_the_instrume" + "nt_type__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The template file {0} does not exist.. /// </summary> public static string ExportMethodDlg_OkDialog_The_template_file__0__does_not_exist { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_The_template_file__0__does_not_exist", resourceCulture); } } /// <summary> /// Looks up a localized string similar to TOF. /// </summary> public static string ExportMethodDlg_OkDialog_TOF { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_TOF", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transition List. /// </summary> public static string ExportMethodDlg_OkDialog_Transition_List { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Transition_List", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Would you like to use the defaults instead?. /// </summary> public static string ExportMethodDlg_OkDialog_Would_you_like_to_use_the_defaults_instead { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Would_you_like_to_use_the_defaults_instead", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You are missing any optimized compensation voltages for the following:. 
/// </summary> public static string ExportMethodDlg_OkDialog_You_are_missing_any_optimized_compensation_voltages_for_the_following_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_You_are_missing_any_optimized_compensation_voltages_for_" + "the_following_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You are missing compensation voltages for the following:. /// </summary> public static string ExportMethodDlg_OkDialog_You_are_missing_compensation_voltages_for_the_following_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_You_are_missing_compensation_voltages_for_the_following_" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You are missing fine tune optimized compensation voltages.. /// </summary> public static string ExportMethodDlg_OkDialog_You_are_missing_fine_tune_optimized_compensation_voltages_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_You_are_missing_fine_tune_optimized_compensation_voltage" + "s_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You are missing fine tune optimized compensation voltages for the following:. /// </summary> public static string ExportMethodDlg_OkDialog_You_are_missing_fine_tune_optimized_compensation_voltages_for_the_following_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_You_are_missing_fine_tune_optimized_compensation_voltage" + "s_for_the_following_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You are missing medium tune optimized compensation voltages for the following:. /// </summary> public static string ExportMethodDlg_OkDialog_You_are_missing_medium_tune_optimized_compensation_voltages_for_the_following_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_You_are_missing_medium_tune_optimized_compensation_volta" + "ges_for_the_following_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You can set explicit compensation voltages for these, or add their values to a document optimization library in Transition Settings under the Prediction tab.. /// </summary> public static string ExportMethodDlg_OkDialog_You_can_set_explicit_compensation_voltages_for_these__or_add_their_values_to_a_document_optimization_library_in_Transition_Settings_under_the_Prediction_tab_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_You_can_set_explicit_compensation_voltages_for_these__or" + "_add_their_values_to_a_document_optimization_library_in_Transition_Settings_unde" + "r_the_Prediction_tab_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You have only rough tune optimized compensation voltages.. /// </summary> public static string ExportMethodDlg_OkDialog_You_have_only_rough_tune_optimized_compensation_voltages_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_You_have_only_rough_tune_optimized_compensation_voltages" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You will not be able to use retention time prediction with acquired results.. 
/// </summary> public static string ExportMethodDlg_OkDialog_You_will_not_be_able_to_use_retention_time_prediction_with_acquired_results { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_You_will_not_be_able_to_use_retention_time_prediction_wi" + "th_acquired_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Your document does not contain compensation voltage results, but compensation voltage is set under transition settings.. /// </summary> public static string ExportMethodDlg_OkDialog_Your_document_does_not_contain_compensation_voltage_results__but_compensation_voltage_is_set_under_transition_settings_ { get { return ResourceManager.GetString("ExportMethodDlg_OkDialog_Your_document_does_not_contain_compensation_voltage_resu" + "lts__but_compensation_voltage_is_set_under_transition_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ma&amp;x precursors per sample injection:. /// </summary> public static string ExportMethodDlg_PREC_PER_SAMPLE_INJ_TXT { get { return ResourceManager.GetString("ExportMethodDlg_PREC_PER_SAMPLE_INJ_TXT", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Run &amp;duration (min):. /// </summary> public static string ExportMethodDlg_RUN_DURATION_TXT { get { return ResourceManager.GetString("ExportMethodDlg_RUN_DURATION_TXT", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Scheduled. /// </summary> public static string ExportMethodDlg_SetMethodType_Scheduled { get { return ResourceManager.GetString("ExportMethodDlg_SetMethodType_Scheduled", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Standard. /// </summary> public static string ExportMethodDlg_SetMethodType_Standard { get { return ResourceManager.GetString("ExportMethodDlg_SetMethodType_Standard", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Multiple methods is not yet supported for {0}.. /// </summary> public static string ExportMethodDlg_StrategyCheckChanged_Multiple_methods_is_not_yet_supported_for__0__ { get { return ResourceManager.GetString("ExportMethodDlg_StrategyCheckChanged_Multiple_methods_is_not_yet_supported_for__0" + "__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Only one method can be exported in DIA mode.. /// </summary> public static string ExportMethodDlg_StrategyCheckChanged_Only_one_method_can_be_exported_in_DIA_mode { get { return ResourceManager.GetString("ExportMethodDlg_StrategyCheckChanged_Only_one_method_can_be_exported_in_DIA_mode", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ma&amp;x transitions per sample injection:. /// </summary> public static string ExportMethodDlg_TRANS_PER_SAMPLE_INJ_TXT { get { return ResourceManager.GetString("ExportMethodDlg_TRANS_PER_SAMPLE_INJ_TXT", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The precursor {0} for {1} has {2} transitions, which exceeds the current maximum {3}.. 
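/// (Usage sketch: the four placeholders are filled in order by the caller, assuming the
/// generated class is named Resources; the argument values are hypothetical.)
/// string msg = string.Format(
///     Resources.ExportMethodDlg_ValidatePrecursorFit_The_precursor__0__for__1__has__2__transitions__which_exceeds_the_current_maximum__3__,
///     "785.8421++", "PEPTIDEK", 12, 10);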
/// </summary> public static string ExportMethodDlg_ValidatePrecursorFit_The_precursor__0__for__1__has__2__transitions__which_exceeds_the_current_maximum__3__ { get { return ResourceManager.GetString("ExportMethodDlg_ValidatePrecursorFit_The_precursor__0__for__1__has__2__transition" + "s__which_exceeds_the_current_maximum__3__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The precursor {0} for {1} requires {2} transitions to optimize, which exceeds the current maximum {3}.. /// </summary> public static string ExportMethodDlg_ValidatePrecursorFit_The_precursor__0__for__1__requires__2__transitions_to_optimize__which_exceeds_the_current_maximum__3__ { get { return ResourceManager.GetString("ExportMethodDlg_ValidatePrecursorFit_The_precursor__0__for__1__requires__2__trans" + "itions_to_optimize__which_exceeds_the_current_maximum__3__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must contain a value.. /// </summary> public static string ExportMethodDlg_ValidateSettings__0__must_contain_a_value { get { return ResourceManager.GetString("ExportMethodDlg_ValidateSettings__0__must_contain_a_value", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot export fine tune transition list. The following precursors are missing medium tune results:. /// </summary> public static string ExportMethodDlg_ValidateSettings_Cannot_export_fine_tune_transition_list__The_following_precursors_are_missing_medium_tune_results_ { get { return ResourceManager.GetString("ExportMethodDlg_ValidateSettings_Cannot_export_fine_tune_transition_list__The_fol" + "lowing_precursors_are_missing_medium_tune_results_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot export medium tune transition list. The following precursors are missing rough tune results:. /// </summary> public static string ExportMethodDlg_ValidateSettings_Cannot_export_medium_tune_transition_list__The_following_precursors_are_missing_rough_tune_results_ { get { return ResourceManager.GetString("ExportMethodDlg_ValidateSettings_Cannot_export_medium_tune_transition_list__The_f" + "ollowing_precursors_are_missing_rough_tune_results_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The {0} instrument lacks support for direct method export for triggered acquisition.. /// </summary> public static string ExportMethodDlg_VerifySchedulingAllowed_The__0__instrument_lacks_support_for_direct_method_export_for_triggered_acquisition_ { get { return ResourceManager.GetString("ExportMethodDlg_VerifySchedulingAllowed_The__0__instrument_lacks_support_for_dire" + "ct_method_export_for_triggered_acquisition_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The current document contains peptides without enough information to rank transitions for triggered acquisition.. /// </summary> public static string ExportMethodDlg_VerifySchedulingAllowed_The_current_document_contains_peptides_without_enough_information_to_rank_transitions_for_triggered_acquisition_ { get { return ResourceManager.GetString("ExportMethodDlg_VerifySchedulingAllowed_The_current_document_contains_peptides_wi" + "thout_enough_information_to_rank_transitions_for_triggered_acquisition_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The instrument type {0} does not support triggered acquisition.. 
/// </summary> public static string ExportMethodDlg_VerifySchedulingAllowed_The_instrument_type__0__does_not_support_triggered_acquisition_ { get { return ResourceManager.GetString("ExportMethodDlg_VerifySchedulingAllowed_The_instrument_type__0__does_not_support_" + "triggered_acquisition_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Triggered acquisition requires a spectral library or imported results in order to rank transitions.. /// </summary> public static string ExportMethodDlg_VerifySchedulingAllowed_Triggered_acquistion_requires_a_spectral_library_or_imported_results_in_order_to_rank_transitions_ { get { return ResourceManager.GetString("ExportMethodDlg_VerifySchedulingAllowed_Triggered_acquistion_requires_a_spectral_" + "library_or_imported_results_in_order_to_rank_transitions_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You must export a {0} transition list and manually import it into a method file using vendor software.. /// </summary> public static string ExportMethodDlg_VerifySchedulingAllowed_You_must_export_a__0__transition_list_and_manually_import_it_into_a_method_file_using_vendor_software_ { get { return ResourceManager.GetString("ExportMethodDlg_VerifySchedulingAllowed_You_must_export_a__0__transition_list_and" + "_manually_import_it_into_a_method_file_using_vendor_software_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Concurrent frames. /// </summary> public static string ExportMethodScheduleGraph_ExportMethodScheduleGraph_Concurrent_frames { get { return ResourceManager.GetString("ExportMethodScheduleGraph_ExportMethodScheduleGraph_Concurrent_frames", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Max sampling times. /// </summary> public static string ExportMethodScheduleGraph_ExportMethodScheduleGraph_Max_sampling_times { get { return ResourceManager.GetString("ExportMethodScheduleGraph_ExportMethodScheduleGraph_Max_sampling_times", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mean sampling times. /// </summary> public static string ExportMethodScheduleGraph_ExportMethodScheduleGraph_Mean_sampling_times { get { return ResourceManager.GetString("ExportMethodScheduleGraph_ExportMethodScheduleGraph_Mean_sampling_times", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Redundancy of targets. /// </summary> public static string ExportMethodScheduleGraph_ExportMethodScheduleGraph_Redundancy_of_targets { get { return ResourceManager.GetString("ExportMethodScheduleGraph_ExportMethodScheduleGraph_Redundancy_of_targets", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Target table. /// </summary> public static string ExportMethodScheduleGraph_ExportMethodScheduleGraph_Target_table { get { return ResourceManager.GetString("ExportMethodScheduleGraph_ExportMethodScheduleGraph_Target_table", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Targets per frame. /// </summary> public static string ExportMethodScheduleGraph_ExportMethodScheduleGraph_Targets_per_frame { get { return ResourceManager.GetString("ExportMethodScheduleGraph_ExportMethodScheduleGraph_Targets_per_frame", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Scheduled.
/// </summary> public static string ExportMethodTypeExtension_LOCALIZED_VALUES_Scheduled { get { return ResourceManager.GetString("ExportMethodTypeExtension_LOCALIZED_VALUES_Scheduled", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Standard. /// </summary> public static string ExportMethodTypeExtension_LOCALIZED_VALUES_Standard { get { return ResourceManager.GetString("ExportMethodTypeExtension_LOCALIZED_VALUES_Standard", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Triggered. /// </summary> public static string ExportMethodTypeExtension_LOCALIZED_VALUES_Triggered { get { return ResourceManager.GetString("ExportMethodTypeExtension_LOCALIZED_VALUES_Triggered", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Collision Energy. /// </summary> public static string ExportOptimize_CE_Collision_Energy { get { return ResourceManager.GetString("ExportOptimize_CE_Collision_Energy", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Compensation Voltage. /// </summary> public static string ExportOptimize_COV_Compensation_Voltage { get { return ResourceManager.GetString("ExportOptimize_COV_Compensation_Voltage", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fine Tune. /// </summary> public static string ExportOptimize_COV_FINE_Fine_Tune { get { return ResourceManager.GetString("ExportOptimize_COV_FINE_Fine_Tune", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Medium Tune. /// </summary> public static string ExportOptimize_COV_MEDIUM_Medium_Tune { get { return ResourceManager.GetString("ExportOptimize_COV_MEDIUM_Medium_Tune", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Rough Tune. /// </summary> public static string ExportOptimize_COV_ROUGH_Rough_Tune { get { return ResourceManager.GetString("ExportOptimize_COV_ROUGH_Rough_Tune", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Declustering Potential. /// </summary> public static string ExportOptimize_DP_Declustering_Potential { get { return ResourceManager.GetString("ExportOptimize_DP_Declustering_Potential", resourceCulture); } } /// <summary> /// Looks up a localized string similar to None. /// </summary> public static string ExportOptimize_NONE_None { get { return ResourceManager.GetString("ExportOptimize_NONE_None", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unrecognized instrument type {0}.. /// </summary> public static string ExportProperties_ExportFile_Unrecognized_instrument_type__0__ { get { return ResourceManager.GetString("ExportProperties_ExportFile_Unrecognized_instrument_type__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Building report.... /// </summary> public static string ExportReportDlg_ExportReport_Building_report { get { return ResourceManager.GetString("ExportReportDlg_ExportReport_Building_report", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed exporting to {0}. ///{1}. /// </summary> public static string ExportReportDlg_ExportReport_Failed_exporting_to { get { return ResourceManager.GetString("ExportReportDlg_ExportReport_Failed_exporting_to", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Generating Report. 
/// </summary> public static string ExportReportDlg_ExportReport_Generating_Report { get { return ResourceManager.GetString("ExportReportDlg_ExportReport_Generating_Report", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Writing report.... /// </summary> public static string ExportReportDlg_ExportReport_Writing_report { get { return ResourceManager.GetString("ExportReportDlg_ExportReport_Writing_report", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Analyzing document.... /// </summary> public static string ExportReportDlg_GetDatabase_Analyzing_document { get { return ResourceManager.GetString("ExportReportDlg_GetDatabase_Analyzing_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Generating Report Data. /// </summary> public static string ExportReportDlg_GetDatabase_Generating_Report_Data { get { return ResourceManager.GetString("ExportReportDlg_GetDatabase_Generating_Report_Data", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The field {0} does not exist in this document.. /// </summary> public static string ExportReportDlg_GetExceptionDisplayMessage_The_field__0__does_not_exist_in_this_document { get { return ResourceManager.GetString("ExportReportDlg_GetExceptionDisplayMessage_The_field__0__does_not_exist_in_this_d" + "ocument", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Export Report. /// </summary> public static string ExportReportDlg_OkDialog_Export_Report { get { return ResourceManager.GetString("ExportReportDlg_OkDialog_Export_Report", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An unexpected error occurred attempting to display the report &apos;{0}&apos;.. /// </summary> public static string ExportReportDlg_ShowPreview_An_unexpected_error_occurred_attempting_to_display_the_report___0___ { get { return ResourceManager.GetString("ExportReportDlg_ShowPreview_An_unexpected_error_occurred_attempting_to_display_th" + "e_report___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Report Definitions. /// </summary> public static string ExportReportDlg_ShowShare_Report_Definitions { get { return ResourceManager.GetString("ExportReportDlg_ShowShare_Report_Definitions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skyline Reports. /// </summary> public static string ExportReportDlg_ShowShare_Skyline_Reports { get { return ResourceManager.GetString("ExportReportDlg_ShowShare_Skyline_Reports", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Average. /// </summary> public static string ExportSchedulingAlgorithmExtension_LOCALIZED_VALUES_Average { get { return ResourceManager.GetString("ExportSchedulingAlgorithmExtension_LOCALIZED_VALUES_Average", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Single. /// </summary> public static string ExportSchedulingAlgorithmExtension_LOCALIZED_VALUES_Single { get { return ResourceManager.GetString("ExportSchedulingAlgorithmExtension_LOCALIZED_VALUES_Single", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Trends. 
/// </summary> public static string ExportSchedulingAlgorithmExtension_LOCALIZED_VALUES_Trends { get { return ResourceManager.GetString("ExportSchedulingAlgorithmExtension_LOCALIZED_VALUES_Trends", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Average. /// </summary> public static string ExportStrategyExtension_LOCALIZED_VALUES_Average { get { return ResourceManager.GetString("ExportStrategyExtension_LOCALIZED_VALUES_Average", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Buckets. /// </summary> public static string ExportStrategyExtension_LOCALIZED_VALUES_Buckets { get { return ResourceManager.GetString("ExportStrategyExtension_LOCALIZED_VALUES_Buckets", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Monoisotopic. /// </summary> public static string ExportStrategyExtension_LOCALIZED_VALUES_Monoisotopic { get { return ResourceManager.GetString("ExportStrategyExtension_LOCALIZED_VALUES_Monoisotopic", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Protein. /// </summary> public static string ExportStrategyExtension_LOCALIZED_VALUES_Protein { get { return ResourceManager.GetString("ExportStrategyExtension_LOCALIZED_VALUES_Protein", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Single. /// </summary> public static string ExportStrategyExtension_LOCALIZED_VALUES_Single { get { return ResourceManager.GetString("ExportStrategyExtension_LOCALIZED_VALUES_Single", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 2D Histogram. /// </summary> public static string Extensions_CustomToString__2D_Histogram { get { return ResourceManager.GetString("Extensions_CustomToString__2D_Histogram", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Histogram. /// </summary> public static string Extensions_CustomToString_Detections_Histogram { get { return ResourceManager.GetString("Extensions_CustomToString_Detections_Histogram", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Replicates. /// </summary> public static string Extensions_CustomToString_Detections_Replicates { get { return ResourceManager.GetString("Extensions_CustomToString_Detections_Replicates", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Histogram. /// </summary> public static string Extensions_CustomToString_Histogram { get { return ResourceManager.GetString("Extensions_CustomToString_Histogram", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptide Comparison. /// </summary> public static string Extensions_CustomToString_Peptide_Comparison { get { return ResourceManager.GetString("Extensions_CustomToString_Peptide_Comparison", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Replicate Comparison. /// </summary> public static string Extensions_CustomToString_Replicate_Comparison { get { return ResourceManager.GetString("Extensions_CustomToString_Replicate_Comparison", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Run To Run Regression. /// </summary> public static string Extensions_CustomToString_Run_To_Run_Regression { get { return ResourceManager.GetString("Extensions_CustomToString_Run_To_Run_Regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Scheduling. 
/// </summary> public static string Extensions_CustomToString_Scheduling { get { return ResourceManager.GetString("Extensions_CustomToString_Scheduling", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Score To Run Regression. /// </summary> public static string Extensions_CustomToString_Score_To_Run_Regression { get { return ResourceManager.GetString("Extensions_CustomToString_Score_To_Run_Regression", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap ExternalTool { get { object obj = ResourceManager.GetObject("ExternalTool", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to {0} (load failed: {1}). /// </summary> public static string FailedChromGraphItem_FailedChromGraphItem__0__load_failed__1__ { get { return ResourceManager.GetString("FailedChromGraphItem_FailedChromGraphItem__0__load_failed__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} proteins and {1} peptides added. /// </summary> public static string FastaImporter_Import__0__proteins_and__1__peptides_added { get { return ResourceManager.GetString("FastaImporter_Import__0__proteins_and__1__peptides_added", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding protein {0}. /// </summary> public static string FastaImporter_Import_Adding_protein__0__ { get { return ResourceManager.GetString("FastaImporter_Import_Adding_protein__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check your settings to make sure you are using a library.. /// </summary> public static string FastaImporter_Import_Check_your_settings_to_make_sure_you_are_using_a_library_ { get { return ResourceManager.GetString("FastaImporter_Import_Check_your_settings_to_make_sure_you_are_using_a_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check your settings to make sure you are using a library and restrictive enough transition selection.. /// </summary> public static string FastaImporter_Import_Check_your_settings_to_make_sure_you_are_using_a_library_and_restrictive_enough_transition_selection_ { get { return ResourceManager.GetString("FastaImporter_Import_Check_your_settings_to_make_sure_you_are_using_a_library_and" + "_restrictive_enough_transition_selection_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error at or around line {0}: {1}. /// </summary> public static string FastaImporter_Import_Error_at_or_around_line__0____1_ { get { return ResourceManager.GetString("FastaImporter_Import_Error_at_or_around_line__0____1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This import causes the document to contain more than {0:n0} peptides at line {1:n0}.. /// </summary> public static string FastaImporter_Import_This_import_causes_the_document_to_contain_more_than__0_n0__peptides_at_line__1_n0__ { get { return ResourceManager.GetString("FastaImporter_Import_This_import_causes_the_document_to_contain_more_than__0_n0__" + "peptides_at_line__1_n0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This import causes the document to contain more than {0:n0} transitions in {1:n0} peptides at line {2:n0}.. 
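/// (Format note and usage sketch: "{0:n0}" applies the standard .NET numeric format "n"
/// with zero decimal places, so values render with culture-specific group separators,
/// e.g. 100000 becomes "100,000" under en-US. The class name Resources and the argument
/// values below are hypothetical.)
/// string msg = string.Format(
///     Resources.FastaImporter_Import_This_import_causes_the_document_to_contain_more_than__0_n0__transitions_in__1_n0__peptides_at_line__2_n0__,
///     100000, 2500, 18432);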
/// </summary> public static string FastaImporter_Import_This_import_causes_the_document_to_contain_more_than__0_n0__transitions_in__1_n0__peptides_at_line__2_n0__ { get { return ResourceManager.GetString("FastaImporter_Import_This_import_causes_the_document_to_contain_more_than__0_n0__" + "transitions_in__1_n0__peptides_at_line__2_n0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Last column does not contain a valid protein sequence. /// </summary> public static string FastaImporter_ToFasta_Last_column_does_not_contain_a_valid_protein_sequence { get { return ResourceManager.GetString("FastaImporter_ToFasta_Last_column_does_not_contain_a_valid_protein_sequence", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Too few columns found. /// </summary> public static string FastaImporter_ToFasta_Too_few_columns_found { get { return ResourceManager.GetString("FastaImporter_ToFasta_Too_few_columns_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptides in different FASTA sequences may not be compared.. /// </summary> public static string FastaSequence_ComparePeptides_Peptides_in_different_FASTA_sequences_may_not_be_compared { get { return ResourceManager.GetString("FastaSequence_ComparePeptides_Peptides_in_different_FASTA_sequences_may_not_be_co" + "mpared", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptides without FASTA sequence information may not be compared.. /// </summary> public static string FastaSequence_ComparePeptides_Peptides_without_FASTA_sequence_information_may_not_be_compared { get { return ResourceManager.GetString("FastaSequence_ComparePeptides_Peptides_without_FASTA_sequence_information_may_not" + "_be_compared", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A protein sequence may not be empty.. /// </summary> public static string FastaSequence_ValidateSequence_A_protein_sequence_may_not_be_empty { get { return ResourceManager.GetString("FastaSequence_ValidateSequence_A_protein_sequence_may_not_be_empty", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A protein sequence may not contain the character &apos;{0}&apos; at {1}.. /// </summary> public static string FastaSequence_ValidateSequence_A_protein_sequence_may_not_contain_the_character__0__at__1__ { get { return ResourceManager.GetString("FastaSequence_ValidateSequence_A_protein_sequence_may_not_contain_the_character__" + "0__at__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempt to get peptide list from uncleaved FASTA sequence.. /// </summary> public static string FastaSeqV01_GetPeptideList_Attempt_to_get_peptide_list_from_uncleaved_FASTA_sequence { get { return ResourceManager.GetString("FastaSeqV01_GetPeptideList_Attempt_to_get_peptide_list_from_uncleaved_FASTA_seque" + "nce", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap File { get { object obj = ResourceManager.GetObject("File", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Directory could not be found: {0}. 
/// </summary> public static string FileEx_SafeDelete_Directory_could_not_be_found___0_ { get { return ResourceManager.GetString("FileEx_SafeDelete_Directory_could_not_be_found___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File path is invalid: {0}. /// </summary> public static string FileEx_SafeDelete_File_path_is_invalid___0_ { get { return ResourceManager.GetString("FileEx_SafeDelete_File_path_is_invalid___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File path is too long: {0}. /// </summary> public static string FileEx_SafeDelete_File_path_is_too_long___0_ { get { return ResourceManager.GetString("FileEx_SafeDelete_File_path_is_too_long___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Insufficient permission to delete file: {0}. /// </summary> public static string FileEx_SafeDelete_Insufficient_permission_to_delete_file___0_ { get { return ResourceManager.GetString("FileEx_SafeDelete_Insufficient_permission_to_delete_file___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Path contains invalid characters: {0}. /// </summary> public static string FileEx_SafeDelete_Path_contains_invalid_characters___0_ { get { return ResourceManager.GetString("FileEx_SafeDelete_Path_contains_invalid_characters___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Path is empty. /// </summary> public static string FileEx_SafeDelete_Path_is_empty { get { return ResourceManager.GetString("FileEx_SafeDelete_Path_is_empty", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to delete directory: {0}. /// </summary> public static string FileEx_SafeDelete_Unable_to_delete_directory___0_ { get { return ResourceManager.GetString("FileEx_SafeDelete_Unable_to_delete_directory___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to delete file which is in use: {0}. /// </summary> public static string FileEx_SafeDelete_Unable_to_delete_file_which_is_in_use___0_ { get { return ResourceManager.GetString("FileEx_SafeDelete_Unable_to_delete_file_which_is_in_use___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to delete read-only file: {0}. /// </summary> public static string FileEx_SafeDelete_Unable_to_delete_read_only_file___0_ { get { return ResourceManager.GetString("FileEx_SafeDelete_Unable_to_delete_read_only_file___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot save to {0}.. /// </summary> public static string FileIterator_Init_Cannot_save_to__0__ { get { return ResourceManager.GetString("FileIterator_Init_Cannot_save_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected failure writing transitions.. /// </summary> public static string FileIterator_WriteTransition_Unexpected_failure_writing_transitions { get { return ResourceManager.GetString("FileIterator_WriteTransition_Unexpected_failure_writing_transitions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Graph. /// </summary> public static string FileProgressControl_btnRetry_Click_Graph { get { return ResourceManager.GetString("FileProgressControl_btnRetry_Click_Graph", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Log. 
        /// <summary>
        /// Looks up a localized string similar to Log.
        /// </summary>
        public static string FileProgressControl_btnRetry_Click_Log {
            get { return ResourceManager.GetString("FileProgressControl_btnRetry_Click_Log", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to imported.
        /// </summary>
        public static string FileProgressControl_Finish_imported {
            get { return ResourceManager.GetString("FileProgressControl_Finish_imported", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to There were {0} failed import attempts.
        ///.
        /// </summary>
        public static string FileProgressControl_GetErrorLog_ {
            get { return ResourceManager.GetString("FileProgressControl_GetErrorLog_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to 
        ///Here are the last 3 errors:
        ///.
        /// </summary>
        public static string FileProgressControl_GetErrorLog_2 {
            get { return ResourceManager.GetString("FileProgressControl_GetErrorLog_2", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to {0}. {1}.
        /// </summary>
        public static string FileProgressControl_Number__0____1_ {
            get { return ResourceManager.GetString("FileProgressControl_Number__0____1_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to At {0}:
        ///{1}
        ///.
        /// </summary>
        public static string FileProgressControl_SetStatus_ {
            get { return ResourceManager.GetString("FileProgressControl_SetStatus_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Cancel.
        /// </summary>
        public static string FileProgressControl_SetStatus_Cancel {
            get { return ResourceManager.GetString("FileProgressControl_SetStatus_Cancel", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to canceled.
        /// </summary>
        public static string FileProgressControl_SetStatus_canceled {
            get { return ResourceManager.GetString("FileProgressControl_SetStatus_canceled", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to failed.
        /// </summary>
        public static string FileProgressControl_SetStatus_failed {
            get { return ResourceManager.GetString("FileProgressControl_SetStatus_failed", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Retry.
        /// </summary>
        public static string FileProgressControl_SetStatus_Retry {
            get { return ResourceManager.GetString("FileProgressControl_SetStatus_Retry", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to warning.
        /// </summary>
        public static string FileProgressControl_SetStatus_warning {
            get { return ResourceManager.GetString("FileProgressControl_SetStatus_warning", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Cannot save to {0}. Check the path to make sure the directory exists..
        /// </summary>
        public static string FileSaver_CanSave_Cannot_save_to__0__Check_the_path_to_make_sure_the_directory_exists {
            get { return ResourceManager.GetString("FileSaver_CanSave_Cannot_save_to__0__Check_the_path_to_make_sure_the_directory_exists", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Cannot save to {0}. The file is read-only..
        /// </summary>
        public static string FileSaver_CanSave_Cannot_save_to__0__The_file_is_read_only {
            get { return ResourceManager.GetString("FileSaver_CanSave_Cannot_save_to__0__The_file_is_read_only", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Could not replace file .
        /// </summary>
        public static string FileStreamManager_Commit_Could_not_replace_file_ {
            get { return ResourceManager.GetString("FileStreamManager_Commit_Could_not_replace_file_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Unexpected error opening {0}.
        /// </summary>
        public static string FileStreamManager_CreateStream_Unexpected_error_opening__0__ {
            get { return ResourceManager.GetString("FileStreamManager_CreateStream_Unexpected_error_opening__0__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Access Denied: unable to create a file in the folder &quot;{0}&quot;. Adjust the folder write permissions or retry the operation after moving or copying files to a different folder..
        /// </summary>
        public static string FileStreamManager_GetTempFileName_Access_Denied__unable_to_create_a_file_in_the_folder___0____Adjust_the_folder_write_permissions_or_retry_the_operation_after_moving_or_copying_files_to_a_different_folder_ {
            get { return ResourceManager.GetString("FileStreamManager_GetTempFileName_Access_Denied__unable_to_create_a_file_in_the_folder___0____Adjust_the_folder_write_permissions_or_retry_the_operation_after_moving_or_copying_files_to_a_different_folder_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Failed attempting to create a temporary file in the folder {0} with the following error:.
        /// </summary>
        public static string FileStreamManager_GetTempFileName_Failed_attempting_to_create_a_temporary_file_in_the_folder__0__with_the_following_error_ {
            get { return ResourceManager.GetString("FileStreamManager_GetTempFileName_Failed_attempting_to_create_a_temporary_file_in_the_folder__0__with_the_following_error_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Win32 Error: {0}.
        /// </summary>
        public static string FileStreamManager_GetTempFileName_Win32_Error__0__ {
            get { return ResourceManager.GetString("FileStreamManager_GetTempFileName_Win32_Error__0__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap Filter {
            get { return (System.Drawing.Bitmap)ResourceManager.GetObject("Filter", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to {0} molecules not matching the current filter settings..
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg__0__molecules_not_matching_the_current_filter_settings {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg__0__molecules_not_matching_the_current_filter_settings", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to {0} peptides matching multiple proteins..
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg__0__peptides_matching_multiple_proteins {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg__0__peptides_matching_multiple_proteins", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to {0} peptides not matching the current filter settings..
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg__0__peptides_not_matching_the_current_filter_settings {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg__0__peptides_not_matching_the_current_filter_settings", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to {0} peptides without matching proteins..
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg__0__peptides_without_matching_proteins {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg__0__peptides_without_matching_proteins", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to 1 molecule not matching the current filter settings..
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_1_molecule_not_matching_the_current_filter_settings {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_1_molecule_not_matching_the_current_filter_settings", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to 1 peptide matching multiple proteins..
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_1_peptide_matching_multiple_proteins {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_1_peptide_matching_multiple_proteins", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to 1 peptide not matching the current filter settings..
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_1_peptide_not_matching_the_current_filter_settings {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_1_peptide_not_matching_the_current_filter_settings", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to 1 peptide without a matching protein..
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_1_peptide_without_a_matching_protein {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_1_peptide_without_a_matching_protein", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Filter Molecules.
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_Filter_Molecules {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_Filter_Molecules", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Include all molecules.
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_Include_all_molecules {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_Include_all_molecules", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to This molecule does not match the current filter settings..
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_This_molecule_does_not_match_the_current_filter_settings {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_This_molecule_does_not_match_the_current_filter_settings", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to This peptide does not have a matching protein..
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_This_peptide_does_not_have_a_matching_protein {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_This_peptide_does_not_have_a_matching_protein", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to This peptide does not match the current filter settings..
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_This_peptide_does_not_match_the_current_filter_settings {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_This_peptide_does_not_match_the_current_filter_settings", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to This peptide matches multiple proteins..
        /// </summary>
        public static string FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_This_peptide_matches_multiple_proteins {
            get { return ResourceManager.GetString("FilterMatchedPeptidesDlg_FilterMatchedPeptidesDlg_This_peptide_matches_multiple_proteins", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Export Filtered MIDAS Library.
        /// </summary>
        public static string FilterMidasLibraryDlg_btnBrowse_Click_Export_Filtered_MIDAS_Library {
            get { return ResourceManager.GetString("FilterMidasLibraryDlg_btnBrowse_Click_Export_Filtered_MIDAS_Library", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to A library with this name already exists..
        /// </summary>
        public static string FilterMidasLibraryDlg_OkDialog_A_library_with_this_name_already_exists_ {
            get { return ResourceManager.GetString("FilterMidasLibraryDlg_OkDialog_A_library_with_this_name_already_exists_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to You must enter a name for the filtered library..
        /// </summary>
        public static string FilterMidasLibraryDlg_OkDialog_You_must_enter_a_name_for_the_filtered_library_ {
            get { return ResourceManager.GetString("FilterMidasLibraryDlg_OkDialog_You_must_enter_a_name_for_the_filtered_library_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to You must enter a path for the filtered library..
        /// </summary>
        public static string FilterMidasLibraryDlg_OkDialog_You_must_enter_a_path_for_the_filtered_library_ {
            get { return ResourceManager.GetString("FilterMidasLibraryDlg_OkDialog_You_must_enter_a_path_for_the_filtered_library_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap Find {
            get { return (System.Drawing.Bitmap)ResourceManager.GetObject("Find", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap FindNext {
            get { return (System.Drawing.Bitmap)ResourceManager.GetObject("FindNext", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to &lt;&lt; Hide Ad&amp;vanced.
        /// </summary>
        public static string FindNodeDlg_AdvancedVisible_Hide_Advanced {
            get { return ResourceManager.GetString("FindNodeDlg_AdvancedVisible_Hide_Advanced", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Show Ad&amp;vanced &gt;&gt;.
        /// </summary>
        public static string FindNodeDlg_AdvancedVisible_Show_Advanced {
            get { return ResourceManager.GetString("FindNodeDlg_AdvancedVisible_Show_Advanced", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Could not find {0}..
        /// </summary>
        public static string FindOptions_GetNotFoundMessage_Could_not_find__0__ {
            get { return ResourceManager.GetString("FindOptions_GetNotFoundMessage_Could_not_find__0__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Could not find any of {0} items..
        /// </summary>
        public static string FindOptions_GetNotFoundMessage_Could_not_find_any_of__0__items {
            get { return ResourceManager.GetString("FindOptions_GetNotFoundMessage_Could_not_find_any_of__0__items", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to The text &apos;{0}&apos; could not be found..
        /// </summary>
        public static string FindOptions_GetNotFoundMessage_The_text__0__could_not_be_found {
            get { return ResourceManager.GetString("FindOptions_GetNotFoundMessage_The_text__0__could_not_be_found", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Found {0} matches.
        /// </summary>
        public static string FindPredicate_FindAll_Found__0__matches {
            get { return ResourceManager.GetString("FindPredicate_FindAll_Found__0__matches", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Found 0 matches.
        /// </summary>
        public static string FindPredicate_FindAll_Found_0_matches {
            get { return ResourceManager.GetString("FindPredicate_FindAll_Found_0_matches", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Found 1 match.
        /// </summary>
        public static string FindPredicate_FindAll_Found_1_match {
            get { return ResourceManager.GetString("FindPredicate_FindAll_Found_1_match", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Searching for {0}.
        /// </summary>
        public static string FindPredicate_FindAll_Searching_for__0__ {
            get { return ResourceManager.GetString("FindPredicate_FindAll_Searching_for__0__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Bar Graph.
        /// </summary>
        public static string FoldChangeForm_BuildContextMenu_Bar_Graph {
            get { return ResourceManager.GetString("FoldChangeForm_BuildContextMenu_Bar_Graph", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Grid.
        /// </summary>
        public static string FoldChangeForm_BuildContextMenu_Grid {
            get { return ResourceManager.GetString("FoldChangeForm_BuildContextMenu_Grid", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Settings.
        /// </summary>
        public static string FoldChangeForm_BuildContextMenu_Settings {
            get { return ResourceManager.GetString("FoldChangeForm_BuildContextMenu_Settings", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Volcano Plot.
        /// </summary>
        public static string FoldChangeForm_BuildContextMenu_Volcano_Plot {
            get { return ResourceManager.GetString("FoldChangeForm_BuildContextMenu_Volcano_Plot", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap Folder {
            get { return (System.Drawing.Bitmap)ResourceManager.GetObject("Folder", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to large.
        /// </summary>
        public static string FontSize_LARGE_large {
            get { return ResourceManager.GetString("FontSize_LARGE_large", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to normal.
        /// </summary>
        public static string FontSize_NORMAL_normal {
            get { return ResourceManager.GetString("FontSize_NORMAL_normal", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to small.
        /// </summary>
        public static string FontSize_SMALL_small {
            get { return ResourceManager.GetString("FontSize_SMALL_small", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to x-large.
        /// </summary>
        public static string FontSize_XLARGE_x_large {
            get { return ResourceManager.GetString("FontSize_XLARGE_x_large", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to x-small.
        /// </summary>
        public static string FontSize_XSMALL_x_small {
            get { return ResourceManager.GetString("FontSize_XSMALL_x_small", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Full Precision.
        /// </summary>
        public static string FormatSuggestion_FullPrecision_Full_Precision {
            get { return ResourceManager.GetString("FormatSuggestion_FullPrecision_Full_Precision", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Integer.
        /// </summary>
        public static string FormatSuggestion_Integer_Integer {
            get { return ResourceManager.GetString("FormatSuggestion_Integer_Integer", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Percent.
        /// </summary>
        public static string FormatSuggestion_Percent_Percent {
            get { return ResourceManager.GetString("FormatSuggestion_Percent_Percent", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Scientific.
        /// </summary>
        public static string FormatSuggestion_Scientific_Scientific {
            get { return ResourceManager.GetString("FormatSuggestion_Scientific_Scientific", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Formulas are written in standard chemical notation, e.g. &quot;C2H6O&quot;. Heavy isotopes are indicated by a prime (e.g. C&apos; for C13) or double prime for less abundant stable iostopes (e.g. O&quot; for O17, O&apos; for O18)..
        /// </summary>
        public static string FormulaBox_FormulaHelpText_Formulas_are_written_in_standard_chemical_notation__e_g___C2H6O____Heavy_isotopes_are_indicated_by_a_prime__e_g__C__for_C13__or_double_prime_for_less_abundant_stable_iostopes__e_g__O__for_O17__O__for_O18__ {
            get { return ResourceManager.GetString("FormulaBox_FormulaHelpText_Formulas_are_written_in_standard_chemical_notation__e_g___C2H6O____Heavy_isotopes_are_indicated_by_a_prime__e_g__C__for_C13__or_double_prime_for_less_abundant_stable_iostopes__e_g__O__for_O17__O__for_O18__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Formula Help.
        /// </summary>
        public static string FormulaBox_helpToolStripMenuItem_Click_Formula_Help {
            get { return ResourceManager.GetString("FormulaBox_helpToolStripMenuItem_Click_Formula_Help", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap Fragment {
            get { return (System.Drawing.Bitmap)ResourceManager.GetObject("Fragment", resourceCulture); }
        }
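        // Illustrative note (an editor's sketch, not part of the generated designer
        // file): every accessor in this class passes "resourceCulture" to the
        // ResourceManager, so the language of all returned strings can be switched at
        // runtime. Assuming this class exposes the standard generated Culture property
        // (as typical .NET resource designer classes do), that looks like:
        //
        //     Resources.Culture = new System.Globalization.CultureInfo("ja");
        //     string label = Resources.FontSize_LARGE_large; // now resolved from the ja resources
        //
        // The class name "Resources" is assumed here from the designer-file convention.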
        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap FragmentDecoy {
            get { return (System.Drawing.Bitmap)ResourceManager.GetObject("FragmentDecoy", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap FragmentLib {
            get { return (System.Drawing.Bitmap)ResourceManager.GetObject("FragmentLib", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap FragmentLibDecoy {
            get { return (System.Drawing.Bitmap)ResourceManager.GetObject("FragmentLibDecoy", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Neutral losses must be greater than or equal to {0}..
        /// </summary>
        public static string FragmentLoss_Validate_Neutral_losses_must_be_greater_than_or_equal_to__0__ {
            get { return ResourceManager.GetString("FragmentLoss_Validate_Neutral_losses_must_be_greater_than_or_equal_to__0__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Neutral losses must be less than or equal to {0}..
        /// </summary>
        public static string FragmentLoss_Validate_Neutral_losses_must_be_less_than_or_equal_to__0__ {
            get { return ResourceManager.GetString("FragmentLoss_Validate_Neutral_losses_must_be_less_than_or_equal_to__0__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Neutral losses must specify a formula or valid monoisotopic and average masses..
        /// </summary>
        public static string FragmentLoss_Validate_Neutral_losses_must_specify_a_formula_or_valid_monoisotopic_and_average_masses {
            get { return ResourceManager.GetString("FragmentLoss_Validate_Neutral_losses_must_specify_a_formula_or_valid_monoisotopic_and_average_masses", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to DIA.
        /// </summary>
        public static string FullScanAcquisitionExtension_LOCALIZED_VALUES_DIA {
            get { return ResourceManager.GetString("FullScanAcquisitionExtension_LOCALIZED_VALUES_DIA", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to None.
        /// </summary>
        public static string FullScanAcquisitionExtension_LOCALIZED_VALUES_None {
            get { return ResourceManager.GetString("FullScanAcquisitionExtension_LOCALIZED_VALUES_None", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Targeted.
        /// </summary>
        public static string FullScanAcquisitionExtension_LOCALIZED_VALUES_Targeted {
            get { return ResourceManager.GetString("FullScanAcquisitionExtension_LOCALIZED_VALUES_Targeted", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to DDA.
        /// </summary>
        public static string FullScanAcquisitionMethod_DDA_DDA {
            get { return ResourceManager.GetString("FullScanAcquisitionMethod_DDA_DDA", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to {0} is not a valid Full Scan Acquisition Method.
        /// </summary>
        public static string FullScanAcquisitionMethod_FromName__0__is_not_a_valid_Full_Scan_Acquisition_Method {
            get { return ResourceManager.GetString("FullScanAcquisitionMethod_FromName__0__is_not_a_valid_Full_Scan_Acquisition_Method", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Count.
        /// </summary>
        public static string FullScanPrecursorIsotopesExtension_LOCALIZED_VALUES_Count {
            get { return ResourceManager.GetString("FullScanPrecursorIsotopesExtension_LOCALIZED_VALUES_Count", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to None.
        /// </summary>
        public static string FullScanPrecursorIsotopesExtension_LOCALIZED_VALUES_None {
            get { return ResourceManager.GetString("FullScanPrecursorIsotopesExtension_LOCALIZED_VALUES_None", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Percent.
        /// </summary>
        public static string FullScanPrecursorIsotopesExtension_LOCALIZED_VALUES_Percent {
            get { return ResourceManager.GetString("FullScanPrecursorIsotopesExtension_LOCALIZED_VALUES_Percent", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Mass &amp;Accuracy:.
        /// </summary>
        public static string FullScanSettingsControl_SetAnalyzerType_Mass__Accuracy_ {
            get { return ResourceManager.GetString("FullScanSettingsControl_SetAnalyzerType_Mass__Accuracy_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Full gradient chromatograms will take longer to import, consume more disk space, and may make peak picking less effective..
        /// </summary>
        public static string FullScanSettingsControl_UpdateRetentionTimeFilterUi_Full_gradient_chromatograms_will_take_longer_to_import__consume_more_disk_space__and_may_make_peak_picking_less_effective_ {
            get { return ResourceManager.GetString("FullScanSettingsControl_UpdateRetentionTimeFilterUi_Full_gradient_chromatograms_will_take_longer_to_import__consume_more_disk_space__and_may_make_peak_picking_less_effective_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to None of the spectral libraries in this document contain any retention times for any of the peptides in this document..
        /// </summary>
        public static string FullScanSettingsControl_UpdateRetentionTimeFilterUi_None_of_the_spectral_libraries_in_this_document_contain_any_retention_times_for_any_of_the_peptides_in_this_document_ {
            get { return ResourceManager.GetString("FullScanSettingsControl_UpdateRetentionTimeFilterUi_None_of_the_spectral_libraries_in_this_document_contain_any_retention_times_for_any_of_the_peptides_in_this_document_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to This document does not contain any spectral libraries..
        /// </summary>
        public static string FullScanSettingsControl_UpdateRetentionTimeFilterUi_This_document_does_not_contain_any_spectral_libraries_ {
            get { return ResourceManager.GetString("FullScanSettingsControl_UpdateRetentionTimeFilterUi_This_document_does_not_contain_any_spectral_libraries_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to No valid precursor m/z column found..
        /// </summary>
        public static string GeneralRowReader_Create_No_valid_precursor_m_z_column_found {
            get { return ResourceManager.GetString("GeneralRowReader_Create_No_valid_precursor_m_z_column_found", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to No valid product m/z column found..
        /// </summary>
        public static string GeneralRowReader_Create_No_valid_product_m_z_column_found {
            get { return ResourceManager.GetString("GeneralRowReader_Create_No_valid_product_m_z_column_found", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap GenerateDecoys {
            get { return (System.Drawing.Bitmap)ResourceManager.GetObject("GenerateDecoys", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to All.
        /// </summary>
        public static string GenerateDecoysDlg_GenerateDecoysDlg_All {
            get { return ResourceManager.GetString("GenerateDecoysDlg_GenerateDecoysDlg_All", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to The number of peptides {0} must be less than the number of peptide precursor models for decoys {1}, or use the &apos;{2}&apos; decoy generation method..
        /// </summary>
        public static string GenerateDecoysDlg_OkDialog_The_number_of_peptides__0__must_be_less_than_the_number_of_peptide_precursor_models_for_decoys__1___or_use_the___2___decoy_generation_method_ {
            get { return ResourceManager.GetString("GenerateDecoysDlg_OkDialog_The_number_of_peptides__0__must_be_less_than_the_number_of_peptide_precursor_models_for_decoys__1___or_use_the___2___decoy_generation_method_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to No peptide precursor models for decoys were found..
        /// </summary>
        public static string GenerateDecoysError_No_peptide_precursor_models_for_decoys_were_found_ {
            get { return ResourceManager.GetString("GenerateDecoysError_No_peptide_precursor_models_for_decoys_were_found_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to No optimization data available..
        /// </summary>
        public static string GraphChromatogram_DisplayOptimizationTotals_No_optimization_data_available {
            get { return ResourceManager.GetString("GraphChromatogram_DisplayOptimizationTotals_No_optimization_data_available", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to All.
        /// </summary>
        public static string GraphChromatogram_UpdateToolbar_All {
            get { return ResourceManager.GetString("GraphChromatogram_UpdateToolbar_All", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to No base peak chromatogram found.
        /// </summary>
        public static string GraphChromatogram_UpdateUI_No_base_peak_chromatogram_found {
            get { return ResourceManager.GetString("GraphChromatogram_UpdateUI_No_base_peak_chromatogram_found", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to No precursor ion chromatograms found.
        /// </summary>
        public static string GraphChromatogram_UpdateUI_No_precursor_ion_chromatograms_found {
            get { return ResourceManager.GetString("GraphChromatogram_UpdateUI_No_precursor_ion_chromatograms_found", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to No product ion chromatograms found.
        /// </summary>
        public static string GraphChromatogram_UpdateUI_No_product_ion_chromatograms_found {
            get { return ResourceManager.GetString("GraphChromatogram_UpdateUI_No_product_ion_chromatograms_found", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to No corresponding QC chromatogram found.
        /// </summary>
        public static string GraphChromatogram_UpdateUI_No_QC_chromatogram_found {
            get { return ResourceManager.GetString("GraphChromatogram_UpdateUI_No_QC_chromatogram_found", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to No TIC chromatogram found.
        /// </summary>
        public static string GraphChromatogram_UpdateUI_No_TIC_chromatogram_found {
            get { return ResourceManager.GetString("GraphChromatogram_UpdateUI_No_TIC_chromatogram_found", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Select a peptide, precursor or transition to view its chromatograms.
        /// </summary>
        public static string GraphChromatogram_UpdateUI_Select_a_peptide__precursor_or_transition_to_view_its_chromatograms {
            get { return ResourceManager.GetString("GraphChromatogram_UpdateUI_Select_a_peptide__precursor_or_transition_to_view_its_chromatograms", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to window.
        /// </summary>
        public static string GraphData_AddRegressionLabel_window {
            get { return ResourceManager.GetString("GraphData_AddRegressionLabel_window", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Measured Time ({0}).
        /// </summary>
        public static string GraphData_CorrelationLabel_Measured_Time___0__ {
            get { return ResourceManager.GetString("GraphData_CorrelationLabel_Measured_Time___0__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Outliers.
        /// </summary>
        public static string GraphData_Graph_Outliers {
            get { return ResourceManager.GetString("GraphData_Graph_Outliers", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Peptides.
        /// </summary>
        public static string GraphData_Graph_Peptides {
            get { return ResourceManager.GetString("GraphData_Graph_Peptides", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Peptides Refined.
        /// </summary>
        public static string GraphData_Graph_Peptides_Refined {
            get { return ResourceManager.GetString("GraphData_Graph_Peptides_Refined", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Predictor.
        /// </summary>
        public static string GraphData_Graph_Predictor {
            get { return ResourceManager.GetString("GraphData_Graph_Predictor", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Regression.
        /// </summary>
        public static string GraphData_Graph_Regression {
            get { return ResourceManager.GetString("GraphData_Graph_Regression", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Regression Refined.
        /// </summary>
        public static string GraphData_Graph_Regression_Refined {
            get { return ResourceManager.GetString("GraphData_Graph_Regression_Refined", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to The database for the calculator {0} could not be opened. Check that the file {1} was not moved or deleted..
        /// </summary>
        public static string GraphData_GraphData_The_database_for_the_calculator__0__could_not_be_opened__Check_that_the_file__1__was_not_moved_or_deleted_ {
            get { return ResourceManager.GetString("GraphData_GraphData_The_database_for_the_calculator__0__could_not_be_opened__Check_that_the_file__1__was_not_moved_or_deleted_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Time from Prediction.
        /// </summary>
        public static string GraphData_GraphResiduals_Time_from_Prediction {
            get { return ResourceManager.GetString("GraphData_GraphResiduals_Time_from_Prediction", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Time from Regression.
        /// </summary>
        public static string GraphData_GraphResiduals_Time_from_Regression {
            get { return ResourceManager.GetString("GraphData_GraphResiduals_Time_from_Regression", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Time from Regression ({0}).
        /// </summary>
        public static string GraphData_ResidualsLabel_Time_from_Regression___0__ {
            get { return ResourceManager.GetString("GraphData_ResidualsLabel_Time_from_Regression___0__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to {0} ({1:F2} min).
        /// </summary>
        public static string GraphFullScan_CreateGraph__0_____1_F2__min_ {
            get { return ResourceManager.GetString("GraphFullScan_CreateGraph__0_____1_F2__min_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to IM={0}.
        /// </summary>
        public static string GraphFullScan_CreateGraph_IM__0_ {
            get { return ResourceManager.GetString("GraphFullScan_CreateGraph_IM__0_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to IM Scan Range:.
        /// </summary>
        public static string GraphFullScan_CreateGraph_IM_Scan_Range_ {
            get { return ResourceManager.GetString("GraphFullScan_CreateGraph_IM_Scan_Range_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Scan Number:.
        /// </summary>
        public static string GraphFullScan_CreateGraph_Scan_Number_ {
            get { return ResourceManager.GetString("GraphFullScan_CreateGraph_Scan_Number_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to MS/MS.
        /// </summary>
        public static string GraphFullScan_GraphFullScan_MS_MS {
            get { return ResourceManager.GetString("GraphFullScan_GraphFullScan_MS_MS", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to MS1.
        /// </summary>
        public static string GraphFullScan_GraphFullScan_MS1 {
            get { return ResourceManager.GetString("GraphFullScan_GraphFullScan_MS1", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to SIM.
        /// </summary>
        public static string GraphFullScan_GraphFullScan_SIM {
            get { return ResourceManager.GetString("GraphFullScan_GraphFullScan_SIM", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Loading....
        /// </summary>
        public static string GraphFullScan_LoadScan_Loading___ {
            get { return ResourceManager.GetString("GraphFullScan_LoadScan_Loading___", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Spectrum unavailable.
        /// </summary>
        public static string GraphFullScan_LoadScan_Spectrum_unavailable {
            get { return ResourceManager.GetString("GraphFullScan_LoadScan_Spectrum_unavailable", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Failure loading spectrum. Library may be corrupted..
        /// </summary>
        public static string GraphSpectrum_UpdateUI_Failure_loading_spectrum__Library_may_be_corrupted {
            get { return ResourceManager.GetString("GraphSpectrum_UpdateUI_Failure_loading_spectrum__Library_may_be_corrupted", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Multiple charge states with library spectra.
        /// </summary>
        public static string GraphSpectrum_UpdateUI_Multiple_charge_states_with_library_spectra {
            get { return ResourceManager.GetString("GraphSpectrum_UpdateUI_Multiple_charge_states_with_library_spectra", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to All.
        /// </summary>
        public static string GraphSummary_UpdateToolbar_All {
            get { return ResourceManager.GetString("GraphSummary_UpdateToolbar_All", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Log {0}.
        /// </summary>
        public static string GraphValues_Log_AxisTitle {
            get { return ResourceManager.GetString("GraphValues_Log_AxisTitle", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap GreenCheck {
            get { return (System.Drawing.Bitmap)ResourceManager.GetObject("GreenCheck", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to End.
        /// </summary>
        public static string GridColumnsExtension_getDefaultLanguageValues_End {
            get { return ResourceManager.GetString("GridColumnsExtension_getDefaultLanguageValues_End", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to End margin.
        /// </summary>
        public static string GridColumnsExtension_getDefaultLanguageValues_End_margin {
            get { return ResourceManager.GetString("GridColumnsExtension_getDefaultLanguageValues_End_margin", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to none.
        /// </summary>
        public static string GridColumnsExtension_getDefaultLanguageValues_none {
            get { return ResourceManager.GetString("GridColumnsExtension_getDefaultLanguageValues_none", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Start.
        /// </summary>
        public static string GridColumnsExtension_getDefaultLanguageValues_Start {
            get { return ResourceManager.GetString("GridColumnsExtension_getDefaultLanguageValues_Start", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Start margin.
        /// </summary>
        public static string GridColumnsExtension_getDefaultLanguageValues_Start_margin {
            get { return ResourceManager.GetString("GridColumnsExtension_getDefaultLanguageValues_Start_margin", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Target.
        /// </summary>
        public static string GridColumnsExtension_getDefaultLanguageValues_Target {
            get { return ResourceManager.GetString("GridColumnsExtension_getDefaultLanguageValues_Target", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to An invalid number (&quot;{0}&quot;) was specified for {1} {2}..
        /// </summary>
        public static string GridViewDriver_GetValue_An_invalid_number__0__was_specified_for__1__2__ {
            get { return ResourceManager.GetString("GridViewDriver_GetValue_An_invalid_number__0__was_specified_for__1__2__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to on line {0}.
        /// </summary>
        public static string GridViewDriver_GetValue_on_line__0__ {
            get { return ResourceManager.GetString("GridViewDriver_GetValue_on_line__0__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to {0} must be a valid number..
        /// </summary>
        public static string GridViewDriver_GridView_DataError__0__must_be_a_valid_number {
            get { return ResourceManager.GetString("GridViewDriver_GridView_DataError__0__must_be_a_valid_number", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to On line {0}, {1}.
        /// </summary>
        public static string GridViewDriver_ValidateRow_On_line__0__1__ {
            get { return ResourceManager.GetString("GridViewDriver_ValidateRow_On_line__0__1__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to On line {0}, row has more than 2 columns.
        /// </summary>
        public static string GridViewDriver_ValidateRow_On_line__0__row_has_more_than_2_columns {
            get { return ResourceManager.GetString("GridViewDriver_ValidateRow_On_line__0__row_has_more_than_2_columns", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Acquired Time.
        /// </summary>
        public static string GroupGraphsOrderExtension_LOCALIZED_VALUES_Acquired_Time {
            get { return ResourceManager.GetString("GroupGraphsOrderExtension_LOCALIZED_VALUES_Acquired_Time", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Document.
        /// </summary>
        public static string GroupGraphsOrderExtension_LOCALIZED_VALUES_Document {
            get { return ResourceManager.GetString("GroupGraphsOrderExtension_LOCALIZED_VALUES_Document", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Position.
        /// </summary>
        public static string GroupGraphsOrderExtension_LOCALIZED_VALUES_Position {
            get { return ResourceManager.GetString("GroupGraphsOrderExtension_LOCALIZED_VALUES_Position", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Edit Isotope Modification.
        /// </summary>
        public static string HeavyModList_EditItem_Edit_Isotope_Modification {
            get { return ResourceManager.GetString("HeavyModList_EditItem_Edit_Isotope_Modification", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Edit Isotope Modifications.
        /// </summary>
        public static string HeavyModList_Title_Edit_Isotope_Modifications {
            get { return ResourceManager.GetString("HeavyModList_Title_Edit_Isotope_Modifications", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Nullable was expected to have a value..
        /// </summary>
        public static string Helpers_AssumeValue_Nullable_was_expected_to_have_a_value {
            get { return ResourceManager.GetString("Helpers_AssumeValue_Nullable_was_expected_to_have_a_value", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Failure creating XML ID. Input string may not be empty..
        /// </summary>
        public static string Helpers_MakeXmlId_Failure_creating_XML_ID_Input_string_may_not_be_empty {
            get { return ResourceManager.GetString("Helpers_MakeXmlId_Failure_creating_XML_ID_Input_string_may_not_be_empty", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Select.
        /// </summary>
        public static string HierarchicalClusterGraph_zedGraphControl1_ContextMenuBuilder_Select {
            get { return ResourceManager.GetString("HierarchicalClusterGraph_zedGraphControl1_ContextMenuBuilder_Select", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Show Selection.
        /// </summary>
        public static string HierarchicalClusterGraph_zedGraphControl1_ContextMenuBuilder_Show_Selection {
            get { return ResourceManager.GetString("HierarchicalClusterGraph_zedGraphControl1_ContextMenuBuilder_Show_Selection", resourceCulture); }
        }
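        // Illustrative usage sketch (an editor's note, not part of the generated
        // designer file): several of the GridViewDriver_* format strings above take
        // multiple arguments; a plausible composition of the invalid-number message
        // with its line-number suffix is:
        //
        //     string err = string.Format(
        //         GridViewDriver_GetValue_An_invalid_number__0__was_specified_for__1__2__,
        //         badText, columnName,
        //         string.Format(GridViewDriver_GetValue_on_line__0__, lineNumber));
        //
        // "badText", "columnName" and "lineNumber" are hypothetical locals shown only
        // for illustration.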
        /// <summary>
        /// Looks up a localized string similar to X-Axis Labels.
        /// </summary>
        public static string HierarchicalClusterGraph_zedGraphControl1_ContextMenuBuilder_X_Axis_Labels {
            get { return ResourceManager.GetString("HierarchicalClusterGraph_zedGraphControl1_ContextMenuBuilder_X_Axis_Labels", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Y-Axis Labels.
        /// </summary>
        public static string HierarchicalClusterGraph_zedGraphControl1_ContextMenuBuilder_Y_Axis_Labels {
            get { return ResourceManager.GetString("HierarchicalClusterGraph_zedGraphControl1_ContextMenuBuilder_Y_Axis_Labels", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Only showing {0}/{1} peptides.
        /// </summary>
        public static string HistogramHelper_CreateAndShowFindResults_Only_showing__0___1__peptides {
            get { return ResourceManager.GetString("HistogramHelper_CreateAndShowFindResults_Only_showing__0___1__peptides", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap HomeIcon1 {
            get { return (System.Drawing.Bitmap)ResourceManager.GetObject("HomeIcon1", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to From Clipboard.
        /// </summary>
        public static string HtmlFragment_ClipBoardText_From_Clipboard {
            get { return ResourceManager.GetString("HtmlFragment_ClipBoardText_From_Clipboard", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to No data specified.
        /// </summary>
        public static string HtmlFragment_HtmlFragment_No_data_specified {
            get { return ResourceManager.GetString("HtmlFragment_HtmlFragment_No_data_specified", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to StartFragment is already declared.
        /// </summary>
        public static string HtmlFragment_HtmlFragment_StartFragment_is_already_declared {
            get { return ResourceManager.GetString("HtmlFragment_HtmlFragment_StartFragment_is_already_declared", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to StartFragment must be declared before EndFragment.
        /// </summary>
        public static string HtmlFragment_HtmlFragment_StartFragment_must_be_declared_before_EndFragment {
            get { return ResourceManager.GetString("HtmlFragment_HtmlFragment_StartFragment_must_be_declared_before_EndFragment", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to StartHtml is already declared.
        /// </summary>
        public static string HtmlFragment_HtmlFragment_StartHtml_is_already_declared {
            get { return ResourceManager.GetString("HtmlFragment_HtmlFragment_StartHtml_is_already_declared", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to StartHTML must be declared before endHTML.
        /// </summary>
        public static string HtmlFragment_HtmlFragment_StartHTML_must_be_declared_before_endHTML {
            get { return ResourceManager.GetString("HtmlFragment_HtmlFragment_StartHTML_must_be_declared_before_endHTML", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap Icojam_Blueberry_Basic_Arrow_left {
            get { return (System.Drawing.Bitmap)ResourceManager.GetObject("Icojam-Blueberry-Basic-Arrow-left", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap Icojam_Blueberry_Basic_Arrow_right {
            get { return (System.Drawing.Bitmap)ResourceManager.GetObject("Icojam-Blueberry-Basic-Arrow-right", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Failed to find document node..
        /// </summary>
        public static string IdentityNotFoundException_IdentityNotFoundException_Failed_to_find_document_node {
            get { return ResourceManager.GetString("IdentityNotFoundException_IdentityNotFoundException_Failed_to_find_document_node", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Index {0} out of range -1 to {1}.
        /// </summary>
        public static string IdentityPath_GetPathTo_Index__0__out_of_range_1_to__1__ {
            get { return ResourceManager.GetString("IdentityPath_GetPathTo_Index__0__out_of_range_1_to__1__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Invalid attempt to perform parent operation on leaf node..
        /// </summary>
        public static string IdentityPathTraversal_Traverse_Invalid_attempt_to_perform_parent_operation_on_leaf_node {
            get { return ResourceManager.GetString("IdentityPathTraversal_Traverse_Invalid_attempt_to_perform_parent_operation_on_leaf_node", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to iRT.
        /// </summary>
        public static string IIrtRegression_DisplayEquation_iRT {
            get { return ResourceManager.GetString("IIrtRegression_DisplayEquation_iRT", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Measured RT.
        /// </summary>
        public static string IIrtRegression_DisplayEquation_Measured_RT {
            get { return ResourceManager.GetString("IIrtRegression_DisplayEquation_Measured_RT", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to The document must be saved before results may be imported..
        /// </summary>
        public static string ImportDocResultsDlg_OkDialog_The_document_must_be_saved_before_results_may_be_imported {
            get { return ResourceManager.GetString("ImportDocResultsDlg_OkDialog_The_document_must_be_saved_before_results_may_be_imported", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Open FASTA.
        /// </summary>
        public static string ImportFastaControl_browseFastaBtn_Click_Open_FASTA {
            get { return ResourceManager.GetString("ImportFastaControl_browseFastaBtn_Click_Open_FASTA", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Cannot automatically train mProphet model since no results files are being imported. Continue without automatically training an mProphet model, or go back and add at least one results file..
        /// </summary>
        public static string ImportFastaControl_cbAutoTrain_CheckedChanged_Cannot_automatically_train_mProphet_model_since_no_results_files_are_being_imported__Continue_without_automatically_training_an_mProphet_model__or_go_back_and_add_at_least_one_results_file_ {
            get { return ResourceManager.GetString("ImportFastaControl_cbAutoTrain_CheckedChanged_Cannot_automatically_train_mProphet_model_since_no_results_files_are_being_imported__Continue_without_automatically_training_an_mProphet_model__or_go_back_and_add_at_least_one_results_file_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Failed reading the file {0}..
        /// </summary>
        public static string ImportFastaControl_GetFastaFileContent_Failed_reading_the_file__0__ {
            get { return ResourceManager.GetString("ImportFastaControl_GetFastaFileContent_Failed_reading_the_file__0__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to A maximum of one decoy per target may be generated when using reversed decoys..
        /// </summary>
        public static string ImportFastaControl_ImportFasta_A_maximum_of_one_decoy_per_target_may_be_generated_when_using_reversed_decoys_ {
            get { return ResourceManager.GetString("ImportFastaControl_ImportFasta_A_maximum_of_one_decoy_per_target_may_be_generated_when_using_reversed_decoys_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Cannot automatically train mProphet model without decoys, but decoy options resulted in no decoys being generated. Please increase number of decoys per target, or disable automatic training of mProphet model..
        /// </summary>
        public static string ImportFastaControl_ImportFasta_Cannot_automatically_train_mProphet_model_without_decoys__but_decoy_options_resulted_in_no_decoys_being_generated__Please_increase_number_of_decoys_per_target__or_disable_automatic_training_of_mProphet_model_ {
            get { return ResourceManager.GetString("ImportFastaControl_ImportFasta_Cannot_automatically_train_mProphet_model_without_decoys__but_decoy_options_resulted_in_no_decoys_being_generated__Please_increase_number_of_decoys_per_target__or_disable_automatic_training_of_mProphet_model_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Change digestion settings.
        /// </summary>
        public static string ImportFastaControl_ImportFasta_Change_digestion_settings {
            get { return ResourceManager.GetString("ImportFastaControl_ImportFasta_Change_digestion_settings", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Change settings.
        /// </summary>
        public static string ImportFastaControl_ImportFasta_Change_settings {
            get { return ResourceManager.GetString("ImportFastaControl_ImportFasta_Change_settings", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Change settings to add precursors.
        /// </summary>
        public static string ImportFastaControl_ImportFasta_Change_settings_to_add_precursors {
            get { return ResourceManager.GetString("ImportFastaControl_ImportFasta_Change_settings_to_add_precursors", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Importing the FASTA did not create any target proteins..
        /// </summary>
        public static string ImportFastaControl_ImportFasta_Importing_the_FASTA_did_not_create_any_target_proteins_ {
            get { return ResourceManager.GetString("ImportFastaControl_ImportFasta_Importing_the_FASTA_did_not_create_any_target_proteins_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Insert FASTA.
        /// </summary>
        public static string ImportFastaControl_ImportFasta_Insert_FASTA {
            get { return ResourceManager.GetString("ImportFastaControl_ImportFasta_Insert_FASTA", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Please enter a valid number of decoys per target greater than 0..
        /// </summary>
        public static string ImportFastaControl_ImportFasta_Please_enter_a_valid_number_of_decoys_per_target_greater_than_0_ {
            get { return ResourceManager.GetString("ImportFastaControl_ImportFasta_Please_enter_a_valid_number_of_decoys_per_target_greater_than_0_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Please import FASTA to add peptides to the document..
        /// </summary>
        public static string ImportFastaControl_ImportFasta_Please_import_FASTA_to_add_peptides_to_the_document_ {
            get { return ResourceManager.GetString("ImportFastaControl_ImportFasta_Please_import_FASTA_to_add_peptides_to_the_document_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to The document does not contain any peptides..
        /// </summary>
        public static string ImportFastaControl_ImportFasta_The_document_does_not_contain_any_peptides_ {
            get { return ResourceManager.GetString("ImportFastaControl_ImportFasta_The_document_does_not_contain_any_peptides_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to The document does not contain any precursor transitions..
        /// </summary>
        public static string ImportFastaControl_ImportFasta_The_document_does_not_contain_any_precursor_transitions_ {
            get { return ResourceManager.GetString("ImportFastaControl_ImportFasta_The_document_does_not_contain_any_precursor_transitions_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Would you like to change the document settings to automatically pick the precursor transitions specified in the full-scan settings?.
        /// </summary>
        public static string ImportFastaControl_ImportFasta_Would_you_like_to_change_the_document_settings_to_automatically_pick_the_precursor_transitions_specified_in_the_full_scan_settings_ {
            get { return ResourceManager.GetString("ImportFastaControl_ImportFasta_Would_you_like_to_change_the_document_settings_to_automatically_pick_the_precursor_transitions_specified_in_the_full_scan_settings_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to Error adding FASTA file {0}..
        /// </summary>
        public static string ImportFastaControl_SetFastaContent_Error_adding_FASTA_file__0__ {
            get { return ResourceManager.GetString("ImportFastaControl_SetFastaContent_Error_adding_FASTA_file__0__", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to The document must contain at least one precursor transition in order to proceed..
        /// </summary>
        public static string ImportFastaControl_VerifyAtLeastOnePrecursorTransition_The_document_must_contain_at_least_one_precursor_transition_in_order_to_proceed_ {
            get { return ResourceManager.GetString("ImportFastaControl_VerifyAtLeastOnePrecursorTransition_The_document_must_contain_at_least_one_precursor_transition_in_order_to_proceed_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to &apos;{0}&apos; is not a capital letter that corresponds to an amino acid..
        /// </summary>
        public static string ImportFastaHelper_AddFasta___0___is_not_a_capital_letter_that_corresponds_to_an_amino_acid_ {
            get { return ResourceManager.GetString("ImportFastaHelper_AddFasta___0___is_not_a_capital_letter_that_corresponds_to_an_amino_acid_", resourceCulture); }
        }

        /// <summary>
        /// Looks up a localized string similar to An unexpected error occurred: .
/// </summary> public static string ImportFastaHelper_AddFasta_An_unexpected_error_occurred__ { get { return ResourceManager.GetString("ImportFastaHelper_AddFasta_An_unexpected_error_occurred__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is no name for this protein. /// </summary> public static string ImportFastaHelper_AddFasta_There_is_no_name_for_this_protein { get { return ResourceManager.GetString("ImportFastaHelper_AddFasta_There_is_no_name_for_this_protein", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This must start with &apos;&gt;&apos;. /// </summary> public static string ImportFastaHelper_AddFasta_This_must_start_with____ { get { return ResourceManager.GetString("ImportFastaHelper_AddFasta_This_must_start_with____", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is no sequence for this protein. /// </summary> public static string ImportFastaHelper_CheckSequence_There_is_no_sequence_for_this_protein { get { return ResourceManager.GetString("ImportFastaHelper_CheckSequence_There_is_no_sequence_for_this_protein", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please choose the library you would like to add.. /// </summary> public static string ImportIonMobilityFromSpectralLibrary_OkDialog_Please_choose_the_library_you_would_like_to_add_ { get { return ResourceManager.GetString("ImportIonMobilityFromSpectralLibrary_OkDialog_Please_choose_the_library_you_would" + "_like_to_add_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Only BiblioSpec libraries contain enough ion mobility information to support this operation.. /// </summary> public static string ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_Only_BiblioSpec_libraries_contain_enough_ion_mobility_information_to_support_this_operation_ { get { return ResourceManager.GetString("ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_Only_BiblioSpec_" + "libraries_contain_enough_ion_mobility_information_to_support_this_operation_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please choose a non redundant library.. /// </summary> public static string ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_Please_choose_a_non_redundant_library_ { get { return ResourceManager.GetString("ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_Please_choose_a_" + "non_redundant_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please specify a path to an existing spectral library.. /// </summary> public static string ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_Please_specify_a_path_to_an_existing_spectral_library { get { return ResourceManager.GetString("ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_Please_specify_a" + "_path_to_an_existing_spectral_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please specify a path to an existing spectral library.. 
/// </summary> public static string ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_Please_specify_a_path_to_an_existing_spectral_library_ { get { return ResourceManager.GetString("ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_Please_specify_a" + "_path_to_an_existing_spectral_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} appears to be a redundant library.. /// </summary> public static string ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_The_file__0__appears_to_be_a_redundant_library_ { get { return ResourceManager.GetString("ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_The_file__0__app" + "ears_to_be_a_redundant_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} does not exist.. /// </summary> public static string ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_The_file__0__does_not_exist_ { get { return ResourceManager.GetString("ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_The_file__0__doe" + "s_not_exist_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} is not a BiblioSpec library.. /// </summary> public static string ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_The_file__0__is_not_a_BiblioSpec_library_ { get { return ResourceManager.GetString("ImportIonMobilityFromSpectralLibrary_ValidateSpectralLibraryPath_The_file__0__is_" + "not_a_BiblioSpec_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to MS/MS full-scan settings were configured, please verify or change your current full-scan settings.. /// </summary> public static string ImportPeptideSearchDlg_ImportPeptideSearchDlg_MS_MS_full_scan_settings_were_configured__please_verify_or_change_your_current_full_scan_settings_ { get { return ResourceManager.GetString("ImportPeptideSearchDlg_ImportPeptideSearchDlg_MS_MS_full_scan_settings_were_confi" + "gured__please_verify_or_change_your_current_full_scan_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Next &gt;. /// </summary> public static string ImportPeptideSearchDlg_ImportPeptideSearchDlg_Next { get { return ResourceManager.GetString("ImportPeptideSearchDlg_ImportPeptideSearchDlg_Next", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot build library from OpenSWATH results mixed with results from other tools.. /// </summary> public static string ImportPeptideSearchDlg_NextPage_Cannot_build_library_from_OpenSWATH_results_mixed_with_results_from_other_tools_ { get { return ResourceManager.GetString("ImportPeptideSearchDlg_NextPage_Cannot_build_library_from_OpenSWATH_results_mixed" + "_with_results_from_other_tools_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A FASTA file is required for the DDA search.. /// </summary> public static string ImportPeptideSearchDlg_NextPage_FastFileMissing_DDASearch { get { return ResourceManager.GetString("ImportPeptideSearchDlg_NextPage_FastFileMissing_DDASearch", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Finish. /// </summary> public static string ImportPeptideSearchDlg_NextPage_Finish { get { return ResourceManager.GetString("ImportPeptideSearchDlg_NextPage_Finish", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Import FASTA (optional). 
/// </summary> public static string ImportPeptideSearchDlg_NextPage_Import_FASTA__optional_ { get { return ResourceManager.GetString("ImportPeptideSearchDlg_NextPage_Import_FASTA__optional_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Import FASTA (required). /// </summary> public static string ImportPeptideSearchDlg_NextPage_Import_FASTA__required_ { get { return ResourceManager.GetString("ImportPeptideSearchDlg_NextPage_Import_FASTA__required_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No results files were specified. Are you sure you want to continue? Continuing will create a template document with no imported results.. /// </summary> public static string ImportPeptideSearchDlg_NextPage_No_results_files_were_specified__Are_you_sure_you_want_to_continue__Continuing_will_create_a_template_document_with_no_imported_results_ { get { return ResourceManager.GetString("ImportPeptideSearchDlg_NextPage_No_results_files_were_specified__Are_you_sure_you" + "_want_to_continue__Continuing_will_create_a_template_document_with_no_imported_r" + "esults_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please check your peptide search pipeline or contact Skyline support to ensure retention times appear in your spectral libraries.. /// </summary> public static string ImportPeptideSearchDlg_NextPage_Please_check_your_peptide_search_pipeline_or_contact_Skyline_support_to_ensure_retention_times_appear_in_your_spectral_libraries_ { get { return ResourceManager.GetString("ImportPeptideSearchDlg_NextPage_Please_check_your_peptide_search_pipeline_or_cont" + "act_Skyline_support_to_ensure_retention_times_appear_in_your_spectral_libraries_" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Some results files are still missing. Are you sure you want to continue?. /// </summary> public static string ImportPeptideSearchDlg_NextPage_Some_results_files_are_still_missing__Are_you_sure_you_want_to_continue_ { get { return ResourceManager.GetString("ImportPeptideSearchDlg_NextPage_Some_results_files_are_still_missing__Are_you_sur" + "e_you_want_to_continue_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document specific spectral library does not have valid retention times.. /// </summary> public static string ImportPeptideSearchDlg_NextPage_The_document_specific_spectral_library_does_not_have_valid_retention_times_ { get { return ResourceManager.GetString("ImportPeptideSearchDlg_NextPage_The_document_specific_spectral_library_does_not_h" + "ave_valid_retention_times_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Full-scan MS1 filtering must be enabled in order to import a peptide search.. /// </summary> public static string ImportPeptideSearchDlg_UpdateFullScanSettings_Full_scan_MS1_filtering_must_be_enabled_in_order_to_import_a_peptide_search_ { get { return ResourceManager.GetString("ImportPeptideSearchDlg_UpdateFullScanSettings_Full_scan_MS1_filtering_must_be_ena" + "bled_in_order_to_import_a_peptide_search_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Full-scan MS1 or MS/MS filtering must be enabled in order to import a peptide search.. 
/// </summary> public static string ImportPeptideSearchDlg_UpdateFullScanSettings_Full_scan_MS1_or_MS_MS_filtering_must_be_enabled_in_order_to_import_a_peptide_search_ { get { return ResourceManager.GetString("ImportPeptideSearchDlg_UpdateFullScanSettings_Full_scan_MS1_or_MS_MS_filtering_mu" + "st_be_enabled_in_order_to_import_a_peptide_search_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The current peak scoring model is incompatible with one or more peptides in the document.. /// </summary> public static string ImportPeptideSearchManager_LoadBackground_The_current_peak_scoring_model_is_incompatible_with_one_or_more_peptides_in_the_document_ { get { return ResourceManager.GetString("ImportPeptideSearchManager_LoadBackground_The_current_peak_scoring_model_is_incom" + "patible_with_one_or_more_peptides_in_the_document_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Import Peptide Search. /// </summary> public static string ImportResultsControl_browseToResultsFileButton_Click_Import_Peptide_Search { get { return ResourceManager.GetString("ImportResultsControl_browseToResultsFileButton_Click_Import_Peptide_Search", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to find missing result files in {0}.. /// </summary> public static string ImportResultsControl_FindDataFiles_An_error_occurred_attempting_to_find_missing_result_files_in__0__ { get { return ResourceManager.GetString("ImportResultsControl_FindDataFiles_An_error_occurred_attempting_to_find_missing_r" + "esult_files_in__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Searching for missing result files in {0}.. /// </summary> public static string ImportResultsControl_FindDataFiles_Searching_for_missing_result_files_in__0__ { get { return ResourceManager.GetString("ImportResultsControl_FindDataFiles_Searching_for_missing_result_files_in__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to find results files.. /// </summary> public static string ImportResultsControl_FindResultsFiles_An_error_occurred_attempting_to_find_results_files_ { get { return ResourceManager.GetString("ImportResultsControl_FindResultsFiles_An_error_occurred_attempting_to_find_result" + "s_files_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Searching for matching results files in {0}.. /// </summary> public static string ImportResultsControl_FindResultsFiles_Searching_for_matching_results_files_in__0__ { get { return ResourceManager.GetString("ImportResultsControl_FindResultsFiles_Searching_for_matching_results_files_in__0_" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Searching for Results Files. /// </summary> public static string ImportResultsControl_FindResultsFiles_Searching_for_Results_Files { get { return ResourceManager.GetString("ImportResultsControl_FindResultsFiles_Searching_for_Results_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Could not find all the missing results files.. 
/// </summary> public static string ImportResultsControl_findResultsFilesButton_Click_Could_not_find_all_the_missing_results_files_ { get { return ResourceManager.GetString("ImportResultsControl_findResultsFilesButton_Click_Could_not_find_all_the_missing_" + "results_files_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Results Directory. /// </summary> public static string ImportResultsControl_findResultsFilesButton_Click_Results_Directory { get { return ResourceManager.GetString("ImportResultsControl_findResultsFilesButton_Click_Results_Directory", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Import results. /// </summary> public static string ImportResultsControl_GetPeptideSearchChromatograms_Import_results { get { return ResourceManager.GetString("ImportResultsControl_GetPeptideSearchChromatograms_Import_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Browse for Results Files. /// </summary> public static string ImportResultsDIAControl_btnBrowse_Click_Browse_for_Results_Files { get { return ResourceManager.GetString("ImportResultsDIAControl_btnBrowse_Click_Browse_for_Results_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The current document does not appear to have enough transitions to require multiple injections. ///Are you sure you want to continue?. /// </summary> public static string ImportResultsDlg_CanCreateMultiInjectionMethods_The_current_document_does_not_appear_to_have_enough_transitions_to_require_multiple_injections { get { return ResourceManager.GetString("ImportResultsDlg_CanCreateMultiInjectionMethods_The_current_document_does_not_app" + "ear_to_have_enough_transitions_to_require_multiple_injections", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Chromatograms. /// </summary> public static string ImportResultsDlg_DefaultNewName_Default_Name { get { return ResourceManager.GetString("ImportResultsDlg_DefaultNewName_Default_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No results found in the folder {0}.. /// </summary> public static string ImportResultsDlg_GetDataSourcePathsDir_No_results_found_in_the_folder__0__ { get { return ResourceManager.GetString("ImportResultsDlg_GetDataSourcePathsDir_No_results_found_in_the_folder__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Results Directory. /// </summary> public static string ImportResultsDlg_GetDataSourcePathsDir_Results_Directory { get { return ResourceManager.GetString("ImportResultsDlg_GetDataSourcePathsDir_Results_Directory", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Import Results Files. /// </summary> public static string ImportResultsDlg_GetDataSourcePathsFile_Import_Results_Files { get { return ResourceManager.GetString("ImportResultsDlg_GetDataSourcePathsFile_Import_Results_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No results files chosen.. /// </summary> public static string ImportResultsDlg_GetDataSourcePathsFile_No_results_files_chosen { get { return ResourceManager.GetString("ImportResultsDlg_GetDataSourcePathsFile_No_results_files_chosen", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to read sample information from the file {0}.. 
/// </summary> public static string ImportResultsDlg_GetWiffSubPaths_An_error_occurred_attempting_to_read_sample_information_from_the_file__0__ { get { return ResourceManager.GetString("ImportResultsDlg_GetWiffSubPaths_An_error_occurred_attempting_to_read_sample_info" + "rmation_from_the_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reading sample names from {0}. /// </summary> public static string ImportResultsDlg_GetWiffSubPaths_Reading_sample_names_from__0__ { get { return ResourceManager.GetString("ImportResultsDlg_GetWiffSubPaths_Reading_sample_names_from__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Sample Names. /// </summary> public static string ImportResultsDlg_GetWiffSubPaths_Sample_Names { get { return ResourceManager.GetString("ImportResultsDlg_GetWiffSubPaths_Sample_Names", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file may be corrupted, missing, or the correct libraries may not be installed.. /// </summary> public static string ImportResultsDlg_GetWiffSubPaths_The_file_may_be_corrupted_missing_or_the_correct_libraries_may_not_be_installed { get { return ResourceManager.GetString("ImportResultsDlg_GetWiffSubPaths_The_file_may_be_corrupted_missing_or_the_correct" + "_libraries_may_not_be_installed", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A result name may not contain any of the characters &apos;{0}&apos;.. /// </summary> public static string ImportResultsDlg_OkDialog_A_result_name_may_not_contain_any_of_the_characters___0___ { get { return ResourceManager.GetString("ImportResultsDlg_OkDialog_A_result_name_may_not_contain_any_of_the_characters___0" + "___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The specified name already exists for this document.. /// </summary> public static string ImportResultsDlg_OkDialog_The_specified_name_already_exists_for_this_document { get { return ResourceManager.GetString("ImportResultsDlg_OkDialog_The_specified_name_already_exists_for_this_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You must select an existing set of results to which to append new data.. /// </summary> public static string ImportResultsDlg_OkDialog_You_must_select_an_existing_set_of_results_to_which_to_append_new_data { get { return ResourceManager.GetString("ImportResultsDlg_OkDialog_You_must_select_an_existing_set_of_results_to_which_to_" + "append_new_data", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The files you have chosen have a common prefix and suffix. ///Would you like to remove some or all of the prefix or suffix to shorten the names used in Skyline?. /// </summary> public static string ImportResultsNameDlg_CommonPrefix_and_Suffix { get { return ResourceManager.GetString("ImportResultsNameDlg_CommonPrefix_and_Suffix", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The files you have chosen have a common suffix. ///Would you like to remove some or all of this suffix to shorten the names used in Skyline?. /// </summary> public static string ImportResultsNameDlg_CommonSuffix { get { return ResourceManager.GetString("ImportResultsNameDlg_CommonSuffix", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The text &apos;{0}&apos; is not a prefix of the files chosen.. 
/// </summary> public static string ImportResultsNameDlg_OkDialog_The_text__0__is_not_a_prefix_of_the_files_chosen { get { return ResourceManager.GetString("ImportResultsNameDlg_OkDialog_The_text__0__is_not_a_prefix_of_the_files_chosen", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The text &apos;{0}&apos; is not a suffix of the files chosen.. /// </summary> public static string ImportResultsNameDlg_OkDialog_The_text__0__is_not_a_suffix_of_the_files_chosen { get { return ResourceManager.GetString("ImportResultsNameDlg_OkDialog_The_text__0__is_not_a_suffix_of_the_files_chosen", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following names already exist: /// ///{0} /// ///. /// </summary> public static string ImportSkyrHelper_ResolveImportConflicts_ { get { return ResourceManager.GetString("ImportSkyrHelper_ResolveImportConflicts_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Resolving conflicts by overwriting.. /// </summary> public static string ImportSkyrHelper_ResolveImportConflicts_Resolving_conflicts_by_overwriting_ { get { return ResourceManager.GetString("ImportSkyrHelper_ResolveImportConflicts_Resolving_conflicts_by_overwriting_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Resolving conflicts by skipping.. /// </summary> public static string ImportSkyrHelper_ResolveImportConflicts_Resolving_conflicts_by_skipping_ { get { return ResourceManager.GetString("ImportSkyrHelper_ResolveImportConflicts_Resolving_conflicts_by_skipping_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The name &apos;{0}&apos; already exists.. /// </summary> public static string ImportSkyrHelper_ResolveImportConflicts_The_name___0___already_exists_ { get { return ResourceManager.GetString("ImportSkyrHelper_ResolveImportConflicts_The_name___0___already_exists_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Please specify a way to resolve conflicts. Use command --report-conflict-resolution=&lt; overwrite | skip &gt;.. /// </summary> public static string ImportSkyrHelper_ResolveImportConflicts_Use_command { get { return ResourceManager.GetString("ImportSkyrHelper_ResolveImportConflicts_Use_command", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The input text did not appear to contain column headers. Use the dropdown control to assign column meanings for import.. /// </summary> public static string ImportTransitionListColumnSelectDlg_DisplayData_The_input_text_did_not_appear_to_contain_column_headers__Use_the_dropdown_control_to_assign_column_meanings_for_import_ { get { return ResourceManager.GetString("ImportTransitionListColumnSelectDlg_DisplayData_The_input_text_did_not_appear_to_" + "contain_column_headers__Use_the_dropdown_control_to_assign_column_meanings_for_i" + "mport_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This column is labeled with the header &apos;{0}&apos; in the input text. Use the dropdown control to assign its meaning for import.. 
/// </summary> public static string ImportTransitionListColumnSelectDlg_DisplayData_This_column_is_labeled_with_the_header___0___in_the_input_text__Use_the_dropdown_control_to_assign_its_meaning_for_import_ { get { return ResourceManager.GetString("ImportTransitionListColumnSelectDlg_DisplayData_This_column_is_labeled_with_the_h" + "eader___0___in_the_input_text__Use_the_dropdown_control_to_assign_its_meaning_fo" + "r_import_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Decoy. /// </summary> public static string ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Decoy { get { return ResourceManager.GetString("ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Decoy", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fragment Name. /// </summary> public static string ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Fragment_Name { get { return ResourceManager.GetString("ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Fragment_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ignore Column. /// </summary> public static string ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Ignore_Column { get { return ResourceManager.GetString("ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Ignore_Column", resourceCulture); } } /// <summary> /// Looks up a localized string similar to iRT. /// </summary> public static string ImportTransitionListColumnSelectDlg_PopulateComboBoxes_iRT { get { return ResourceManager.GetString("ImportTransitionListColumnSelectDlg_PopulateComboBoxes_iRT", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Label Type. /// </summary> public static string ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Label_Type { get { return ResourceManager.GetString("ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Label_Type", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library Intensity. /// </summary> public static string ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Library_Intensity { get { return ResourceManager.GetString("ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Library_Intensity", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptide Modified Sequence. /// </summary> public static string ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Peptide_Modified_Sequence { get { return ResourceManager.GetString("ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Peptide_Modified_Sequence", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor Charge. /// </summary> public static string ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Precursor_Charge { get { return ResourceManager.GetString("ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Precursor_Charge", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor m/z. /// </summary> public static string ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Precursor_m_z { get { return ResourceManager.GetString("ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Precursor_m_z", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product m/z. 
/// </summary> public static string ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Product_m_z { get { return ResourceManager.GetString("ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Product_m_z", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Protein Name. /// </summary> public static string ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Protein_Name { get { return ResourceManager.GetString("ImportTransitionListColumnSelectDlg_PopulateComboBoxes_Protein_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A transition contained an error.. /// </summary> public static string ImportTransitionListErrorDlg_ImportTransitionListErrorDlg_A_transition_contained_an_error_ { get { return ResourceManager.GetString("ImportTransitionListErrorDlg_ImportTransitionListErrorDlg_A_transition_contained_" + "an_error_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A transition contained an error. Skip this transition and import the rest?. /// </summary> public static string ImportTransitionListErrorDlg_ImportTransitionListErrorDlg_A_transition_contained_an_error__Skip_this_transition_and_import_the_rest_ { get { return ResourceManager.GetString("ImportTransitionListErrorDlg_ImportTransitionListErrorDlg_A_transition_contained_" + "an_error__Skip_this_transition_and_import_the_rest_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to All {0} transitions contained errors. Please check the transition list for errors and try importing again.. /// </summary> public static string ImportTransitionListErrorDlg_ImportTransitionListErrorDlg_All__0__transitions_contained_errors___Please_check_the_transition_list_for_errors_and_try_importing_again_ { get { return ResourceManager.GetString("ImportTransitionListErrorDlg_ImportTransitionListErrorDlg_All__0__transitions_con" + "tained_errors___Please_check_the_transition_list_for_errors_and_try_importing_ag" + "ain_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The imported transition contains an error. Please check the transition list and the Skyline settings and try importing again.. /// </summary> public static string ImportTransitionListErrorDlg_ImportTransitionListErrorDlg_The_imported_transition_contains_an_error__Please_check_the_transition_list_and_the_Skyline_settings_and_try_importing_again_ { get { return ResourceManager.GetString("ImportTransitionListErrorDlg_ImportTransitionListErrorDlg_The_imported_transition" + "_contains_an_error__Please_check_the_transition_list_and_the_Skyline_settings_an" + "d_try_importing_again_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This transition list cannot be imported as it does not provide values for:. /// </summary> public static string ImportTransitionListErrorDlg_ImportTransitionListErrorDlg_This_transition_list_cannot_be_imported_as_it_does_not_provide_values_for_ { get { return ResourceManager.GetString("ImportTransitionListErrorDlg_ImportTransitionListErrorDlg_This_transition_list_ca" + "nnot_be_imported_as_it_does_not_provide_values_for_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The calculator {0} requires all of its standard peptides in order to determine a regression.. 
/// </summary> public static string IncompleteStandardException_ERROR_The_calculator__0__requires_all_of_its_standard_peptides_in_order_to_determine_a_regression_ { get { return ResourceManager.GetString("IncompleteStandardException_ERROR_The_calculator__0__requires_all_of_its_standard" + "_peptides_in_order_to_determine_a_regression_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The calculator {0} requires all of its standard peptides in order to determine a regression.. /// </summary> public static string IncompleteStandardException_IncompleteStandardException_The_calculator__0__requires_all_of_its_standard_peptides_in_order_to_determine_a_regression { get { return ResourceManager.GetString("IncompleteStandardException_IncompleteStandardException_The_calculator__0__requir" + "es_all_of_its_standard_peptides_in_order_to_determine_a_regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error on line {0}, column {1}: {2}. /// </summary> public static string InsertSmallMoleculeTransitionList_InsertSmallMoleculeTransitionList_Error_on_line__0___column_1____2_ { get { return ResourceManager.GetString("InsertSmallMoleculeTransitionList_InsertSmallMoleculeTransitionList_Error_on_line" + "__0___column_1____2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error on line {0}: {1}. /// </summary> public static string InsertSmallMoleculeTransitionList_InsertSmallMoleculeTransitionList_Error_on_line__0__1_ { get { return ResourceManager.GetString("InsertSmallMoleculeTransitionList_InsertSmallMoleculeTransitionList_Error_on_line" + "__0__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected line in instrument config: {0}. /// </summary> public static string InstrumentInfoUtil_ReadInstrumentConfig_Unexpected_line_in_instrument_config__0__ { get { return ResourceManager.GetString("InstrumentInfoUtil_ReadInstrumentConfig_Unexpected_line_in_instrument_config__0__" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adducts: /// ///Ion formulas may contain an adduct description using the de facto standard notation as seen at http://fiehnlab.ucdavis.edu/staff/kind/Metabolomics/MS-Adduct-Calculator (e.g. &quot;C47H51NO14[M+IsoProp+H]&quot;). /// ///When only the ion charge is known (often the case with fragments) charge-only adducts such as &quot;[M+]&quot; (z=1), &quot;[M-]&quot; (z=-1), &quot;[M+3]&quot; (z=3), etc., may be used. /// ///Multipliers (e.g. the &quot;2&quot; in &quot;[2M+K]&quot;) and isotope labels (e.g. the &quot;2Cl37&quot; in &quot;[M2Cl37+H]&quot;) are supported. /// ///Recognized adduct com [rest of string was truncated]&quot;;. /// </summary> public static string IonInfo_AdductTips_ { get { return ResourceManager.GetString("IonInfo_AdductTips_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ion Mobility Library Files. /// </summary> public static string IonMobilityDb_FILTER_IONMOBILITYLIBRARY_Ion_Mobility_Library_Files { get { return ResourceManager.GetString("IonMobilityDb_FILTER_IONMOBILITYLIBRARY_Ion_Mobility_Library_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading ion mobility library {0}. 
/// </summary> public static string IonMobilityDb_GetIonMobilityDb_Loading_ion_mobility_library__0_ { get { return ResourceManager.GetString("IonMobilityDb_GetIonMobilityDb_Loading_ion_mobility_library__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please provide a path to an existing ion mobility library.. /// </summary> public static string IonMobilityDb_GetIonMobilityDb_Please_provide_a_path_to_an_existing_ion_mobility_library_ { get { return ResourceManager.GetString("IonMobilityDb_GetIonMobilityDb_Please_provide_a_path_to_an_existing_ion_mobility_" + "library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} is not a valid ion mobility library file.. /// </summary> public static string IonMobilityDb_GetIonMobilityDb_The_file__0__is_not_a_valid_ion_mobility_library_file_ { get { return ResourceManager.GetString("IonMobilityDb_GetIonMobilityDb_The_file__0__is_not_a_valid_ion_mobility_library_f" + "ile_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The ion mobility library file {0} could not be found. Perhaps you did not have sufficient privileges to create it?. /// </summary> public static string IonMobilityDb_GetIonMobilityDb_The_ion_mobility_library_file__0__could_not_be_found__Perhaps_you_did_not_have_sufficient_privileges_to_create_it_ { get { return ResourceManager.GetString("IonMobilityDb_GetIonMobilityDb_The_ion_mobility_library_file__0__could_not_be_fou" + "nd__Perhaps_you_did_not_have_sufficient_privileges_to_create_it_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The path containing ion mobility library {0} does not exist.. /// </summary> public static string IonMobilityDb_GetIonMobilityDb_The_path_containing_ion_mobility_library__0__does_not_exist_ { get { return ResourceManager.GetString("IonMobilityDb_GetIonMobilityDb_The_path_containing_ion_mobility_library__0__does_" + "not_exist_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You do not have privileges to access the ion mobility library file {0}. /// </summary> public static string IonMobilityDb_GetIonMobilityDb_You_do_not_have_privileges_to_access_the_ion_mobility_library_file__0_ { get { return ResourceManager.GetString("IonMobilityDb_GetIonMobilityDb_You_do_not_have_privileges_to_access_the_ion_mobil" + "ity_library_file__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to None. /// </summary> public static string IonMobilityFilter_IonMobilityUnitsL10NString_None { get { return ResourceManager.GetString("IonMobilityFilter_IonMobilityUnitsL10NString_None", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1/K0 (Vs/cm^2). /// </summary> public static string IonMobilityFilter_IonMobilityUnitsString__1_K0__Vs_cm_2_ { get { return ResourceManager.GetString("IonMobilityFilter_IonMobilityUnitsString__1_K0__Vs_cm_2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Compensation Voltage (V). /// </summary> public static string IonMobilityFilter_IonMobilityUnitsString_Compensation_Voltage__V_ { get { return ResourceManager.GetString("IonMobilityFilter_IonMobilityUnitsString_Compensation_Voltage__V_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Drift Time (ms). 
/// </summary> public static string IonMobilityFilter_IonMobilityUnitsString_Drift_Time__ms_ { get { return ResourceManager.GetString("IonMobilityFilter_IonMobilityUnitsString_Drift_Time__ms_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fixed window size must be greater than 0.. /// </summary> public static string IonMobilityFilteringUserControl_ValidateFixedWindow_Fixed_window_size_must_be_greater_than_0_ { get { return ResourceManager.GetString("IonMobilityFilteringUserControl_ValidateFixedWindow_Fixed_window_size_must_be_gre" + "ater_than_0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed using results to populate ion mobility library:. /// </summary> public static string IonMobilityFinder_ProcessMSLevel_Failed_using_results_to_populate_ion_mobility_library_ { get { return ResourceManager.GetString("IonMobilityFinder_ProcessMSLevel_Failed_using_results_to_populate_ion_mobility_li" + "brary_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to will be deleted because the libraries they depend on have changed. Do you want to continue?. /// </summary> public static string IonMobilityLibraryList_AcceptList_will_be_deleted_because_the_libraries_they_depend_on_have_changed__Do_you_want_to_continue_ { get { return ResourceManager.GetString("IonMobilityLibraryList_AcceptList_will_be_deleted_because_the_libraries_they_depe" + "nd_on_have_changed__Do_you_want_to_continue_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ion Mobility Libraries:. /// </summary> public static string IonMobilityLibraryList_Label_Ion_Mobility_Libraries_ { get { return ResourceManager.GetString("IonMobilityLibraryList_Label_Ion_Mobility_Libraries_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Ion Mobility Libraries. /// </summary> public static string IonMobilityLibraryList_Title_Edit_Ion_Mobility_Libraries { get { return ResourceManager.GetString("IonMobilityLibraryList_Title_Edit_Ion_Mobility_Libraries", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ion mobility predictors using an ion mobility library must include per-charge regression values.. /// </summary> public static string IonMobilityPredictor_Validate_Ion_mobility_predictors_using_an_ion_mobility_library_must_include_per_charge_regression_values_ { get { return ResourceManager.GetString("IonMobilityPredictor_Validate_Ion_mobility_predictors_using_an_ion_mobility_libra" + "ry_must_include_per_charge_regression_values_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The ion mobility library file {0} could not be found. Perhaps you did not have sufficient privileges to create it?. /// </summary> public static string IonMobilityTest_TestGetIonMobilityDBErrorHandling_The_ion_mobility_library_file__0__could_not_be_found__Perhaps_you_did_not_have_sufficient_privileges_to_create_it_ { get { return ResourceManager.GetString("IonMobilityTest_TestGetIonMobilityDBErrorHandling_The_ion_mobility_library_file__" + "0__could_not_be_found__Perhaps_you_did_not_have_sufficient_privileges_to_create_" + "it_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. 
/// </summary> public static System.Drawing.Bitmap Ions_1 { get { object obj = ResourceManager.GetObject("Ions_1", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Ions_2 { get { object obj = ResourceManager.GetObject("Ions_2", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Ions_A { get { object obj = ResourceManager.GetObject("Ions_A", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Ions_B { get { object obj = ResourceManager.GetObject("Ions_B", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Ions_C { get { object obj = ResourceManager.GetObject("Ions_C", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Ions_fragments { get { object obj = ResourceManager.GetObject("Ions_fragments", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Ions_X { get { object obj = ResourceManager.GetObject("Ions_X", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Ions_Y { get { object obj = ResourceManager.GetObject("Ions_Y", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Ions_Z { get { object obj = ResourceManager.GetObject("Ions_Z", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to custom. /// </summary> public static string IonTypeExtension_LOCALIZED_VALUES_custom { get { return ResourceManager.GetString("IonTypeExtension_LOCALIZED_VALUES_custom", resourceCulture); } } /// <summary> /// Looks up a localized string similar to precursor. /// </summary> public static string IonTypeExtension_LOCALIZED_VALUES_precursor { get { return ResourceManager.GetString("IonTypeExtension_LOCALIZED_VALUES_precursor", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding peptides. /// </summary> public static string IrtDb_AddPeptides_Adding_peptides { get { return ResourceManager.GetString("IrtDb_AddPeptides_Adding_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to iRT Database Files. /// </summary> public static string IrtDb_FILTER_IRTDB_iRT_Database_Files { get { return ResourceManager.GetString("IrtDb_FILTER_IRTDB_iRT_Database_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Database path cannot be null. 
/// </summary> public static string IrtDb_GetIrtDb_Database_path_cannot_be_null { get { return ResourceManager.GetString("IrtDb_GetIrtDb_Database_path_cannot_be_null", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading iRT database {0}. /// </summary> public static string IrtDb_GetIrtDb_Loading_iRT_database__0_ { get { return ResourceManager.GetString("IrtDb_GetIrtDb_Loading_iRT_database__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} could not be created. Perhaps you do not have sufficient privileges.. /// </summary> public static string IrtDb_GetIrtDb_The_file__0__could_not_be_created_Perhaps_you_do_not_have_sufficient_privileges { get { return ResourceManager.GetString("IrtDb_GetIrtDb_The_file__0__could_not_be_created_Perhaps_you_do_not_have_sufficie" + "nt_privileges", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} could not be opened.. /// </summary> public static string IrtDb_GetIrtDb_The_file__0__could_not_be_opened { get { return ResourceManager.GetString("IrtDb_GetIrtDb_The_file__0__could_not_be_opened", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} does not exist.. /// </summary> public static string IrtDb_GetIrtDb_The_file__0__does_not_exist_ { get { return ResourceManager.GetString("IrtDb_GetIrtDb_The_file__0__does_not_exist_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} is not a valid iRT database file.. /// </summary> public static string IrtDb_GetIrtDb_The_file__0__is_not_a_valid_iRT_database_file { get { return ResourceManager.GetString("IrtDb_GetIrtDb_The_file__0__is_not_a_valid_iRT_database_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The path containing {0} does not exist. /// </summary> public static string IrtDb_GetIrtDb_The_path_containing__0__does_not_exist { get { return ResourceManager.GetString("IrtDb_GetIrtDb_The_path_containing__0__does_not_exist", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You do not have privileges to access the file {0}. /// </summary> public static string IrtDb_GetIrtDb_You_do_not_have_privileges_to_access_the_file__0_ { get { return ResourceManager.GetString("IrtDb_GetIrtDb_You_do_not_have_privileges_to_access_the_file__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to iRT standards. /// </summary> public static string IrtDb_MakeDocumentXml_iRT_standards { get { return ResourceManager.GetString("IrtDb_MakeDocumentXml_iRT_standards", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Linear. /// </summary> public static string IrtRegressionType_Linear { get { return ResourceManager.GetString("IrtRegressionType_Linear", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Logarithmic. /// </summary> public static string IrtRegressionType_Logarithmic { get { return ResourceManager.GetString("IrtRegressionType_Logarithmic", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Lowess. /// </summary> public static string IrtRegressionType_Lowess { get { return ResourceManager.GetString("IrtRegressionType_Lowess", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Automatic. 
/// </summary> public static string IrtStandard_AUTO_Automatic { get { return ResourceManager.GetString("IrtStandard_AUTO_Automatic", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No document to import. /// </summary> public static string IrtStandard_DocumentStream_No_document_to_import { get { return ResourceManager.GetString("IrtStandard_DocumentStream_No_document_to_import", resourceCulture); } } /// <summary> /// Looks up a localized string similar to iRT Standards. /// </summary> public static string IrtStandardList_Label_iRT_Standards { get { return ResourceManager.GetString("IrtStandardList_Label_iRT_Standards", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit iRT Standards. /// </summary> public static string IrtStandardList_Title_Edit_iRT_Standards { get { return ResourceManager.GetString("IrtStandardList_Title_Edit_iRT_Standards", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation scheme can specify multiplexed windows only for prespecified isolation windows. /// </summary> public static string IsolationScheme_DoValidate_Isolation_scheme_can_specify_multiplexed_windows_only_for_prespecified_isolation_windows { get { return ResourceManager.GetString("IsolationScheme_DoValidate_Isolation_scheme_can_specify_multiplexed_windows_only_" + "for_prespecified_isolation_windows", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation scheme cannot have a filter and a prespecified isolation window. /// </summary> public static string IsolationScheme_DoValidate_Isolation_scheme_cannot_have_a_filter_and_a_prespecifed_isolation_window { get { return ResourceManager.GetString("IsolationScheme_DoValidate_Isolation_scheme_cannot_have_a_filter_and_a_prespecife" + "d_isolation_window", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation scheme cannot have a right filter without a left filter. /// </summary> public static string IsolationScheme_DoValidate_Isolation_scheme_cannot_have_a_right_filter_without_a_left_filter { get { return ResourceManager.GetString("IsolationScheme_DoValidate_Isolation_scheme_cannot_have_a_right_filter_without_a_" + "left_filter", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation scheme for all ions cannot contain isolation windows. /// </summary> public static string IsolationScheme_DoValidate_Isolation_scheme_for_all_ions_cannot_contain_isolation_windows { get { return ResourceManager.GetString("IsolationScheme_DoValidate_Isolation_scheme_for_all_ions_cannot_contain_isolation" + "_windows", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Multiplexed windows require at least one window per scan. /// </summary> public static string IsolationScheme_DoValidate_Multiplexed_windows_require_at_least_one_window_per_scan { get { return ResourceManager.GetString("IsolationScheme_DoValidate_Multiplexed_windows_require_at_least_one_window_per_sc" + "an", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Special handling applies only to prespecified isolation windows. 
/// </summary> public static string IsolationScheme_DoValidate_Special_handling_applies_only_to_prespecified_isolation_windows { get { return ResourceManager.GetString("IsolationScheme_DoValidate_Special_handling_applies_only_to_prespecified_isolatio" + "n_windows", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The number of prespecified isolation windows must be a multiple of the windows per scan in multiplexed sampling.. /// </summary> public static string IsolationScheme_DoValidate_The_number_of_prespecified_isolation_windows_must_be_a_multiple_of_the_windows_per_scan_in_multiplexed_sampling { get { return ResourceManager.GetString("IsolationScheme_DoValidate_The_number_of_prespecified_isolation_windows_must_be_a" + "_multiple_of_the_windows_per_scan_in_multiplexed_sampling", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The precursor m/z filter must be between {0} and {1}. /// </summary> public static string IsolationScheme_DoValidate_The_precursor_m_z_filter_must_be_between__0__and__1_ { get { return ResourceManager.GetString("IsolationScheme_DoValidate_The_precursor_m_z_filter_must_be_between__0__and__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected name &apos;{0}&apos; for {1} isolation scheme. /// </summary> public static string IsolationScheme_DoValidate_Unexpected_name___0___for__1__isolation_scheme { get { return ResourceManager.GetString("IsolationScheme_DoValidate_Unexpected_name___0___for__1__isolation_scheme", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Windows per scan requires multiplexed isolation windows. /// </summary> public static string IsolationScheme_DoValidate_Windows_per_scan_requires_multiplexed_isolation_windows { get { return ResourceManager.GetString("IsolationScheme_DoValidate_Windows_per_scan_requires_multiplexed_isolation_window" + "s", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Results (0.5 margin). /// </summary> public static string IsolationSchemeList_GetDefaults_Results__0_5_margin_ { get { return ResourceManager.GetString("IsolationSchemeList_GetDefaults_Results__0_5_margin_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Results only. /// </summary> public static string IsolationSchemeList_GetDefaults_Results_only { get { return ResourceManager.GetString("IsolationSchemeList_GetDefaults_Results_only", resourceCulture); } } /// <summary> /// Looks up a localized string similar to SWATH (15 m/z). /// </summary> public static string IsolationSchemeList_GetDefaults_SWATH__15_m_z_ { get { return ResourceManager.GetString("IsolationSchemeList_GetDefaults_SWATH__15_m_z_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to SWATH (25 m/z). /// </summary> public static string IsolationSchemeList_GetDefaults_SWATH__25_m_z_ { get { return ResourceManager.GetString("IsolationSchemeList_GetDefaults_SWATH__25_m_z_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to SWATH (VW 100). /// </summary> public static string IsolationSchemeList_GetDefaults_SWATH__VW_100_ { get { return ResourceManager.GetString("IsolationSchemeList_GetDefaults_SWATH__VW_100_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to SWATH (VW 64). 
/// </summary> public static string IsolationSchemeList_GetDefaults_SWATH__VW_64_ { get { return ResourceManager.GetString("IsolationSchemeList_GetDefaults_SWATH__VW_64_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Isolation scheme:. /// </summary> public static string IsolationSchemeList_Label_Isolation_scheme { get { return ResourceManager.GetString("IsolationSchemeList_Label_Isolation_scheme", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Isolation Scheme. /// </summary> public static string IsolationSchemeList_Title_Edit_Isolation_Scheme { get { return ResourceManager.GetString("IsolationSchemeList_Title_Edit_Isolation_Scheme", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reading isolation scheme from {0}. /// </summary> public static string IsolationSchemeReader_ReadIsolationRangesFromFiles_Reading_isolation_scheme_from__0_ { get { return ResourceManager.GetString("IsolationSchemeReader_ReadIsolationRangesFromFiles_Reading_isolation_scheme_from_" + "_0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fixed. /// </summary> public static string IsolationWidthType_FIXED_Fixed { get { return ResourceManager.GetString("IsolationWidthType_FIXED_Fixed", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Results. /// </summary> public static string IsolationWidthType_RESULTS_Results { get { return ResourceManager.GetString("IsolationWidthType_RESULTS_Results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Results with margin. /// </summary> public static string IsolationWidthType_RESULTS_WITH_MARGIN_Results_with_margin { get { return ResourceManager.GetString("IsolationWidthType_RESULTS_WITH_MARGIN_Results_with_margin", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation window End must be between {0} and {1}.. /// </summary> public static string IsolationWindow_DoValidate_Isolation_window_End_must_be_between__0__and__1__ { get { return ResourceManager.GetString("IsolationWindow_DoValidate_Isolation_window_End_must_be_between__0__and__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation window margin must be non-negative.. /// </summary> public static string IsolationWindow_DoValidate_Isolation_window_margin_must_be_non_negative { get { return ResourceManager.GetString("IsolationWindow_DoValidate_Isolation_window_margin_must_be_non_negative", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation window margins cover the entire isolation window at the extremes of the instrument range.. /// </summary> public static string IsolationWindow_DoValidate_Isolation_window_margins_cover_the_entire_isolation_window_at_the_extremes_of_the_instrument_range { get { return ResourceManager.GetString("IsolationWindow_DoValidate_Isolation_window_margins_cover_the_entire_isolation_wi" + "ndow_at_the_extremes_of_the_instrument_range", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation window Start must be between {0} and {1}.. 
/// </summary> public static string IsolationWindow_DoValidate_Isolation_window_Start_must_be_between__0__and__1__ { get { return ResourceManager.GetString("IsolationWindow_DoValidate_Isolation_window_Start_must_be_between__0__and__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation window Start value is greater than the End value.. /// </summary> public static string IsolationWindow_DoValidate_Isolation_window_Start_value_is_greater_than_the_End_value { get { return ResourceManager.GetString("IsolationWindow_DoValidate_Isolation_window_Start_value_is_greater_than_the_End_v" + "alue", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Target value is not within the range of the isolation window.. /// </summary> public static string IsolationWindow_DoValidate_Target_value_is_not_within_the_range_of_the_isolation_window { get { return ResourceManager.GetString("IsolationWindow_DoValidate_Target_value_is_not_within_the_range_of_the_isolation_" + "window", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isolation window requires a Target value.. /// </summary> public static string IsolationWindow_TargetMatches_Isolation_window_requires_a_Target_value { get { return ResourceManager.GetString("IsolationWindow_TargetMatches_Isolation_window_requires_a_Target_value", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Minimum abundance {0} too high. /// </summary> public static string IsotopeDistInfo_IsotopeDistInfo_Minimum_abundance__0__too_high { get { return ResourceManager.GetString("IsotopeDistInfo_IsotopeDistInfo_Minimum_abundance__0__too_high", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Atom percent enrichment {0} must be between {1} and {2}. /// </summary> public static string IsotopeEnrichmentItem_DoValidate_Atom_percent_enrichment__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("IsotopeEnrichmentItem_DoValidate_Atom_percent_enrichment__0__must_be_between__1__" + "and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isotope enrichment is not supported for the symbol {0}. /// </summary> public static string IsotopeEnrichmentItem_DoValidate_Isotope_enrichment_is_not_supported_for_the_symbol__0__ { get { return ResourceManager.GetString("IsotopeEnrichmentItem_DoValidate_Isotope_enrichment_is_not_supported_for_the_symb" + "ol__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} = {1}%. /// </summary> public static string IsotopeEnrichmentItem_ToString__0__1__Percent { get { return ResourceManager.GetString("IsotopeEnrichmentItem_ToString__0__1__Percent", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Default. /// </summary> public static string IsotopeEnrichments_DEFAULT_Default { get { return ResourceManager.GetString("IsotopeEnrichments_DEFAULT_Default", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Isotope labeling enrichment:. /// </summary> public static string IsotopeEnrichmentsList_Label_Isotope_labeling_entrichment { get { return ResourceManager.GetString("IsotopeEnrichmentsList_Label_Isotope_labeling_entrichment", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Isotope Labeling Enrichments. 
/// </summary> public static string IsotopeEnrichmentsList_Title_Edit_Isotope_Labeling_Enrichments { get { return ResourceManager.GetString("IsotopeEnrichmentsList_Title_Edit_Isotope_Labeling_Enrichments", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Permuting isotope modifications. /// </summary> public static string IsotopeModificationPermuter_PermuteIsotopeModifications_Permuting_isotope_modifications { get { return ResourceManager.GetString("IsotopeModificationPermuter_PermuteIsotopeModifications_Permuting_isotope_modific" + "ations", resourceCulture); } } /// <summary> /// Looks up a localized string similar to KDE Aligner. /// </summary> public static string KdeAlignerFactory_ToString_KDE_Aligner { get { return ResourceManager.GetString("KdeAlignerFactory_ToString_KDE_Aligner", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Keep { get { object obj = ResourceManager.GetObject("Keep", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to LabelType. /// </summary> public static string LabelTypeComboDriver_LabelTypeComboDriver_LabelType { get { return ResourceManager.GetString("LabelTypeComboDriver_LabelTypeComboDriver_LabelType", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &lt;Edit list...&gt;. /// </summary> public static string LabelTypeComboDriver_LoadList_Edit_list { get { return ResourceManager.GetString("LabelTypeComboDriver_LoadList_Edit_list", resourceCulture); } } /// <summary> /// Looks up a localized string similar to I&amp;nternal standard type:. /// </summary> public static string LabelTypeComboDriver_LoadList_Internal_standard_type { get { return ResourceManager.GetString("LabelTypeComboDriver_LoadList_Internal_standard_type", resourceCulture); } } /// <summary> /// Looks up a localized string similar to I&amp;nternal standard types:. /// </summary> public static string LabelTypeComboDriver_LoadList_Internal_standard_types { get { return ResourceManager.GetString("LabelTypeComboDriver_LoadList_Internal_standard_types", resourceCulture); } } /// <summary> /// Looks up a localized string similar to none. /// </summary> public static string LabelTypeComboDriver_LoadList_none { get { return ResourceManager.GetString("LabelTypeComboDriver_LoadList_none", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap LabKey { get { object obj = ResourceManager.GetObject("LabKey", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Identified count. /// </summary> public static string LegacyIdentifiedCountCalc_LegacyIdentifiedCountCalc_Legacy_identified_count { get { return ResourceManager.GetString("LegacyIdentifiedCountCalc_LegacyIdentifiedCountCalc_Legacy_identified_count", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Log co-eluting area. /// </summary> public static string LegacyLogUnforcedAreaCalc_LegacyLogUnforcedAreaCalc_Legacy_log_unforced_area { get { return ResourceManager.GetString("LegacyLogUnforcedAreaCalc_LegacyLogUnforcedAreaCalc_Legacy_log_unforced_area", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Default. 
/// </summary> public static string LegacyScoringModel_DEFAULT_NAME_Default { get { return ResourceManager.GetString("LegacyScoringModel_DEFAULT_NAME_Default", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Legacy scoring model is not trained.. /// </summary> public static string LegacyScoringModel_DoValidate_Legacy_scoring_model_is_not_trained_ { get { return ResourceManager.GetString("LegacyScoringModel_DoValidate_Legacy_scoring_model_is_not_trained_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid legacy model.. /// </summary> public static string LegacyScoringModel_ReadXml_Invalid_legacy_model_ { get { return ResourceManager.GetString("LegacyScoringModel_ReadXml_Invalid_legacy_model_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Co-elution count. /// </summary> public static string LegacyUnforcedCountScoreCalc_LegacyUnforcedCountScoreCalc_Legacy_unforced_count { get { return ResourceManager.GetString("LegacyUnforcedCountScoreCalc_LegacyUnforcedCountScoreCalc_Legacy_unforced_count", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Default co-elution count. /// </summary> public static string LegacyUnforcedCountScoreDefaultCalc_Name_Default_co_elution_count { get { return ResourceManager.GetString("LegacyUnforcedCountScoreDefaultCalc_Name_Default_co_elution_count", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reference co-elution count. /// </summary> public static string LegacyUnforcedCountScoreStandardCalc_LegacyUnforcedCountScoreStandardCalc_Legacy_unforced_count_standard { get { return ResourceManager.GetString("LegacyUnforcedCountScoreStandardCalc_LegacyUnforcedCountScoreStandardCalc_Legacy_" + "unforced_count_standard", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The number &apos;{0}&apos; is not in the correct format.. /// </summary> public static string LibKeyModificationMatcher_EnumerateSequenceInfos_The_number___0___is_not_in_the_correct_format_ { get { return ResourceManager.GetString("LibKeyModificationMatcher_EnumerateSequenceInfos_The_number___0___is_not_in_the_c" + "orrect_format_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Data truncation in library header. File may be corrupted.. /// </summary> public static string Library_ReadComplete_Data_truncation_in_library_header_File_may_be_corrupted { get { return ResourceManager.GetString("Library_ReadComplete_Data_truncation_in_library_header_File_may_be_corrupted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} distinct CiRT peptides were found. How many would you like to use as iRT standards?. /// </summary> public static string LibraryBuildNotificationHandler_AddIrts__0__distinct_CiRT_peptides_were_found__How_many_would_you_like_to_use_as_iRT_standards_ { get { return ResourceManager.GetString("LibraryBuildNotificationHandler_AddIrts__0__distinct_CiRT_peptides_were_found__Ho" + "w_many_would_you_like_to_use_as_iRT_standards_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding iRTs to library. 
/// </summary> public static string LibraryBuildNotificationHandler_AddIrts_Adding_iRTs_to_library { get { return ResourceManager.GetString("LibraryBuildNotificationHandler_AddIrts_Adding_iRTs_to_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred trying to add iRTs to the library.. /// </summary> public static string LibraryBuildNotificationHandler_AddIrts_An_error_occurred_trying_to_add_iRTs_to_the_library_ { get { return ResourceManager.GetString("LibraryBuildNotificationHandler_AddIrts_An_error_occurred_trying_to_add_iRTs_to_t" + "he_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading library. /// </summary> public static string LibraryBuildNotificationHandler_AddIrts_Loading_library { get { return ResourceManager.GetString("LibraryBuildNotificationHandler_AddIrts_Loading_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading retention time providers. /// </summary> public static string LibraryBuildNotificationHandler_AddIrts_Loading_retention_time_providers { get { return ResourceManager.GetString("LibraryBuildNotificationHandler_AddIrts_Loading_retention_time_providers", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Processing retention times. /// </summary> public static string LibraryBuildNotificationHandler_AddIrts_Processing_retention_times { get { return ResourceManager.GetString("LibraryBuildNotificationHandler_AddIrts_Processing_retention_times", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add retention time predictor. /// </summary> public static string LibraryBuildNotificationHandler_AddRetentionTimePredictor_Add_retention_time_predictor { get { return ResourceManager.GetString("LibraryBuildNotificationHandler_AddRetentionTimePredictor_Add_retention_time_pred" + "ictor", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding iRT Database. /// </summary> public static string LibraryGridViewDriver_AddIrtDatabase_Adding_iRT_Database { get { return ResourceManager.GetString("LibraryGridViewDriver_AddIrtDatabase_Adding_iRT_Database", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to load the iRT database file {0}.. /// </summary> public static string LibraryGridViewDriver_AddIrtDatabase_An_error_occurred_attempting_to_load_the_iRT_database_file__0__ { get { return ResourceManager.GetString("LibraryGridViewDriver_AddIrtDatabase_An_error_occurred_attempting_to_load_the_iRT" + "_database_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding optimization library. /// </summary> public static string LibraryGridViewDriver_AddOptimizationLibrary_Adding_optimization_library { get { return ResourceManager.GetString("LibraryGridViewDriver_AddOptimizationLibrary_Adding_optimization_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding optimization values from {0}. /// </summary> public static string LibraryGridViewDriver_AddOptimizationLibrary_Adding_optimization_values_from__0_ { get { return ResourceManager.GetString("LibraryGridViewDriver_AddOptimizationLibrary_Adding_optimization_values_from__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to load the optimization library file {0}.. 
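/// A hypothetical usage sketch (libraryPath is an illustrative name, not from the original source):
/// string message = string.Format(Resources.LibraryGridViewDriver_AddOptimizationLibrary_An_error_occurred_attempting_to_load_the_optimization_library_file__0__, libraryPath);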
/// </summary> public static string LibraryGridViewDriver_AddOptimizationLibrary_An_error_occurred_attempting_to_load_the_optimization_library_file__0__ { get { return ResourceManager.GetString("LibraryGridViewDriver_AddOptimizationLibrary_An_error_occurred_attempting_to_load" + "_the_optimization_library_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A single run does not have high enough correlation to the existing iRT values to allow retention time conversion.. /// </summary> public static string LibraryGridViewDriver_AddProcessedIrts_A_single_run_does_not_have_high_enough_correlation_to_the_existing_iRT_values_to_allow_retention_time_conversion { get { return ResourceManager.GetString("LibraryGridViewDriver_AddProcessedIrts_A_single_run_does_not_have_high_enough_cor" + "relation_to_the_existing_iRT_values_to_allow_retention_time_conversion", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Correlation to the existing iRT values is not high enough to allow retention time conversion.. /// </summary> public static string LibraryGridViewDriver_AddProcessedIrts_Correlation_to_the_existing_iRT_values_are_not_high_enough_to_allow_retention_time_conversion { get { return ResourceManager.GetString("LibraryGridViewDriver_AddProcessedIrts_Correlation_to_the_existing_iRT_values_are" + "_not_high_enough_to_allow_retention_time_conversion", resourceCulture); } } /// <summary> /// Looks up a localized string similar to None of {0} runs were found with high enough correlation to the existing iRT values to allow retention time conversion.. /// </summary> public static string LibraryGridViewDriver_AddProcessedIrts_None_of__0__runs_were_found_with_high_enough_correlation_to_the_existing_iRT_values_to_allow_retention_time_conversion { get { return ResourceManager.GetString("LibraryGridViewDriver_AddProcessedIrts_None_of__0__runs_were_found_with_high_enou" + "gh_correlation_to_the_existing_iRT_values_to_allow_retention_time_conversion", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding Results. /// </summary> public static string LibraryGridViewDriver_AddResults_Adding_Results { get { return ResourceManager.GetString("LibraryGridViewDriver_AddResults_Adding_Results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding retention times from imported results. /// </summary> public static string LibraryGridViewDriver_AddResults_Adding_retention_times_from_imported_results { get { return ResourceManager.GetString("LibraryGridViewDriver_AddResults_Adding_retention_times_from_imported_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to add results from the current document.. /// </summary> public static string LibraryGridViewDriver_AddResults_An_error_occurred_attempting_to_add_results_from_current_document { get { return ResourceManager.GetString("LibraryGridViewDriver_AddResults_An_error_occurred_attempting_to_add_results_from" + "_current_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The active document must contain results in order to add iRT values..
/// </summary> public static string LibraryGridViewDriver_AddResults_The_active_document_must_contain_results_in_order_to_add_iRT_values { get { return ResourceManager.GetString("LibraryGridViewDriver_AddResults_The_active_document_must_contain_results_in_orde" + "r_to_add_iRT_values", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The active document must contain results in order to add optimized values.. /// </summary> public static string LibraryGridViewDriver_AddResults_The_active_document_must_contain_results_in_order_to_add_optimized_values_ { get { return ResourceManager.GetString("LibraryGridViewDriver_AddResults_The_active_document_must_contain_results_in_orde" + "r_to_add_optimized_values_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding retention times from {0}. /// </summary> public static string LibraryGridViewDriver_AddSpectralLibrary_Adding_retention_times_from__0__ { get { return ResourceManager.GetString("LibraryGridViewDriver_AddSpectralLibrary_Adding_retention_times_from__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding Spectral Library. /// </summary> public static string LibraryGridViewDriver_AddSpectralLibrary_Adding_Spectral_Library { get { return ResourceManager.GetString("LibraryGridViewDriver_AddSpectralLibrary_Adding_Spectral_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to load the library file {0}.. /// </summary> public static string LibraryGridViewDriver_AddSpectralLibrary_An_error_occurred_attempting_to_load_the_library_file__0__ { get { return ResourceManager.GetString("LibraryGridViewDriver_AddSpectralLibrary_An_error_occurred_attempting_to_load_the" + "_library_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The library {0} does not contain retention time information.. /// </summary> public static string LibraryGridViewDriver_AddSpectralLibrary_The_library__0__does_not_contain_retention_time_information { get { return ResourceManager.GetString("LibraryGridViewDriver_AddSpectralLibrary_The_library__0__does_not_contain_retenti" + "on_time_information", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred while recalibrating.. /// </summary> public static string LibraryGridViewDriver_AddToLibrary_An_error_occurred_while_recalibrating_ { get { return ResourceManager.GetString("LibraryGridViewDriver_AddToLibrary_An_error_occurred_while_recalibrating_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you want to recalibrate the iRT standard values relative to the peptides being added?. /// </summary> public static string LibraryGridViewDriver_AddToLibrary_Do_you_want_to_recalibrate_the_iRT_standard_values_relative_to_the_peptides_being_added_ { get { return ResourceManager.GetString("LibraryGridViewDriver_AddToLibrary_Do_you_want_to_recalibrate_the_iRT_standard_va" + "lues_relative_to_the_peptides_being_added_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Recalibrate iRT Standard Peptides. 
/// </summary> public static string LibraryGridViewDriver_AddToLibrary_Recalibrate_iRT_Standard_Peptides { get { return ResourceManager.GetString("LibraryGridViewDriver_AddToLibrary_Recalibrate_iRT_Standard_Peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Recalibrating iRT standard peptides and reprocessing iRT values. /// </summary> public static string LibraryGridViewDriver_AddToLibrary_Recalibrating_iRT_standard_peptides_and_reprocessing_iRT_values { get { return ResourceManager.GetString("LibraryGridViewDriver_AddToLibrary_Recalibrating_iRT_standard_peptides_and_reproc" + "essing_iRT_values", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This can improve retention time alignment under stable chromatographic conditions.. /// </summary> public static string LibraryGridViewDriver_AddToLibrary_This_can_improve_retention_time_alignment_under_stable_chromatographic_conditions_ { get { return ResourceManager.GetString("LibraryGridViewDriver_AddToLibrary_This_can_improve_retention_time_alignment_unde" + "r_stable_chromatographic_conditions_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is already an optimization with sequence &apos;{0}&apos; and product ion &apos;{2}&apos; in the list.. /// </summary> public static string LibraryGridViewDriver_DoCellValidating_There_is_already_an_optimization_with_sequence___0___and_product_ion___2___in_the_list_ { get { return ResourceManager.GetString("LibraryGridViewDriver_DoCellValidating_There_is_already_an_optimization_with_sequ" + "ence___0___and_product_ion___2___in_the_list_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The peptide {0} is already present in the {1} table, and may not be pasted into the {2} table.. /// </summary> public static string LibraryGridViewDriver_DoPaste_The_peptide__0__is_already_present_in_the__1__table__and_may_not_be_pasted_into_the__2__table { get { return ResourceManager.GetString("LibraryGridViewDriver_DoPaste_The_peptide__0__is_already_present_in_the__1__table" + "__and_may_not_be_pasted_into_the__2__table", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding retention times. /// </summary> public static string LibraryGridViewDriver_ProcessRetentionTimes_Adding_retention_times { get { return ResourceManager.GetString("LibraryGridViewDriver_ProcessRetentionTimes_Adding_retention_times", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Converting retention times from {0}. /// </summary> public static string LibraryGridViewDriver_ProcessRetentionTimes_Converting_retention_times_from__0__ { get { return ResourceManager.GetString("LibraryGridViewDriver_ProcessRetentionTimes_Converting_retention_times_from__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product m/zs must be greater than zero.. /// </summary> public static string LibraryGridViewDriver_ValidateMz_Product_m_zs_must_be_greater_than_zero_ { get { return ResourceManager.GetString("LibraryGridViewDriver_ValidateMz_Product_m_zs_must_be_greater_than_zero_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing product ion on line {1}.. 
/// </summary> public static string LibraryGridViewDriver_ValidateOptimizationRow_Missing_product_ion_on_line__1_ { get { return ResourceManager.GetString("LibraryGridViewDriver_ValidateOptimizationRow_Missing_product_ion_on_line__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The pasted text must contain the same number of columns as the table.. /// </summary> public static string LibraryGridViewDriver_ValidateOptimizationRow_The_pasted_text_must_contain_the_same_number_of_columns_as_the_table_ { get { return ResourceManager.GetString("LibraryGridViewDriver_ValidateOptimizationRow_The_pasted_text_must_contain_the_sa" + "me_number_of_columns_as_the_table_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Optimized values must be greater than zero.. /// </summary> public static string LibraryGridViewDriver_ValidateOptimizedValue_Optimized_values_must_be_greater_than_zero_ { get { return ResourceManager.GetString("LibraryGridViewDriver_ValidateOptimizedValue_Optimized_values_must_be_greater_tha" + "n_zero_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Optimized values must be valid decimal numbers.. /// </summary> public static string LibraryGridViewDriver_ValidateOptimizedValue_Optimized_values_must_be_valid_decimal_numbers_ { get { return ResourceManager.GetString("LibraryGridViewDriver_ValidateOptimizedValue_Optimized_values_must_be_valid_decim" + "al_numbers_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product ion {0} is invalid.. /// </summary> public static string LibraryGridViewDriver_ValidateProductIon_Product_ion__0__is_invalid_ { get { return ResourceManager.GetString("LibraryGridViewDriver_ValidateProductIon_Product_ion__0__is_invalid_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product ion cannot be empty.. /// </summary> public static string LibraryGridViewDriver_ValidateProductIon_Product_ion_cannot_be_empty_ { get { return ResourceManager.GetString("LibraryGridViewDriver_ValidateProductIon_Product_ion_cannot_be_empty_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid product ion format {0} on line {1}.. /// </summary> public static string LibraryGridViewDriver_ValidateRow_Invalid_product_ion_format__0__on_line__1__ { get { return ResourceManager.GetString("LibraryGridViewDriver_ValidateRow_Invalid_product_ion_format__0__on_line__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Sequence cannot be empty.. /// </summary> public static string LibraryGridViewDriver_ValidateSequence_Sequence_cannot_be_empty_ { get { return ResourceManager.GetString("LibraryGridViewDriver_ValidateSequence_Sequence_cannot_be_empty_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Updating library settings for {0}. /// </summary> public static string LibraryManager_LoadBackground_Updating_library_settings_for__0_ { get { return ResourceManager.GetString("LibraryManager_LoadBackground_Updating_library_settings_for__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add spectral library. /// </summary> public static string LibrarySpec_Add_spectral_library { get { return ResourceManager.GetString("LibrarySpec_Add_spectral_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unrecognized library type at {0}. 
/// </summary> public static string LibrarySpec_CreateFromPath_Unrecognized_library_type_at__0_ { get { return ResourceManager.GetString("LibrarySpec_CreateFromPath_Unrecognized_library_type_at__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Spectrum count. /// </summary> public static string LibrarySpec_PEP_RANK_COPIES_Spectrum_count { get { return ResourceManager.GetString("LibrarySpec_PEP_RANK_COPIES_Spectrum_count", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Picked intensity. /// </summary> public static string LibrarySpec_PEP_RANK_PICKED_INTENSITY_Picked_intensity { get { return ResourceManager.GetString("LibrarySpec_PEP_RANK_PICKED_INTENSITY_Picked_intensity", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Total intensity. /// </summary> public static string LibrarySpec_PEP_RANK_TOTAL_INTENSITY_Total_intensity { get { return ResourceManager.GetString("LibrarySpec_PEP_RANK_TOTAL_INTENSITY_Total_intensity", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Document library specs cannot be persisted to XML.. /// </summary> public static string LibrarySpec_WriteXml_Document_library_specs_cannot_be_persisted_to_XML_ { get { return ResourceManager.GetString("LibrarySpec_WriteXml_Document_library_specs_cannot_be_persisted_to_XML_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Document local library specs cannot be persisted to XML.. /// </summary> public static string LibrarySpec_WriteXml_Document_local_library_specs_cannot_be_persisted_to_XML { get { return ResourceManager.GetString("LibrarySpec_WriteXml_Document_local_library_specs_cannot_be_persisted_to_XML", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to do a regression in log space because one or more points are non-positive.. /// </summary> public static string LinearInLogSpace_FitPoints_Unable_to_do_a_regression_in_log_space_because_one_or_more_points_are_non_positive_ { get { return ResourceManager.GetString("LinearInLogSpace_FitPoints_Unable_to_do_a_regression_in_log_space_because_one_or_" + "more_points_are_non_positive_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Linear in Log Space. /// </summary> public static string LinearInLogSpace_Label_Linear_in_Log_Space { get { return ResourceManager.GetString("LinearInLogSpace_Label_Linear_in_Log_Space", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Every calculator in the model either has an unknown value, or takes on only one value.. /// </summary> public static string LinearModelParams_RescaleParameters_Every_calculator_in_the_model_either_has_an_unknown_value__or_takes_on_only_one_value_ { get { return ResourceManager.GetString("LinearModelParams_RescaleParameters_Every_calculator_in_the_model_either_has_an_u" + "nknown_value__or_takes_on_only_one_value_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempted to score a peak with {0} features using a model with {1} trained scores.. /// </summary> public static string LinearModelParams_Score_Attempted_to_score_a_peak_with__0__features_using_a_model_with__1__trained_scores_ { get { return ResourceManager.GetString("LinearModelParams_Score_Attempted_to_score_a_peak_with__0__features_using_a_model" + "_with__1__trained_scores_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0}, line {1}.. 
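/// A hypothetical usage sketch (message and lineNumber are illustrative names, not from the original source):
/// string formatted = string.Format(Resources.LineColNumberedIoException_FormatMessage__0___line__1__, message, lineNumber);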
/// </summary> public static string LineColNumberedIoException_FormatMessage__0___line__1__ { get { return ResourceManager.GetString("LineColNumberedIoException_FormatMessage__0___line__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0}, line {1}, col {2}.. /// </summary> public static string LineColNumberedIoException_FormatMessage__0___line__1___col__2__ { get { return ResourceManager.GetString("LineColNumberedIoException_FormatMessage__0___line__1___col__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to (fixed). /// </summary> public static string ListBoxModification_ToString__fixed_ { get { return ResourceManager.GetString("ListBoxModification_ToString__fixed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to (isotopic label). /// </summary> public static string ListBoxModification_ToString__isotopic_label_ { get { return ResourceManager.GetString("ListBoxModification_ToString__isotopic_label_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to (variable). /// </summary> public static string ListBoxModification_ToString__variable_ { get { return ResourceManager.GetString("ListBoxModification_ToString__variable_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No such list {0}. /// </summary> public static string ListColumnPropertyDescriptor_ChangeListData_No_such_list__0_ { get { return ResourceManager.GetString("ListColumnPropertyDescriptor_ChangeListData_No_such_list__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to List has been deleted. /// </summary> public static string ListColumnPropertyDescriptor_SetValue_List_has_been_deleted { get { return ResourceManager.GetString("ListColumnPropertyDescriptor_SetValue_List_has_been_deleted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to List item has been deleted.. /// </summary> public static string ListColumnPropertyDescriptor_SetValue_List_item_has_been_deleted_ { get { return ResourceManager.GetString("ListColumnPropertyDescriptor_SetValue_List_item_has_been_deleted_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Lists. /// </summary> public static string ListDefList_Label_Lists { get { return ResourceManager.GetString("ListDefList_Label_Lists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Define Lists. /// </summary> public static string ListDefList_Title_Define_Lists { get { return ResourceManager.GetString("ListDefList_Title_Define_Lists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Duplicate property name. /// </summary> public static string ListDesigner_OkDialog_Duplicate_property_name { get { return ResourceManager.GetString("ListDesigner_OkDialog_Duplicate_property_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No such property. /// </summary> public static string ListDesigner_OkDialog_No_such_property { get { return ResourceManager.GetString("ListDesigner_OkDialog_No_such_property", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is already a list named &apos;{0}&apos;.. 
/// </summary> public static string ListDesigner_OkDialog_There_is_already_a_list_named___0___ { get { return ResourceManager.GetString("ListDesigner_OkDialog_There_is_already_a_list_named___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There was an error trying to apply this list definition to the original data:. /// </summary> public static string ListDesigner_OkDialog_There_was_an_error_trying_to_apply_this_list_definition_to_the_original_data_ { get { return ResourceManager.GetString("ListDesigner_OkDialog_There_was_an_error_trying_to_apply_this_list_definition_to_" + "the_original_data_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Column &apos;{0}&apos; does not exist.. /// </summary> public static string ListExceptionDetail_ColumnNotFound_Column___0___does_not_exist_ { get { return ResourceManager.GetString("ListExceptionDetail_ColumnNotFound_Column___0___does_not_exist_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Duplicate value &apos;{1}&apos; found in column &apos;{0}&apos;.. /// </summary> public static string ListExceptionDetail_DuplicateValue_Duplicate_value___1___found_in_column___0___ { get { return ResourceManager.GetString("ListExceptionDetail_DuplicateValue_Duplicate_value___1___found_in_column___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid value &apos;{1}&apos; for column &apos;{0}&apos;.. /// </summary> public static string ListExceptionDetail_InvalidValue_Invalid_value___1___for_column___0___ { get { return ResourceManager.GetString("ListExceptionDetail_InvalidValue_Invalid_value___1___for_column___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Column &apos;{0}&apos; cannot be blank.. /// </summary> public static string ListExceptionDetail_NullValue_Column___0___cannot_be_blank_ { get { return ResourceManager.GetString("ListExceptionDetail_NullValue_Column___0___cannot_be_blank_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to List: . /// </summary> public static string ListGridForm_BindingListSourceOnListChanged_List__ { get { return ResourceManager.GetString("ListGridForm_BindingListSourceOnListChanged_List__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Number. /// </summary> public static string ListPropertyType_GetAnnotationTypeName_Number { get { return ResourceManager.GetString("ListPropertyType_GetAnnotationTypeName_Number", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Text. /// </summary> public static string ListPropertyType_GetAnnotationTypeName_Text { get { return ResourceManager.GetString("ListPropertyType_GetAnnotationTypeName_Text", resourceCulture); } } /// <summary> /// Looks up a localized string similar to True/False. /// </summary> public static string ListPropertyType_GetAnnotationTypeName_True_False { get { return ResourceManager.GetString("ListPropertyType_GetAnnotationTypeName_True_False", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Value List. /// </summary> public static string ListPropertyType_GetAnnotationTypeName_Value_List { get { return ResourceManager.GetString("ListPropertyType_GetAnnotationTypeName_Value_List", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Lookup: . 
/// </summary> public static string ListPropertyType_Label_Lookup__ { get { return ResourceManager.GetString("ListPropertyType_Label_Lookup__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add new item to list &apos;{0}&apos;. /// </summary> public static string ListViewContext_CommitAddNew_Add_new_item_to_list___0__ { get { return ResourceManager.GetString("ListViewContext_CommitAddNew_Add_new_item_to_list___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to delete the {0} selected items from the list &apos;{1}&apos;?. /// </summary> public static string ListViewContext_Delete_Are_you_sure_you_want_to_delete_the__0__selected_items_from_the_list___1___ { get { return ResourceManager.GetString("ListViewContext_Delete_Are_you_sure_you_want_to_delete_the__0__selected_items_fro" + "m_the_list___1___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Delete from list &apos;{0}&apos;. /// </summary> public static string ListViewContext_Delete_Delete_from_list___0__ { get { return ResourceManager.GetString("ListViewContext_Delete_Delete_from_list___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Press OK to continue editing your row, or Cancel to throw away the new row.. /// </summary> public static string ListViewContext_ValidateNewRow_Press_OK_to_continue_editing_your_row__or_Cancel_to_throw_away_the_new_row_ { get { return ResourceManager.GetString("ListViewContext_ValidateNewRow_Press_OK_to_continue_editing_your_row__or_Cancel_t" + "o_throw_away_the_new_row_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The new row could not be added because of the following error:. /// </summary> public static string ListViewContext_ValidateNewRow_The_new_row_could_not_be_added_because_of_the_following_error_ { get { return ResourceManager.GetString("ListViewContext_ValidateNewRow_The_new_row_could_not_be_added_because_of_the_foll" + "owing_error_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Data import canceled. /// </summary> public static string LoadCanceledException_LoadCanceledException_Data_import_canceled { get { return ResourceManager.GetString("LoadCanceledException_LoadCanceledException_Data_import_canceled", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed importing results into &apos;{0}&apos;.. /// </summary> public static string Loader_Fail_Failed_importing_results_into___0___ { get { return ResourceManager.GetString("Loader_Fail_Failed_importing_results_into___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Updating peak statistics. /// </summary> public static string Loader_FinishLoad_Updating_peak_statistics { get { return ResourceManager.GetString("Loader_FinishLoad_Updating_peak_statistics", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure attempting to load the data cache file {0}. /// </summary> public static string Loader_Load_Failure_attempting_to_load_the_data_cache_file__0_ { get { return ResourceManager.GetString("Loader_Load_Failure_attempting_to_load_the_data_cache_file__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure reading the data file {0}.. 
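/// A hypothetical usage sketch (dataFilePath is an illustrative name, not from the original source):
/// string message = string.Format(Resources.Loader_Load_Failure_reading_the_data_file__0__, dataFilePath);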
/// </summary> public static string Loader_Load_Failure_reading_the_data_file__0__ { get { return ResourceManager.GetString("Loader_Load_Failure_reading_the_data_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading results for {0}. /// </summary> public static string Loader_Load_Loading_results_for__0__ { get { return ResourceManager.GetString("Loader_Load_Loading_results_for__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Data import expected to consume {0} minutes with maximum of {1} minutes. /// </summary> public static string LoadingTooSlowlyException_LoadingTooSlowlyException_Data_import_expected_to_consume__0__minutes_with_maximum_of__1__mintues { get { return ResourceManager.GetString("LoadingTooSlowlyException_LoadingTooSlowlyException_Data_import_expected_to_consu" + "me__0__minutes_with_maximum_of__1__mintues", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Below is the saved value for the path to the executable.. /// </summary> public static string LocateFileDlg_LocateFileDlg_Below_is_the_saved_value_for_the_path_to_the_executable { get { return ResourceManager.GetString("LocateFileDlg_LocateFileDlg_Below_is_the_saved_value_for_the_path_to_the_executab" + "le", resourceCulture); } } /// <summary> /// Looks up a localized string similar to If you have it installed please provide the path below.. /// </summary> public static string LocateFileDlg_LocateFileDlg_If_you_have_it_installed_please_provide_the_path_below { get { return ResourceManager.GetString("LocateFileDlg_LocateFileDlg_If_you_have_it_installed_please_provide_the_path_belo" + "w", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Otherwise, please cancel and install {0} first. /// </summary> public static string LocateFileDlg_LocateFileDlg_Otherwise__please_cancel_and_install__0__first { get { return ResourceManager.GetString("LocateFileDlg_LocateFileDlg_Otherwise__please_cancel_and_install__0__first", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Otherwise, please cancel and install {0} version {1} first. /// </summary> public static string LocateFileDlg_LocateFileDlg_Otherwise__please_cancel_and_install__0__version__1__first { get { return ResourceManager.GetString("LocateFileDlg_LocateFileDlg_Otherwise__please_cancel_and_install__0__version__1__" + "first", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please verify and update if incorrect.. /// </summary> public static string LocateFileDlg_LocateFileDlg_Please_verify_and_update_if_incorrect { get { return ResourceManager.GetString("LocateFileDlg_LocateFileDlg_Please_verify_and_update_if_incorrect", resourceCulture); } } /// <summary> /// Looks up a localized string similar to then run the tool again.. /// </summary> public static string LocateFileDlg_LocateFileDlg_then_run_the_tool_again { get { return ResourceManager.GetString("LocateFileDlg_LocateFileDlg_then_run_the_tool_again", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires {0}.. /// </summary> public static string LocateFileDlg_LocateFileDlg_This_tool_requires_0 { get { return ResourceManager.GetString("LocateFileDlg_LocateFileDlg_This_tool_requires_0", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires {0} version {1}.. 
/// </summary> public static string LocateFileDlg_LocateFileDlg_This_tool_requires_0_version_1 { get { return ResourceManager.GetString("LocateFileDlg_LocateFileDlg_This_tool_requires_0_version_1", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You have not provided a valid path.. /// </summary> public static string LocateFileDlg_PathPasses_You_have_not_provided_a_valid_path_ { get { return ResourceManager.GetString("LocateFileDlg_PathPasses_You_have_not_provided_a_valid_path_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to canceled. /// </summary> public static string LongWaitDlg_PerformWork_canceled { get { return ResourceManager.GetString("LongWaitDlg_PerformWork_canceled", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Always. /// </summary> public static string LossInclusionExtension_LOCALIZED_VALUES_Always { get { return ResourceManager.GetString("LossInclusionExtension_LOCALIZED_VALUES_Always", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Matching Library. /// </summary> public static string LossInclusionExtension_LOCALIZED_VALUES_Matching_Library { get { return ResourceManager.GetString("LossInclusionExtension_LOCALIZED_VALUES_Matching_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Never. /// </summary> public static string LossInclusionExtension_LOCALIZED_VALUES_Never { get { return ResourceManager.GetString("LossInclusionExtension_LOCALIZED_VALUES_Never", resourceCulture); } } /// <summary> /// Looks up a localized string similar to LOWESS Aligner. /// </summary> public static string LowessAlignerFactory_ToString_LOWESS_Aligner { get { return ResourceManager.GetString("LowessAlignerFactory_ToString_LOWESS_Aligner", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap magnifier_zoom_in { get { object obj = ResourceManager.GetObject("magnifier_zoom_in", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to All results must be completely imported before any can be re-imported.. /// </summary> public static string ManageResultsDlg_ReimportResults_All_results_must_be_completely_imported_before_any_can_be_re_imported { get { return ResourceManager.GetString("ManageResultsDlg_ReimportResults_All_results_must_be_completely_imported_before_a" + "ny_can_be_re_imported", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to find the following files, either in their original locations or in the folder of the current document:. /// </summary> public static string ManageResultsDlg_ReimportResults_Unable_to_find_the_following_files_either_in_their_original_locations_or_in_the_folder_of_the_current_document { get { return ResourceManager.GetString("ManageResultsDlg_ReimportResults_Unable_to_find_the_following_files_either_in_the" + "ir_original_locations_or_in_the_folder_of_the_current_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Manually integrated peaks. /// </summary> public static string ManuallyIntegratedPeakFinder_DisplayName_Manually_integrated_peaks { get { return ResourceManager.GetString("ManuallyIntegratedPeakFinder_DisplayName_Manually_integrated_peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Manually integrated peak. 
/// </summary> public static string ManuallyIntegratedPeakFinder_MatchTransition_Manually_integrated_peak { get { return ResourceManager.GetString("ManuallyIntegratedPeakFinder_MatchTransition_Manually_integrated_peak", resourceCulture); } } /// <summary> /// Looks up a localized string similar to m/z. /// </summary> public static string MassErrorHistogram2DGraphPane_Graph_Mz { get { return ResourceManager.GetString("MassErrorHistogram2DGraphPane_Graph_Mz", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retention Time. /// </summary> public static string MassErrorHistogram2DGraphPane_Graph_Retention_Time { get { return ResourceManager.GetString("MassErrorHistogram2DGraphPane_Graph_Retention_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mass Errors Unavailable. /// </summary> public static string MassErrorHistogramGraphPane_AddLabels_Mass_Errors_Unavailable { get { return ResourceManager.GetString("MassErrorHistogramGraphPane_AddLabels_Mass_Errors_Unavailable", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mean. /// </summary> public static string MassErrorHistogramGraphPane_AddLabels_mean { get { return ResourceManager.GetString("MassErrorHistogramGraphPane_AddLabels_mean", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Standard Deviation. /// </summary> public static string MassErrorHistogramGraphPane_AddLabels_standard_deviation { get { return ResourceManager.GetString("MassErrorHistogramGraphPane_AddLabels_standard_deviation", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Count. /// </summary> public static string MassErrorHistogramGraphPane_UpdateGraph_Count { get { return ResourceManager.GetString("MassErrorHistogramGraphPane_UpdateGraph_Count", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mass Error (ppm). /// </summary> public static string MassErrorReplicateGraphPane_UpdateGraph_Mass_Error { get { return ResourceManager.GetString("MassErrorReplicateGraphPane_UpdateGraph_Mass_Error", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mass Error. /// </summary> public static string MassErrorReplicateGraphPane_UpdateGraph_Mass_Error_No_Ppm { get { return ResourceManager.GetString("MassErrorReplicateGraphPane_UpdateGraph_Mass_Error_No_Ppm", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Select a peptide to see the mass error graph. /// </summary> public static string MassErrorReplicateGraphPane_UpdateGraph_Select_a_peptide_to_see_the_mass_error_graph { get { return ResourceManager.GetString("MassErrorReplicateGraphPane_UpdateGraph_Select_a_peptide_to_see_the_mass_error_gr" + "aph", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid iRT value at precursor m/z {0} for peptide {1}.. /// </summary> public static string MassListImporter_AddRow_Invalid_iRT_value_at_precusor_m_z__0__for_peptide__1_ { get { return ResourceManager.GetString("MassListImporter_AddRow_Invalid_iRT_value_at_precusor_m_z__0__for_peptide__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid library intensity at precursor {0} for peptide {1}..
/// </summary> public static string MassListImporter_AddRow_Invalid_library_intensity_at_precursor__0__for_peptide__1_ { get { return ResourceManager.GetString("MassListImporter_AddRow_Invalid_library_intensity_at_precursor__0__for_peptide__1" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Empty transition list.. /// </summary> public static string MassListImporter_Import_Empty_transition_list { get { return ResourceManager.GetString("MassListImporter_Import_Empty_transition_list", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to find peptide column.. /// </summary> public static string MassListImporter_Import_Failed_to_find_peptide_column { get { return ResourceManager.GetString("MassListImporter_Import_Failed_to_find_peptide_column", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Importing {0}. /// </summary> public static string MassListImporter_Import_Importing__0__ { get { return ResourceManager.GetString("MassListImporter_Import_Importing__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Inspecting peptide sequence information. /// </summary> public static string MassListImporter_Import_Inspecting_peptide_sequence_information { get { return ResourceManager.GetString("MassListImporter_Import_Inspecting_peptide_sequence_information", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid transition list. Transition lists must contain at least precursor m/z, product m/z, and peptide sequence.. /// </summary> public static string MassListImporter_Import_Invalid_transition_list_Transition_lists_must_contain_at_least_precursor_m_z_product_m_z_and_peptide_sequence { get { return ResourceManager.GetString("MassListImporter_Import_Invalid_transition_list_Transition_lists_must_contain_at_" + "least_precursor_m_z_product_m_z_and_peptide_sequence", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reading transition list. /// </summary> public static string MassListImporter_Import_Reading_transition_list { get { return ResourceManager.GetString("MassListImporter_Import_Reading_transition_list", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor m/z {0} does not match the closest possible value {1} (delta = {2}), peptide {3}.. /// </summary> public static string MassListRowReader_CalcPrecursorExplanations_ { get { return ResourceManager.GetString("MassListRowReader_CalcPrecursorExplanations_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check the Instrument tab in the Transition Settings.. /// </summary> public static string MassListRowReader_CalcPrecursorExplanations_Check_the_Instrument_tab_in_the_Transition_Settings { get { return ResourceManager.GetString("MassListRowReader_CalcPrecursorExplanations_Check_the_Instrument_tab_in_the_Trans" + "ition_Settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check the isolation scheme in the full scan tab of the transition settings.. 
/// </summary> public static string MassListRowReader_CalcPrecursorExplanations_Check_the_isolation_scheme_in_the_full_scan_settings_ { get { return ResourceManager.GetString("MassListRowReader_CalcPrecursorExplanations_Check_the_isolation_scheme_in_the_ful" + "l_scan_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The precursor m/z {0} of the peptide {1} is out of range for the instrument settings.. /// </summary> public static string MassListRowReader_CalcPrecursorExplanations_The_precursor_m_z__0__of_the_peptide__1__is_out_of_range_for_the_instrument_settings_ { get { return ResourceManager.GetString("MassListRowReader_CalcPrecursorExplanations_The_precursor_m_z__0__of_the_peptide_" + "_1__is_out_of_range_for_the_instrument_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The precursor m/z {0} of the peptide {1} is outside the range covered by the DIA isolation scheme.. /// </summary> public static string MassListRowReader_CalcPrecursorExplanations_The_precursor_m_z__0__of_the_peptide__1__is_outside_the_range_covered_by_the_DIA_isolation_scheme_ { get { return ResourceManager.GetString("MassListRowReader_CalcPrecursorExplanations_The_precursor_m_z__0__of_the_peptide_" + "_1__is_outside_the_range_covered_by_the_DIA_isolation_scheme_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product m/z value {0} in peptide {1} has no matching product ion.. /// </summary> public static string MassListRowReader_CalcTransitionExplanations_Product_m_z_value__0__in_peptide__1__has_no_matching_product_ion { get { return ResourceManager.GetString("MassListRowReader_CalcTransitionExplanations_Product_m_z_value__0__in_peptide__1_" + "_has_no_matching_product_ion", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The product m/z {0} is out of range for the instrument settings, in the peptide sequence {1}.. /// </summary> public static string MassListRowReader_CalcTransitionExplanations_The_product_m_z__0__is_out_of_range_for_the_instrument_settings__in_the_peptide_sequence__1_ { get { return ResourceManager.GetString("MassListRowReader_CalcTransitionExplanations_The_product_m_z__0__is_out_of_range_" + "for_the_instrument_settings__in_the_peptide_sequence__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check the Modifications tab in Transition Settings.. /// </summary> public static string MassListRowReader_NextRow_Check_the_Modifications_tab_in_Transition_Settings { get { return ResourceManager.GetString("MassListRowReader_NextRow_Check_the_Modifications_tab_in_Transition_Settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid peptide sequence {0} found. /// </summary> public static string MassListRowReader_NextRow_Invalid_peptide_sequence__0__found { get { return ResourceManager.GetString("MassListRowReader_NextRow_Invalid_peptide_sequence__0__found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isotope labeled entry found without matching settings.. /// </summary> public static string MassListRowReader_NextRow_Isotope_labeled_entry_found_without_matching_settings_ { get { return ResourceManager.GetString("MassListRowReader_NextRow_Isotope_labeled_entry_found_without_matching_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No peptide modified sequence column specified. 
/// </summary> public static string MassListRowReader_NextRow_No_peptide_sequence_column_specified { get { return ResourceManager.GetString("MassListRowReader_NextRow_No_peptide_sequence_column_specified", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add checked modifications. /// </summary> public static string MatchModificationsControl_AddCheckedModifications_Add_checked_modifications { get { return ResourceManager.GetString("MatchModificationsControl_AddCheckedModifications_Add_checked_modifications", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add {0} modification {1}. /// </summary> public static string MatchModificationsControl_AddModification_Add__0__modification__1_ { get { return ResourceManager.GetString("MatchModificationsControl_AddModification_Add__0__modification__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Edit modifications. /// </summary> public static string MatchModificationsControl_Initialize__Edit_modifications { get { return ResourceManager.GetString("MatchModificationsControl_Initialize__Edit_modifications", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit &amp;heavy modifications.... /// </summary> public static string MatchModificationsControl_Initialize_Edit__heavy_modifications___ { get { return ResourceManager.GetString("MatchModificationsControl_Initialize_Edit__heavy_modifications___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit &amp;structural modifications.... /// </summary> public static string MatchModificationsControl_Initialize_Edit__structural_modifications___ { get { return ResourceManager.GetString("MatchModificationsControl_Initialize_Edit__structural_modifications___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Specify all modifications you want to include in the search:. /// </summary> public static string MatchModificationsControl_ModificationLabelText_DDA_Search { get { return ResourceManager.GetString("MatchModificationsControl_ModificationLabelText_DDA_Search", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Charge state values must be greater than 0.. /// </summary> public static string MeasuredCollisionalCrossSection_Validate_Charge_state_values_must_be_greater_than_0_ { get { return ResourceManager.GetString("MeasuredCollisionalCrossSection_Validate_Charge_state_values_must_be_greater_than" + "_0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Collisional cross section values must be greater than 0.. /// </summary> public static string MeasuredCollisionalCrossSection_Validate_Collisional_cross_section_values_must_be_greater_than_0_ { get { return ResourceManager.GetString("MeasuredCollisionalCrossSection_Validate_Collisional_cross_section_values_must_be" + "_greater_than_0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to On line {0} {1}. /// </summary> public static string MeasuredDriftTimeTable_ValidateMeasuredDriftTimeCellValues_On_line__0___1_ { get { return ResourceManager.GetString("MeasuredDriftTimeTable_ValidateMeasuredDriftTimeCellValues_On_line__0___1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Multiple charge states for custom ions are no longer supported.. 
/// </summary> public static string MeasuredIon_ReadXml_Multiple_charge_states_for_custom_ions_are_no_longer_supported_ { get { return ResourceManager.GetString("MeasuredIon_ReadXml_Multiple_charge_states_for_custom_ions_are_no_longer_supporte" + "d_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reporter ion masses must be greater than or equal to {0}.. /// </summary> public static string MeasuredIon_Validate_Reporter_ion_masses_must_be_greater_than_or_equal_to__0__ { get { return ResourceManager.GetString("MeasuredIon_Validate_Reporter_ion_masses_must_be_greater_than_or_equal_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reporter ion masses must be less than or equal to {0}.. /// </summary> public static string MeasuredIon_Validate_Reporter_ion_masses_must_be_less_than_or_equal_to__0__ { get { return ResourceManager.GetString("MeasuredIon_Validate_Reporter_ion_masses_must_be_less_than_or_equal_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reporter ions must specify a formula or valid monoisotopic and average masses.. /// </summary> public static string MeasuredIon_Validate_Reporter_ions_must_specify_a_formula_or_valid_monoisotopic_and_average_masses { get { return ResourceManager.GetString("MeasuredIon_Validate_Reporter_ions_must_specify_a_formula_or_valid_monoisotopic_a" + "nd_average_masses", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Special fragment ions must have at least one fragmentation residue.. /// </summary> public static string MeasuredIon_Validate_Special_fragment_ions_must_have_at_least_one_fragmentation_residue { get { return ResourceManager.GetString("MeasuredIon_Validate_Special_fragment_ions_must_have_at_least_one_fragmentation_r" + "esidue", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Special fragment ions must specify the terminal side of the amino acid residue on which fragmentation occurs.. /// </summary> public static string MeasuredIon_Validate_Special_fragment_ions_must_specify_the_terminal_side_of_the_amino_acid_residue_on_which_fragmentation_occurs { get { return ResourceManager.GetString("MeasuredIon_Validate_Special_fragment_ions_must_specify_the_terminal_side_of_the_" + "amino_acid_residue_on_which_fragmentation_occurs", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The minimum length {0} must be between {1} and {2}.. /// </summary> public static string MeasuredIon_Validate_The_minimum_length__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("MeasuredIon_Validate_The_minimum_length__0__must_be_between__1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Special ion:. /// </summary> public static string MeasuredIonList_Label_Special_ion { get { return ResourceManager.GetString("MeasuredIonList_Label_Special_ion", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Special Ions. /// </summary> public static string MeasuredIonList_Title_Edit_Special_Ions { get { return ResourceManager.GetString("MeasuredIonList_Title_Edit_Special_Ions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Measured retention times must be greater than zero.. 
/// </summary> public static string MeasuredPeptide_ValidateRetentionTime_Measured_retention_times_must_be_greater_than_zero { get { return ResourceManager.GetString("MeasuredPeptide_ValidateRetentionTime_Measured_retention_times_must_be_greater_th" + "an_zero", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Measured retention times must be valid decimal numbers.. /// </summary> public static string MeasuredPeptide_ValidateRetentionTime_Measured_retention_times_must_be_valid_decimal_numbers { get { return ResourceManager.GetString("MeasuredPeptide_ValidateRetentionTime_Measured_retention_times_must_be_valid_deci" + "mal_numbers", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A modified peptide sequence is required for each entry.. /// </summary> public static string MeasuredPeptide_ValidateSequence_A_modified_peptide_sequence_is_required_for_each_entry { get { return ResourceManager.GetString("MeasuredPeptide_ValidateSequence_A_modified_peptide_sequence_is_required_for_each" + "_entry", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The sequence &apos;{0}&apos; is not a valid modified peptide sequence.. /// </summary> public static string MeasuredPeptide_ValidateSequence_The_sequence__0__is_not_a_valid_modified_peptide_sequence { get { return ResourceManager.GetString("MeasuredPeptide_ValidateSequence_The_sequence__0__is_not_a_valid_modified_peptide" + "_sequence", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempting to recalculate peak integration without first completing raw data import.. /// </summary> public static string MeasuredResults_ChangeRecalcStatus_Attempting_to_recalculate_peak_integration_without_first_completing_raw_data_import_ { get { return ResourceManager.GetString("MeasuredResults_ChangeRecalcStatus_Attempting_to_recalculate_peak_integration_wit" + "hout_first_completing_raw_data_import_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The chromatogram cache must be loaded before it can be changed.. /// </summary> public static string MeasuredResults_CommitCacheFile_The_chromatogram_cache_must_be_loaded_before_it_can_be_changed { get { return ResourceManager.GetString("MeasuredResults_CommitCacheFile_The_chromatogram_cache_must_be_loaded_before_it_c" + "an_be_changed", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The chromatogram cache must be loaded before it is optimized.. /// </summary> public static string MeasuredResults_OptimizeCache_The_chromatogram_cache_must_be_loaded_before_it_is_optimized { get { return ResourceManager.GetString("MeasuredResults_OptimizeCache_The_chromatogram_cache_must_be_loaded_before_it_is_" + "optimized", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Measured retention times must be positive values.. /// </summary> public static string MeasuredRetentionTime_Validate_Measured_retention_times_must_be_positive_values { get { return ResourceManager.GetString("MeasuredRetentionTime_Validate_Measured_retention_times_must_be_positive_values", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The sequence {0} is not a valid peptide.. 
/// </summary> public static string MeasuredRetentionTime_Validate_The_sequence__0__is_not_a_valid_peptide { get { return ResourceManager.GetString("MeasuredRetentionTime_Validate_The_sequence__0__is_not_a_valid_peptide", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Field. /// </summary> public static string MessageBoxHelper_GetControlMessage_Field { get { return ResourceManager.GetString("MessageBoxHelper_GetControlMessage_Field", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must contain a comma separated list of adducts or integers describing charge states with absolute value from {1} to {2}.. /// </summary> public static string MessageBoxHelper_ValidateAdductListTextBox__0__must_contain_a_comma_separated_list_of_adducts_or_integers_describing_charge_states_with_absolute_value_from__1__to__2__ { get { return ResourceManager.GetString("MessageBoxHelper_ValidateAdductListTextBox__0__must_contain_a_comma_separated_lis" + "t_of_adducts_or_integers_describing_charge_states_with_absolute_value_from__1__t" + "o__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must contain a comma separated list of adducts or integers describing charge states with absolute values from {1} to {2}.. /// </summary> public static string MessageBoxHelper_ValidateAdductListTextBox__0__must_contain_a_comma_separated_list_of_adducts_or_integers_describing_charge_states_with_absolute_values_from__1__to__2__ { get { return ResourceManager.GetString("MessageBoxHelper_ValidateAdductListTextBox__0__must_contain_a_comma_separated_lis" + "t_of_adducts_or_integers_describing_charge_states_with_absolute_values_from__1__" + "to__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must contain a comma separated list of decimal values from {1} to {2}.. /// </summary> public static string MessageBoxHelper_ValidateDecimalListTextBox__0__must_contain_a_comma_separated_list_of_decimal_values_from__1__to__2__ { get { return ResourceManager.GetString("MessageBoxHelper_ValidateDecimalListTextBox__0__must_contain_a_comma_separated_li" + "st_of_decimal_values_from__1__to__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must be greater than {1}.. /// </summary> public static string MessageBoxHelper_ValidateDecimalTextBox__0__must_be_greater_than__1__ { get { return ResourceManager.GetString("MessageBoxHelper_ValidateDecimalTextBox__0__must_be_greater_than__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must be greater than or equal to {1}.. /// </summary> public static string MessageBoxHelper_ValidateDecimalTextBox__0__must_be_greater_than_or_equal_to__1__ { get { return ResourceManager.GetString("MessageBoxHelper_ValidateDecimalTextBox__0__must_be_greater_than_or_equal_to__1__" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must be less than {1}.. /// </summary> public static string MessageBoxHelper_ValidateDecimalTextBox__0__must_be_less_than__1__ { get { return ResourceManager.GetString("MessageBoxHelper_ValidateDecimalTextBox__0__must_be_less_than__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must be less than or equal to {1}.. 
/// </summary> public static string MessageBoxHelper_ValidateDecimalTextBox__0__must_be_less_than_or_equal_to__1__ { get { return ResourceManager.GetString("MessageBoxHelper_ValidateDecimalTextBox__0__must_be_less_than_or_equal_to__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must contain a decimal value.. /// </summary> public static string MessageBoxHelper_ValidateDecimalTextBox__0__must_contain_a_decimal_value { get { return ResourceManager.GetString("MessageBoxHelper_ValidateDecimalTextBox__0__must_contain_a_decimal_value", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} cannot be empty.. /// </summary> public static string MessageBoxHelper_ValidateNameTextBox__0__cannot_be_empty { get { return ResourceManager.GetString("MessageBoxHelper_ValidateNameTextBox__0__cannot_be_empty", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must contain a comma separated list of integers from {1} to {2}.. /// </summary> public static string MessageBoxHelper_ValidateNumberListTextBox__0__must_contain_a_comma_separated_list_of_integers_from__1__to__2__ { get { return ResourceManager.GetString("MessageBoxHelper_ValidateNumberListTextBox__0__must_contain_a_comma_separated_lis" + "t_of_integers_from__1__to__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must contain an integer.. /// </summary> public static string MessageBoxHelper_ValidateNumberTextBox__0__must_contain_an_integer { get { return ResourceManager.GetString("MessageBoxHelper_ValidateNumberTextBox__0__must_contain_an_integer", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Value {0} must be between {1} and {2} or {3} and {4}.. /// </summary> public static string MessageBoxHelper_ValidateSignedNumberTextBox_Value__0__must_be_between__1__and__2__or__3__and__4__ { get { return ResourceManager.GetString("MessageBoxHelper_ValidateSignedNumberTextBox_Value__0__must_be_between__1__and__2" + "__or__3__and__4__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error converting &apos;{0}&apos; to &apos;{1}&apos;:. /// </summary> public static string MetadataExtractor_ApplyStep_Error_converting___0___to___1___ { get { return ResourceManager.GetString("MetadataExtractor_ApplyStep_Error_converting___0___to___1___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to find column {0}. /// </summary> public static string MetadataExtractor_ResolveColumn_Unable_to_find_column__0_ { get { return ResourceManager.GetString("MetadataExtractor_ResolveColumn_Unable_to_find_column__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} cannot be blank. /// </summary> public static string MetadataRuleEditor_OkDialog__0__cannot_be_blank { get { return ResourceManager.GetString("MetadataRuleEditor_OkDialog__0__cannot_be_blank", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is already a result file rule set named &apos;{0}&apos;.. /// </summary> public static string MetadataRuleEditor_OkDialog_There_is_already_a_metadata_rule_named___0___ { get { return ResourceManager.GetString("MetadataRuleEditor_OkDialog_There_is_already_a_metadata_rule_named___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This is not a valid regular expression.. 
/// </summary> public static string MetadataRuleEditor_OkDialog_This_is_not_a_valid_regular_expression_ { get { return ResourceManager.GetString("MetadataRuleEditor_OkDialog_This_is_not_a_valid_regular_expression_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Rule Set. /// </summary> public static string MetadataRuleSetList_Label_Rule_Set { get { return ResourceManager.GetString("MetadataRuleSetList_Label_Rule_Set", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Rule Sets. /// </summary> public static string MetadataRuleSetList_Title_Rule_Sets { get { return ResourceManager.GetString("MetadataRuleSetList_Title_Rule_Sets", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The text &apos;{0}&apos; must either be a valid regular expression or blank. /// </summary> public static string MetadataRuleStepEditor_OkDialog__0__must_either_be_a_valid_regular_expression_or_blank { get { return ResourceManager.GetString("MetadataRuleStepEditor_OkDialog__0__must_either_be_a_valid_regular_expression_or_" + "blank", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exporting method {0}.... /// </summary> public static string MethodExporter_ExportMethod_Exporting_method__0__ { get { return ResourceManager.GetString("MethodExporter_ExportMethod_Exporting_method__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exporting methods.... /// </summary> public static string MethodExporter_ExportMethod_Exporting_methods { get { return ResourceManager.GetString("MethodExporter_ExportMethod_Exporting_methods", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1/K0 lower limit. /// </summary> public static string Metrics_Col1K0LowerLimit__1_K0_lower_limit { get { return ResourceManager.GetString("Metrics_Col1K0LowerLimit__1_K0_lower_limit", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1/K0 upper limit. /// </summary> public static string Metrics_Col1K0UpperLimit__1_K0_upper_limit { get { return ResourceManager.GetString("Metrics_Col1K0UpperLimit__1_K0_upper_limit", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Max sampling time (seconds). /// </summary> public static string Metrics_ColMaxSamplingTime_Max_sampling_time__seconds_ { get { return ResourceManager.GetString("Metrics_ColMaxSamplingTime_Max_sampling_time__seconds_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mean sampling time (seconds). /// </summary> public static string Metrics_ColMeanSamplingTime_Mean_sampling_time__seconds_ { get { return ResourceManager.GetString("Metrics_ColMeanSamplingTime_Mean_sampling_time__seconds_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to m/z. /// </summary> public static string Metrics_ColMz_m_z { get { return ResourceManager.GetString("Metrics_ColMz_m_z", resourceCulture); } } /// <summary> /// Looks up a localized string similar to RT begin. /// </summary> public static string Metrics_ColRtBegin_RT_begin { get { return ResourceManager.GetString("Metrics_ColRtBegin_RT_begin", resourceCulture); } } /// <summary> /// Looks up a localized string similar to RT end. /// </summary> public static string Metrics_ColRtEnd_RT_end { get { return ResourceManager.GetString("Metrics_ColRtEnd_RT_end", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Target. 
/// </summary> public static string Metrics_ColTarget_Target { get { return ResourceManager.GetString("Metrics_ColTarget_Target", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding spectra to MIDAS library. /// </summary> public static string MidasLibrary_AddSpectra_Adding_spectra_to_MIDAS_library { get { return ResourceManager.GetString("MidasLibrary_AddSpectra_Adding_spectra_to_MIDAS_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error loading MIDAS library for adding spectra.. /// </summary> public static string MidasLibrary_AddSpectra_Error_loading_MIDAS_library_for_adding_spectra_ { get { return ResourceManager.GetString("MidasLibrary_AddSpectra_Error_loading_MIDAS_library_for_adding_spectra_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reading MIDAS spectra. /// </summary> public static string MidasLibrary_AddSpectra_Reading_MIDAS_spectra { get { return ResourceManager.GetString("MidasLibrary_AddSpectra_Reading_MIDAS_spectra", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error reading LibInfo from MIDAS library. /// </summary> public static string MidasLibrary_Load_Error_reading_LibInfo_from_MIDAS_library { get { return ResourceManager.GetString("MidasLibrary_Load_Error_reading_LibInfo_from_MIDAS_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading MIDAS library. /// </summary> public static string MidasLibrary_Load_Loading_MIDAS_library { get { return ResourceManager.GetString("MidasLibrary_Load_Loading_MIDAS_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to MIDAS Spectral Library. /// </summary> public static string MidasLibrary_SpecFilter_MIDAS_Spectral_Library { get { return ResourceManager.GetString("MidasLibrary_SpecFilter_MIDAS_Spectral_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The cache file has not been loaded yet.. /// </summary> public static string MinimizeResultsDlg_ChromCacheMinimizer_The_cache_file_has_not_been_loaded_yet { get { return ResourceManager.GetString("MinimizeResultsDlg_ChromCacheMinimizer_The_cache_file_has_not_been_loaded_yet", resourceCulture); } } /// <summary> /// Looks up a localized string similar to All results must be completely imported before any can be minimized.. /// </summary> public static string MinimizeResultsDlg_Minimize_All_results_must_be_completely_imported_before_any_can_be_minimized { get { return ResourceManager.GetString("MinimizeResultsDlg_Minimize_All_results_must_be_completely_imported_before_any_ca" + "n_be_minimized", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You have not chosen any options to minimize your cache file. Are you sure you want to continue?. /// </summary> public static string MinimizeResultsDlg_Minimize_You_have_not_chosen_any_options_to_minimize_your_cache_file_Are_you_sure_you_want_to_continue { get { return ResourceManager.GetString("MinimizeResultsDlg_Minimize_You_have_not_chosen_any_options_to_minimize_your_cach" + "e_file_Are_you_sure_you_want_to_continue", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An unexpected error occurred while saving the data cache file {0}.. 
/// </summary> public static string MinimizeResultsDlg_MinimizeToFile_An_unexpected_error_occurred_while_saving_the_data_cache_file__0__ { get { return ResourceManager.GetString("MinimizeResultsDlg_MinimizeToFile_An_unexpected_error_occurred_while_saving_the_d" + "ata_cache_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Saving new cache file. /// </summary> public static string MinimizeResultsDlg_MinimizeToFile_Saving_new_cache_file { get { return ResourceManager.GetString("MinimizeResultsDlg_MinimizeToFile_Saving_new_cache_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The noise time limit must be a positive decimal number.. /// </summary> public static string MinimizeResultsDlg_tbxNoiseTimeRange_Leave_The_noise_time_limit_must_be_a_positive_decimal_number { get { return ResourceManager.GetString("MinimizeResultsDlg_tbxNoiseTimeRange_Leave_The_noise_time_limit_must_be_a_positiv" + "e_decimal_number", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The noise time limit must be a valid decimal number.. /// </summary> public static string MinimizeResultsDlg_tbxNoiseTimeRange_Leave_The_noise_time_limit_must_be_a_valid_decimal_number { get { return ResourceManager.GetString("MinimizeResultsDlg_tbxNoiseTimeRange_Leave_The_noise_time_limit_must_be_a_valid_d" + "ecimal_number", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mismatched transitions. /// </summary> public static string MismatchedIsotopeTransitionsFinder_DisplayName_Mismatched_transitions { get { return ResourceManager.GetString("MismatchedIsotopeTransitionsFinder_DisplayName_Mismatched_transitions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing all results. /// </summary> public static string MissingAllResultsFinder_DisplayName_Missing_all_results { get { return ResourceManager.GetString("MissingAllResultsFinder_DisplayName_Missing_all_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing any results. /// </summary> public static string MissingAnyResultsFinder_DisplayName_Missing_any_results { get { return ResourceManager.GetString("MissingAnyResultsFinder_DisplayName_Missing_any_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No matching library data. /// </summary> public static string MissingLibraryDataFinder_DisplayName_No_matching_library_data { get { return ResourceManager.GetString("MissingLibraryDataFinder_DisplayName_No_matching_library_data", resourceCulture); } } /// <summary> /// Looks up a localized string similar to missing scores. /// </summary> public static string MissingScoresFinder_DisplayName_missing_scores { get { return ResourceManager.GetString("MissingScoresFinder_DisplayName_missing_scores", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} missing from chromatogram peak. /// </summary> public static string MissingScoresFinder_Match__0__missing_from_chromatogram_peak { get { return ResourceManager.GetString("MissingScoresFinder_Match__0__missing_from_chromatogram_peak", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} missing from peptide. 
/// </summary> public static string MissingScoresFinder_Match__0__missing_from_peptide { get { return ResourceManager.GetString("MissingScoresFinder_Match__0__missing_from_peptide", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Would you like to create a new empty document?. /// </summary> public static string ModeUIAwareFormHelper_EnableNeededButtonsForModeUI___Would_you_like_to_create_a_new_empty_document_ { get { return ResourceManager.GetString("ModeUIAwareFormHelper_EnableNeededButtonsForModeUI___Would_you_like_to_create_a_n" + "ew_empty_document_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot switch to molecule interface because the current document contains proteomics data.. /// </summary> public static string ModeUIAwareFormHelper_EnableNeededButtonsForModeUI_Cannot_switch_to_molecule_interface_because_the_current_document_contains_proteomics_data_ { get { return ResourceManager.GetString("ModeUIAwareFormHelper_EnableNeededButtonsForModeUI_Cannot_switch_to_molecule_inte" + "rface_because_the_current_document_contains_proteomics_data_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot switch to proteomics interface because the current document contains non-proteomic molecule data.. /// </summary> public static string ModeUIAwareFormHelper_EnableNeededButtonsForModeUI_Cannot_switch_to_proteomics_interface_because_the_current_document_contains_small_molecules_data_ { get { return ResourceManager.GetString("ModeUIAwareFormHelper_EnableNeededButtonsForModeUI_Cannot_switch_to_proteomics_in" + "terface_because_the_current_document_contains_small_molecules_data_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Would you like to create a new document?. /// </summary> public static string ModeUIAwareFormHelper_EnableNeededButtonsForModeUI_Would_you_like_to_create_a_new_document_ { get { return ResourceManager.GetString("ModeUIAwareFormHelper_EnableNeededButtonsForModeUI_Would_you_like_to_create_a_new" + "_document_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mixed interface. /// </summary> public static string ModeUIAwareFormHelper_SetModeUIToolStripButtons_Mixed_interface { get { return ResourceManager.GetString("ModeUIAwareFormHelper_SetModeUIToolStripButtons_Mixed_interface", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Only show menus and controls appropriate to proteomics analysis. /// </summary> public static string ModeUIAwareFormHelper_SetModeUIToolStripButtons_Only_show_menus_and_controls_appropriate_to_proteomics_analysis { get { return ResourceManager.GetString("ModeUIAwareFormHelper_SetModeUIToolStripButtons_Only_show_menus_and_controls_appr" + "opriate_to_proteomics_analysis", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Only show menus and controls appropriate to non-proteomic molecule analysis. /// </summary> public static string ModeUIAwareFormHelper_SetModeUIToolStripButtons_Only_show_menus_and_controls_appropriate_to_small_molecule_analysis { get { return ResourceManager.GetString("ModeUIAwareFormHelper_SetModeUIToolStripButtons_Only_show_menus_and_controls_appr" + "opriate_to_small_molecule_analysis", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Proteomics interface. 
/// </summary> public static string ModeUIAwareFormHelper_SetModeUIToolStripButtons_Proteomics_interface { get { return ResourceManager.GetString("ModeUIAwareFormHelper_SetModeUIToolStripButtons_Proteomics_interface", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Show all menus and controls. /// </summary> public static string ModeUIAwareFormHelper_SetModeUIToolStripButtons_Show_all_menus_and_controls { get { return ResourceManager.GetString("ModeUIAwareFormHelper_SetModeUIToolStripButtons_Show_all_menus_and_controls", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Molecule interface. /// </summary> public static string ModeUIAwareFormHelper_SetModeUIToolStripButtons_Small_Molecules_interface { get { return ResourceManager.GetString("ModeUIAwareFormHelper_SetModeUIToolStripButtons_Small_Molecules_interface", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unrecognized modification placement for Unimod id {0} in modified peptide sequence {1} (amino acid {2}, {3}).. /// </summary> public static string ModificationMatcher_ThrowUnimodException_Unrecognized_modification_placement_for_Unimod_id__0__in_modified_peptide_sequence__1___amino_acid__2____3___ { get { return ResourceManager.GetString("ModificationMatcher_ThrowUnimodException_Unrecognized_modification_placement_for_" + "Unimod_id__0__in_modified_peptide_sequence__1___amino_acid__2____3___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unrecognized Unimod id {0} in modified peptide sequence {1} (amino acid {2}, {3}).. /// </summary> public static string ModificationMatcher_ThrowUnimodException_Unrecognized_Unimod_id__0__in_modified_peptide_sequence__1___amino_acid__2____3___ { get { return ResourceManager.GetString("ModificationMatcher_ThrowUnimodException_Unrecognized_Unimod_id__0__in_modified_p" + "eptide_sequence__1___amino_acid__2____3___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid attempt to add data to completed MassLookup.. /// </summary> public static string ModMassLookup_Add_Invalid_attempt_to_add_data_to_completed_MassLookup { get { return ResourceManager.GetString("ModMassLookup_Add_Invalid_attempt_to_add_data_to_completed_MassLookup", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid attempt to access incomplete MassLookup.. /// </summary> public static string ModMassLookup_MatchModificationMass_Invalid_attempt_to_access_incomplete_MassLookup { get { return ResourceManager.GetString("ModMassLookup_MatchModificationMass_Invalid_attempt_to_access_incomplete_MassLook" + "up", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Molecule { get { object obj = ResourceManager.GetObject("Molecule", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap MoleculeIrt { get { object obj = ResourceManager.GetObject("MoleculeIrt", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. 
        /// </summary>
        public static System.Drawing.Bitmap MoleculeIrtLib {
            get {
                object obj = ResourceManager.GetObject("MoleculeIrtLib", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap MoleculeLib {
            get {
                object obj = ResourceManager.GetObject("MoleculeLib", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap MoleculeList {
            get {
                object obj = ResourceManager.GetObject("MoleculeList", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap MoleculeStandard {
            get {
                object obj = ResourceManager.GetObject("MoleculeStandard", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap MoleculeStandardLib {
            get {
                object obj = ResourceManager.GetObject("MoleculeStandardLib", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }

        /// <summary>
        /// Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap MoleculeUI {
            get {
                object obj = ResourceManager.GetObject("MoleculeUI", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }

        /// <summary>
        /// Looks up a localized string similar to Export mProphet Features.
        /// </summary>
        public static string MProphetFeaturesDlg_OkDialog_Export_mProphet_Features {
            get {
                return ResourceManager.GetString("MProphetFeaturesDlg_OkDialog_Export_mProphet_Features", resourceCulture);
            }
        }

        /// <summary>
        /// Looks up a localized string similar to Failed attempting to save mProphet features to {0}..
        /// </summary>
        public static string MProphetFeaturesDlg_OkDialog_Failed_attempting_to_save_mProphet_features_to__0__ {
            get {
                return ResourceManager.GetString("MProphetFeaturesDlg_OkDialog_Failed_attempting_to_save_mProphet_features_to__0__", resourceCulture);
            }
        }

        /// <summary>
        /// Looks up a localized string similar to mProphet Feature Files.
        /// </summary>
        public static string MProphetFeaturesDlg_OkDialog_mProphet_Feature_Files {
            get {
                return ResourceManager.GetString("MProphetFeaturesDlg_OkDialog_mProphet_Feature_Files", resourceCulture);
            }
        }

        /// <summary>
        /// Looks up a localized string similar to To export mProphet features, first train an mProphet model..
        /// </summary>
        public static string MProphetFeaturesDlg_OkDialog_To_export_MProphet_features_first_train_an_MProphet_model_ {
            get {
                return ResourceManager.GetString("MProphetFeaturesDlg_OkDialog_To_export_MProphet_features_first_train_an_MProphet_" +
                        "model_", resourceCulture);
            }
        }

        /// <summary>
        /// Looks up a localized string similar to Attempted to score a peak with {0} features using a model with {1} trained scores..
        /// </summary>
        public static string MProphetPeakScoringModel_CalcLinearScore_Attempted_to_score_a_peak_with__0__features_using_a_model_with__1__trained_scores_ {
            get {
                return ResourceManager.GetString("MProphetPeakScoringModel_CalcLinearScore_Attempted_to_score_a_peak_with__0__featu" +
                        "res_using_a_model_with__1__trained_scores_", resourceCulture);
            }
        }

        /// <summary>
        /// Looks up a localized string similar to Insufficient decoy peaks ({0} with {1} targets) to continue training..
/// </summary> public static string MProphetPeakScoringModel_CalculateWeights_Insufficient_decoy_peaks___0__with__1__targets__to_continue_training_ { get { return ResourceManager.GetString("MProphetPeakScoringModel_CalculateWeights_Insufficient_decoy_peaks___0__with__1__" + "targets__to_continue_training_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Insufficient target peaks ({0} with {1} decoys) detected at {2}% FDR to continue training.. /// </summary> public static string MProphetPeakScoringModel_CalculateWeights_Insufficient_target_peaks___0__with__1__decoys__detected_at__2___FDR_to_continue_training_ { get { return ResourceManager.GetString("MProphetPeakScoringModel_CalculateWeights_Insufficient_target_peaks___0__with__1_" + "_decoys__detected_at__2___FDR_to_continue_training_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to MProphetScoringModel was given a peak with {0} features, but it has {1} peak feature calculators. /// </summary> public static string MProphetPeakScoringModel_CreateTransitionGroups_MProphetScoringModel_was_given_a_peak_with__0__features__but_it_has__1__peak_feature_calculators { get { return ResourceManager.GetString("MProphetPeakScoringModel_CreateTransitionGroups_MProphetScoringModel_was_given_a_" + "peak_with__0__features__but_it_has__1__peak_feature_calculators", resourceCulture); } } /// <summary> /// Looks up a localized string similar to MProphetPeakScoringModel requires at least one peak feature calculator with a weight value. /// </summary> public static string MProphetPeakScoringModel_DoValidate_MProphetPeakScoringModel_requires_at_least_one_peak_feature_calculator_with_a_weight_value { get { return ResourceManager.GetString("MProphetPeakScoringModel_DoValidate_MProphetPeakScoringModel_requires_at_least_on" + "e_peak_feature_calculator_with_a_weight_value", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Scoring model converged (iteration {0} - {1:#,#} peaks at {2:0.##%} FDR). /// </summary> public static string MProphetPeakScoringModel_Train_Scoring_model_converged__iteration__0_____1______peaks_at__2_0_____FDR_ { get { return ResourceManager.GetString("MProphetPeakScoringModel_Train_Scoring_model_converged__iteration__0_____1______p" + "eaks_at__2_0_____FDR_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Training peak scoring model. /// </summary> public static string MProphetPeakScoringModel_Train_Training_peak_scoring_model { get { return ResourceManager.GetString("MProphetPeakScoringModel_Train_Training_peak_scoring_model", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Training scoring model (iteration {0} of {1}). /// </summary> public static string MProphetPeakScoringModel_Train_Training_scoring_model__iteration__0__of__1__ { get { return ResourceManager.GetString("MProphetPeakScoringModel_Train_Training_scoring_model__iteration__0__of__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Training scoring model (iteration {0} of {1} - {2:#,#} peaks at {3:0.##%} FDR). 
/// </summary> public static string MProphetPeakScoringModel_Train_Training_scoring_model__iteration__0__of__1_____2______peaks_at__3_0_____FDR_ { get { return ResourceManager.GetString("MProphetPeakScoringModel_Train_Training_scoring_model__iteration__0__of__1_____2_" + "_____peaks_at__3_0_____FDR_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adjusting peak boundaries. /// </summary> public static string MProphetResultsHandler_ChangePeaks_Adjusting_peak_boundaries { get { return ResourceManager.GetString("MProphetResultsHandler_ChangePeaks_Adjusting_peak_boundaries", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Q Value. /// </summary> public static string MProphetResultsHandler_Q_VALUE_ANNOTATION_Q_Value { get { return ResourceManager.GetString("MProphetResultsHandler_Q_VALUE_ANNOTATION_Q_Value", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Co-elution. /// </summary> public static string MQuestCoElutionCalc_MQuestCoElutionCalc_Coelution { get { return ResourceManager.GetString("MQuestCoElutionCalc_MQuestCoElutionCalc_Coelution", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Default intensity. /// </summary> public static string MQuestDefaultIntensityCalc_Name_Default_Intensity { get { return ResourceManager.GetString("MQuestDefaultIntensityCalc_Name_Default_Intensity", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Default dotp or idotp. /// </summary> public static string MQuestDefaultIntensityCorrelationCalc_Name_Default_dotp_or_idotp { get { return ResourceManager.GetString("MQuestDefaultIntensityCorrelationCalc_Name_Default_dotp_or_idotp", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Default co-elution (weighted). /// </summary> public static string MQuestDefaultWeightedCoElutionCalc_Name_Default_co_elution__weighted_ { get { return ResourceManager.GetString("MQuestDefaultWeightedCoElutionCalc_Name_Default_co_elution__weighted_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Default shape (weighted). /// </summary> public static string MQuestDefaultWeightedShapeCalc_Name_Default_shape__weighted_ { get { return ResourceManager.GetString("MQuestDefaultWeightedShapeCalc_Name_Default_shape__weighted_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Intensity. /// </summary> public static string MQuestIntensityCalc_MQuestIntensityCalc_Intensity { get { return ResourceManager.GetString("MQuestIntensityCalc_MQuestIntensityCalc_Intensity", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library intensity dot-product. /// </summary> public static string MQuestIntensityCorrelationCalc_Name_Library_intensity_dot_product { get { return ResourceManager.GetString("MQuestIntensityCorrelationCalc_Name_Library_intensity_dot_product", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Standard library dot-product. /// </summary> public static string MQuestIntensityStandardCorrelationCalc_Name_Standard_library_dot_product { get { return ResourceManager.GetString("MQuestIntensityStandardCorrelationCalc_Name_Standard_library_dot_product", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reference co-elution. 
/// </summary> public static string MQuestReferenceCoElutionCalc_MQuestReferenceCoElutionCalc_Reference_coelution { get { return ResourceManager.GetString("MQuestReferenceCoElutionCalc_MQuestReferenceCoElutionCalc_Reference_coelution", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reference intensity dot-product. /// </summary> public static string MQuestReferenceCorrelationCalc_MQuestReferenceCorrelationCalc_mQuest_reference_correlation { get { return ResourceManager.GetString("MQuestReferenceCorrelationCalc_MQuestReferenceCorrelationCalc_mQuest_reference_co" + "rrelation", resourceCulture); } } /// <summary> /// Looks up a localized string similar to mProphet weighted reference. /// </summary> public static string MQuestReferenceShapeCalc_MQuestReferenceShapeCalc_mProphet_weighted_reference { get { return ResourceManager.GetString("MQuestReferenceShapeCalc_MQuestReferenceShapeCalc_mProphet_weighted_reference", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reference shape. /// </summary> public static string MQuestReferenceShapeCalc_MQuestReferenceShapeCalc_Reference_shape { get { return ResourceManager.GetString("MQuestReferenceShapeCalc_MQuestReferenceShapeCalc_Reference_shape", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retention time difference. /// </summary> public static string MQuestRetentionTimePredictionCalc_MQuestRetentionTimePredictionCalc_Retention_time_difference { get { return ResourceManager.GetString("MQuestRetentionTimePredictionCalc_MQuestRetentionTimePredictionCalc_Retention_tim" + "e_difference", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retention time difference squared. /// </summary> public static string MQuestRetentionTimeSquaredPredictionCalc_MQuestRetentionTimeSquaredPredictionCalc_Retention_time_difference_squared { get { return ResourceManager.GetString("MQuestRetentionTimeSquaredPredictionCalc_MQuestRetentionTimeSquaredPredictionCalc" + "_Retention_time_difference_squared", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Shape. /// </summary> public static string MQuestShapeCalc_MQuestShapeCalc_Shape { get { return ResourceManager.GetString("MQuestShapeCalc_MQuestShapeCalc_Shape", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Standard Intensity. /// </summary> public static string MQuestStandardIntensityCalc_Name_Standard_Intensity { get { return ResourceManager.GetString("MQuestStandardIntensityCalc_Name_Standard_Intensity", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Standard co-elution (weighted). /// </summary> public static string MQuestStandardWeightedCoElutionCalc_Name_Standard_co_elution__weighted_ { get { return ResourceManager.GetString("MQuestStandardWeightedCoElutionCalc_Name_Standard_co_elution__weighted_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Standard shape (weighted). /// </summary> public static string MQuestStandardWeightedShapeCalc_Name_Standard_shape__weighted_ { get { return ResourceManager.GetString("MQuestStandardWeightedShapeCalc_Name_Standard_shape__weighted_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Co-elution (weighted). 
/// </summary> public static string MQuestWeightedCoElutionCalc_MQuestWeightedCoElutionCalc_mQuest_weighted_coelution { get { return ResourceManager.GetString("MQuestWeightedCoElutionCalc_MQuestWeightedCoElutionCalc_mQuest_weighted_coelution" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reference co-elution (weighted). /// </summary> public static string MQuestWeightedReferenceCoElutionCalc_MQuestWeightedReferenceCoElutionCalc_mQuest_weighted_reference_coelution { get { return ResourceManager.GetString("MQuestWeightedReferenceCoElutionCalc_MQuestWeightedReferenceCoElutionCalc_mQuest_" + "weighted_reference_coelution", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reference shape (weighted). /// </summary> public static string MQuestWeightedReferenceShapeCalc_MQuestWeightedReferenceShapeCalc_mProphet_weighted_reference_shape { get { return ResourceManager.GetString("MQuestWeightedReferenceShapeCalc_MQuestWeightedReferenceShapeCalc_mProphet_weight" + "ed_reference_shape", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Shape (weighted). /// </summary> public static string MQuestWeightedShapeCalc_MQuestWeightedShapeCalc_mQuest_weighted_shape { get { return ResourceManager.GetString("MQuestWeightedShapeCalc_MQuestWeightedShapeCalc_mQuest_weighted_shape", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap MSAmandaLogo { get { object obj = ResourceManager.GetObject("MSAmandaLogo", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to MS/MS scan index {0} not found. /// </summary> public static string MsxDemultiplexer_FindStartStop_MsxDemultiplexer_MS_MS_index__0__not_found { get { return ResourceManager.GetString("MsxDemultiplexer_FindStartStop_MsxDemultiplexer_MS_MS_index__0__not_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Downloading {0}. /// </summary> public static string MultiFileAsynchronousDownloadClient_DownloadFileAsync_Downloading__0 { get { return ResourceManager.GetString("MultiFileAsynchronousDownloadClient_DownloadFileAsync_Downloading__0", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Downloading {0}. /// </summary> public static string MultiFileAsynchronousDownloadClient_DownloadFileAsync_Downloading__0_ { get { return ResourceManager.GetString("MultiFileAsynchronousDownloadClient_DownloadFileAsync_Downloading__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Download canceled.. /// </summary> public static string MultiFileAsynchronousDownloadClient_DownloadFileAsyncWithBroker_Download_canceled_ { get { return ResourceManager.GetString("MultiFileAsynchronousDownloadClient_DownloadFileAsyncWithBroker_Download_canceled" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check the Modification tab in the Peptide Settings, the m/z types on the Prediction tab, or the m/z match tolerance on the Instrument tab of the Transition Settings.. /// </summary> public static string MzMatchException_suggestion { get { return ResourceManager.GetString("MzMatchException_suggestion", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. 
        /// </summary>
        public static System.Drawing.Bitmap NewDocument {
            get {
                object obj = ResourceManager.GetObject("NewDocument", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }

        /// <summary>
        /// Looks up a localized string similar to Precursor-product shape score.
        /// </summary>
        public static string NextGenCrossWeightedShapeCalc_NextGenCrossWeightedShapeCalc_Precursor_product_shape_score {
            get {
                return ResourceManager.GetString("NextGenCrossWeightedShapeCalc_NextGenCrossWeightedShapeCalc_Precursor_product_sha" +
                        "pe_score", resourceCulture);
            }
        }

        /// <summary>
        /// Looks up a localized string similar to Precursor isotope dot product.
        /// </summary>
        public static string NextGenIsotopeDotProductCalc_NextGenIsotopeDotProductCalc_Precursor_isotope_dot_product {
            get {
                return ResourceManager.GetString("NextGenIsotopeDotProductCalc_NextGenIsotopeDotProductCalc_Precursor_isotope_dot_p" +
                        "roduct", resourceCulture);
            }
        }

        /// <summary>
        /// Looks up a localized string similar to Precursor mass error.
        /// </summary>
        public static string NextGenPrecursorMassErrorCalc_NextGenPrecursorMassErrorCalc_Precursor_mass_error {
            get {
                return ResourceManager.GetString("NextGenPrecursorMassErrorCalc_NextGenPrecursorMassErrorCalc_Precursor_mass_error", resourceCulture);
            }
        }

        /// <summary>
        /// Looks up a localized string similar to Product mass error.
        /// </summary>
        public static string NextGenProductMassErrorCalc_NextGenProductMassErrorCalc_Product_mass_error {
            get {
                return ResourceManager.GetString("NextGenProductMassErrorCalc_NextGenProductMassErrorCalc_Product_mass_error", resourceCulture);
            }
        }

        /// <summary>
        /// Looks up a localized string similar to Signal to noise.
        /// </summary>
        public static string NextGenSignalNoiseCalc_NextGenSignalNoiseCalc_Signal_to_noise {
            get {
                return ResourceManager.GetString("NextGenSignalNoiseCalc_NextGenSignalNoiseCalc_Signal_to_noise", resourceCulture);
            }
        }

        /// <summary>
        /// Looks up a localized string similar to Standard product mass error.
        /// </summary>
        public static string NextGenStandardProductMassErrorCalc_Name_Standard_product_mass_error {
            get {
                return ResourceManager.GetString("NextGenStandardProductMassErrorCalc_Name_Standard_product_mass_error", resourceCulture);
            }
        }

        /// <summary>
        /// Looks up a localized string similar to Standard signal to noise.
        /// </summary>
        public static string NextGenStandardSignalNoiseCalc_Name_Standard_signal_to_noise {
            get {
                return ResourceManager.GetString("NextGenStandardSignalNoiseCalc_Name_Standard_signal_to_noise", resourceCulture);
            }
        }

        /// <summary>
        /// Looks up a localized string similar to NIST Spectral Library.
        /// </summary>
        public static string NistLibrary_SpecFilter_NIST_Spectral_Library {
            get {
                return ResourceManager.GetString("NistLibrary_SpecFilter_NIST_Spectral_Library", resourceCulture);
            }
        }

        /// <summary>
        /// Looks up a localized string similar to Imported library contains multiple entries for one or more {0} pairs.
        ///
        ///This is probably due to the library containing entries for multiple parameters
        ///such as instrument type or collision energy, but Skyline keys only on
        ///{0} so these entries are ambiguous.
        ///
        ///You should filter the library as needed before importing to Skyline.
        ///
        ///Here are the {0} pairs with multiple entries:
        ///{1}.
        /// </summary>
        public static string NistLibraryBase_CreateCache_ {
            get {
                return ResourceManager.GetString("NistLibraryBase_CreateCache_", resourceCulture);
            }
        }
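
        // --- Illustrative usage sketch (editor's addition, not generated code) ---
        // Shows how callers typically combine one of the placeholder resource
        // strings above with string.Format. It assumes the enclosing generated
        // class is the standard .Designer.cs "Resources" class; the method name
        // and the parameters "keyPairText" and "conflictLines" are hypothetical,
        // invented for this example only.
        private static string ExampleFormatCreateCacheError(string keyPairText, string conflictLines) {
            // {0} names the kind of key pair (e.g. "peptide+charge" or
            // "molecule+adduct", defined below) and appears several times in the
            // message; {1} is the newline-separated list of conflicting entries.
            return string.Format(NistLibraryBase_CreateCache_, keyPairText, conflictLines);
        }

        /// <summary>
        /// Looks up a localized string similar to Could not read the precursor m/z value &quot;{0}&quot;.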
/// </summary> public static string NistLibraryBase_CreateCache_Could_not_read_the_precursor_m_z_value___0__ { get { return ResourceManager.GetString("NistLibraryBase_CreateCache_Could_not_read_the_precursor_m_z_value___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid format at peak {0} for {1}.. /// </summary> public static string NistLibraryBase_CreateCache_Invalid_format_at_peak__0__for__1__ { get { return ResourceManager.GetString("NistLibraryBase_CreateCache_Invalid_format_at_peak__0__for__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to molecule+adduct. /// </summary> public static string NistLibraryBase_CreateCache_molecule_adduct { get { return ResourceManager.GetString("NistLibraryBase_CreateCache_molecule_adduct", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No peaks found for peptide {0}.. /// </summary> public static string NistLibraryBase_CreateCache_No_peaks_found_for_peptide__0__ { get { return ResourceManager.GetString("NistLibraryBase_CreateCache_No_peaks_found_for_peptide__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peak count for MS/MS spectrum exceeds maximum {0}.. /// </summary> public static string NistLibraryBase_CreateCache_Peak_count_for_MS_MS_spectrum_excedes_maximum__0__ { get { return ResourceManager.GetString("NistLibraryBase_CreateCache_Peak_count_for_MS_MS_spectrum_excedes_maximum__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to peptide+charge. /// </summary> public static string NistLibraryBase_CreateCache_peptide_charge { get { return ResourceManager.GetString("NistLibraryBase_CreateCache_peptide_charge", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected end of file.. /// </summary> public static string NistLibraryBase_CreateCache_Unexpected_end_of_file { get { return ResourceManager.GetString("NistLibraryBase_CreateCache_Unexpected_end_of_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected end of file in peaks for {0}.. /// </summary> public static string NistLibraryBase_CreateCache_Unexpected_end_of_file_in_peaks_for__0__ { get { return ResourceManager.GetString("NistLibraryBase_CreateCache_Unexpected_end_of_file_in_peaks_for__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Building binary cache for {0} library. /// </summary> public static string NistLibraryBase_Load_Building_binary_cache_for__0__library { get { return ResourceManager.GetString("NistLibraryBase_Load_Building_binary_cache_for__0__library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed loading library &apos;{0}&apos;.. /// </summary> public static string NistLibraryBase_Load_Failed_loading_library__0__ { get { return ResourceManager.GetString("NistLibraryBase_Load_Failed_loading_library__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid precursor charge found. File may be corrupted.. /// </summary> public static string NistLibraryBase_Load_Invalid_precursor_charge_found_File_may_be_corrupted { get { return ResourceManager.GetString("NistLibraryBase_Load_Invalid_precursor_charge_found_File_may_be_corrupted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading {0} library. 
/// </summary> public static string NistLibraryBase_Load_Loading__0__library { get { return ResourceManager.GetString("NistLibraryBase_Load_Loading__0__library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file &apos;{0}&apos; is not a valid library.. /// </summary> public static string NistLibraryBase_Load_The_file___0___is_not_a_valid_library_ { get { return ResourceManager.GetString("NistLibraryBase_Load_The_file___0___is_not_a_valid_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure trying to read peaks. /// </summary> public static string NistLibraryBase_ReadSpectrum_Failure_trying_to_read_peaks { get { return ResourceManager.GetString("NistLibraryBase_ReadSpectrum_Failure_trying_to_read_peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} (line {1}): {2}. /// </summary> public static string NistLibraryBase_ThrowIOException__0__line__1__2__ { get { return ResourceManager.GetString("NistLibraryBase_ThrowIOException__0__line__1__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to TFRatio. /// </summary> public static string NistLibSpecBase_PEP_RANK_TFRATIO_TFRatio { get { return ResourceManager.GetString("NistLibSpecBase_PEP_RANK_TFRATIO_TFRatio", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No centroided data available for file &quot;{0}&quot;. Adjust your Full-Scan settings.. /// </summary> public static string NoCentroidedDataException_NoCentroidedDataException_No_centroided_data_available_for_file___0_____Adjust_your_Full_Scan_settings_ { get { return ResourceManager.GetString("NoCentroidedDataException_NoCentroidedDataException_No_centroided_data_available_" + "for_file___0_____Adjust_your_Full_Scan_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No scans in {0} match the current filter settings.. /// </summary> public static string NoFullScanDataException_NoFullScanDataException_No_scans_in__0__match_the_current_filter_settings_ { get { return ResourceManager.GetString("NoFullScanDataException_NoFullScanDataException_No_scans_in__0__match_the_current" + "_filter_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} does not contain SRM/MRM chromatograms. To extract chromatograms from its spectra, go to Settings &gt; Transition Settings &gt; Full-Scan and choose options appropriate to the acquisition method used.. /// </summary> public static string NoFullScanFilteringException_NoFullScanFilteringException_The_file__0__does_not_contain_SRM_MRM_chromatograms__To_extract_chromatograms_from_its_spectra__go_to_Settings___Transition_Settings___Full_Scan_and_choose_options_appropriate_to_the_acquisition_method_used_ { get { return ResourceManager.GetString(@"NoFullScanFilteringException_NoFullScanFilteringException_The_file__0__does_not_contain_SRM_MRM_chromatograms__To_extract_chromatograms_from_its_spectra__go_to_Settings___Transition_Settings___Full_Scan_and_choose_options_appropriate_to_the_acquisition_method_used_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap NoPeak { get { object obj = ResourceManager.GetObject("NoPeak", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to No SRM/MRM data found in {0}.. 
/// </summary> public static string NoSrmDataException_NoSrmDataException_No_SRM_MRM_data_found_in__0__ { get { return ResourceManager.GetString("NoSrmDataException_NoSrmDataException_No_SRM_MRM_data_found_in__0__", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Note { get { object obj = ResourceManager.GetObject("Note", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to {0} (not found). /// </summary> public static string NotFoundChromGraphItem_NotFoundChromGraphItem__0__not_found { get { return ResourceManager.GetString("NotFoundChromGraphItem_NotFoundChromGraphItem__0__not_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to OK. /// </summary> public static string OK { get { return ResourceManager.GetString("OK", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempted modification of a read-only collection.. /// </summary> public static string OneOrManyList_Add_Attempted_modification_of_a_read_only_collection { get { return ResourceManager.GetString("OneOrManyList_Add_Attempted_modification_of_a_read_only_collection", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The index {0} must be 0 for a single entry list.. /// </summary> public static string OneOrManyList_ValidateIndex_The_index__0__must_be_0_for_a_single_entry_list { get { return ResourceManager.GetString("OneOrManyList_ValidateIndex_The_index__0__must_be_0_for_a_single_entry_list", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The index {0} must be between 0 and {1}.. /// </summary> public static string OneOrManyList_ValidateIndex_The_index__0__must_be_between_0_and__1__ { get { return ResourceManager.GetString("OneOrManyList_ValidateIndex_The_index__0__must_be_between_0_and__1__", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Open { get { object obj = ResourceManager.GetObject("Open", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to No remote accounts have been specified. If you have an existing Unifi account you can enter your login information now.. /// </summary> public static string OpenDataSourceDialog_EnsureRemoteAccount_No_remote_accounts_have_been_specified__If_you_have_an_existing_Unifi_account_you_can_enter_your_login_information_now_ { get { return ResourceManager.GetString("OpenDataSourceDialog_EnsureRemoteAccount_No_remote_accounts_have_been_specified__" + "If_you_have_an_existing_Unifi_account_you_can_enter_your_login_information_now_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to My Network Place. /// </summary> public static string OpenDataSourceDialog_lookInComboBox_SelectionChangeCommitted_My_Network_Place { get { return ResourceManager.GetString("OpenDataSourceDialog_lookInComboBox_SelectionChangeCommitted_My_Network_Place", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error. /// </summary> public static string OpenDataSourceDialog_Open_Error { get { return ResourceManager.GetString("OpenDataSourceDialog_Open_Error", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please select one or more data sources.. 
/// </summary> public static string OpenDataSourceDialog_Open_Please_select_one_or_more_data_sources { get { return ResourceManager.GetString("OpenDataSourceDialog_Open_Please_select_one_or_more_data_sources", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Any spectra format. /// </summary> public static string OpenDataSourceDialog_OpenDataSourceDialog_Any_spectra_format { get { return ResourceManager.GetString("OpenDataSourceDialog_OpenDataSourceDialog_Any_spectra_format", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Desktop. /// </summary> public static string OpenDataSourceDialog_OpenDataSourceDialog_Desktop { get { return ResourceManager.GetString("OpenDataSourceDialog_OpenDataSourceDialog_Desktop", resourceCulture); } } /// <summary> /// Looks up a localized string similar to My Computer. /// </summary> public static string OpenDataSourceDialog_OpenDataSourceDialog_My_Computer { get { return ResourceManager.GetString("OpenDataSourceDialog_OpenDataSourceDialog_My_Computer", resourceCulture); } } /// <summary> /// Looks up a localized string similar to My Documents. /// </summary> public static string OpenDataSourceDialog_OpenDataSourceDialog_My_Documents { get { return ResourceManager.GetString("OpenDataSourceDialog_OpenDataSourceDialog_My_Documents", resourceCulture); } } /// <summary> /// Looks up a localized string similar to My Recent Documents. /// </summary> public static string OpenDataSourceDialog_OpenDataSourceDialog_My_Recent_Documents { get { return ResourceManager.GetString("OpenDataSourceDialog_OpenDataSourceDialog_My_Recent_Documents", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remote Accounts. /// </summary> public static string OpenDataSourceDialog_OpenDataSourceDialog_Remote_Accounts { get { return ResourceManager.GetString("OpenDataSourceDialog_OpenDataSourceDialog_Remote_Accounts", resourceCulture); } } /// <summary> /// Looks up a localized string similar to access failure. /// </summary> public static string OpenDataSourceDialog_populateComboBoxFromDirectory_access_failure { get { return ResourceManager.GetString("OpenDataSourceDialog_populateComboBoxFromDirectory_access_failure", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Local Drive. /// </summary> public static string OpenDataSourceDialog_populateComboBoxFromDirectory_Local_Drive { get { return ResourceManager.GetString("OpenDataSourceDialog_populateComboBoxFromDirectory_Local_Drive", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Network Share. /// </summary> public static string OpenDataSourceDialog_populateComboBoxFromDirectory_Network_Share { get { return ResourceManager.GetString("OpenDataSourceDialog_populateComboBoxFromDirectory_Network_Share", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Optical Drive. /// </summary> public static string OpenDataSourceDialog_populateComboBoxFromDirectory_Optical_Drive { get { return ResourceManager.GetString("OpenDataSourceDialog_populateComboBoxFromDirectory_Optical_Drive", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Removable Drive. 
/// </summary> public static string OpenDataSourceDialog_populateComboBoxFromDirectory_Removable_Drive { get { return ResourceManager.GetString("OpenDataSourceDialog_populateComboBoxFromDirectory_Removable_Drive", resourceCulture); } } /// <summary> /// Looks up a localized string similar to access failure. /// </summary> public static string OpenDataSourceDialog_populateListViewFromDirectory_access_failure { get { return ResourceManager.GetString("OpenDataSourceDialog_populateListViewFromDirectory_access_failure", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to retrieve the contents of this directory.. /// </summary> public static string OpenDataSourceDialog_populateListViewFromDirectory_An_error_occurred_attempting_to_retrieve_the_contents_of_this_directory { get { return ResourceManager.GetString("OpenDataSourceDialog_populateListViewFromDirectory_An_error_occurred_attempting_t" + "o_retrieve_the_contents_of_this_directory", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Local Drive. /// </summary> public static string OpenDataSourceDialog_populateListViewFromDirectory_Local_Drive { get { return ResourceManager.GetString("OpenDataSourceDialog_populateListViewFromDirectory_Local_Drive", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Network Share. /// </summary> public static string OpenDataSourceDialog_populateListViewFromDirectory_Network_Share { get { return ResourceManager.GetString("OpenDataSourceDialog_populateListViewFromDirectory_Network_Share", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Optical Drive. /// </summary> public static string OpenDataSourceDialog_populateListViewFromDirectory_Optical_Drive { get { return ResourceManager.GetString("OpenDataSourceDialog_populateListViewFromDirectory_Optical_Drive", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Removable Drive. /// </summary> public static string OpenDataSourceDialog_populateListViewFromDirectory_Removable_Drive { get { return ResourceManager.GetString("OpenDataSourceDialog_populateListViewFromDirectory_Removable_Drive", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retry. /// </summary> public static string OpenDataSourceDialog_populateListViewFromDirectory_Retry { get { return ResourceManager.GetString("OpenDataSourceDialog_populateListViewFromDirectory_Retry", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Some source paths are invalid. /// </summary> public static string OpenDataSourceDialog_sourcePathTextBox_KeyUp_Some_source_paths_are_invalid { get { return ResourceManager.GetString("OpenDataSourceDialog_sourcePathTextBox_KeyUp_Some_source_paths_are_invalid", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap OpenFolder { get { object obj = ResourceManager.GetObject("OpenFolder", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to The number of optimization steps {0} is not between {1} and {2}.. 
/// </summary> public static string OptimizableRegression_Validate_The_number_of_optimization_steps__0__is_not_between__1__and__2__ { get { return ResourceManager.GetString("OptimizableRegression_Validate_The_number_of_optimization_steps__0__is_not_betwee" + "n__1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The optimization step size {0} is not greater than zero.. /// </summary> public static string OptimizableRegression_Validate_The_optimization_step_size__0__is_not_greater_than_zero { get { return ResourceManager.GetString("OptimizableRegression_Validate_The_optimization_step_size__0__is_not_greater_than" + "_zero", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to convert {0} optimizations to new format.. /// </summary> public static string OptimizationDb_ConvertFromOldFormat_Failed_to_convert__0__optimizations_to_new_format_ { get { return ResourceManager.GetString("OptimizationDb_ConvertFromOldFormat_Failed_to_convert__0__optimizations_to_new_fo" + "rmat_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Optimization Libraries. /// </summary> public static string OptimizationDb_FILTER_OPTDB_Optimization_Libraries { get { return ResourceManager.GetString("OptimizationDb_FILTER_OPTDB_Optimization_Libraries", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library path cannot be null.. /// </summary> public static string OptimizationDb_GetOptimizationDb_Library_path_cannot_be_null_ { get { return ResourceManager.GetString("OptimizationDb_GetOptimizationDb_Library_path_cannot_be_null_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading optimization library {0}. /// </summary> public static string OptimizationDb_GetOptimizationDb_Loading_optimization_library__0_ { get { return ResourceManager.GetString("OptimizationDb_GetOptimizationDb_Loading_optimization_library__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} could not be created. Perhaps you do not have sufficient privileges.. /// </summary> public static string OptimizationDb_GetOptimizationDb_The_file__0__could_not_be_created__Perhaps_you_do_not_have_sufficient_privileges_ { get { return ResourceManager.GetString("OptimizationDb_GetOptimizationDb_The_file__0__could_not_be_created__Perhaps_you_d" + "o_not_have_sufficient_privileges_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} could not be opened. {1}. /// </summary> public static string OptimizationDb_GetOptimizationDb_The_file__0__could_not_be_opened___1_ { get { return ResourceManager.GetString("OptimizationDb_GetOptimizationDb_The_file__0__could_not_be_opened___1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} could not be opened (conversion from old format failed). {1}. /// </summary> public static string OptimizationDb_GetOptimizationDb_The_file__0__could_not_be_opened__conversion_from_old_format_failed____1_ { get { return ResourceManager.GetString("OptimizationDb_GetOptimizationDb_The_file__0__could_not_be_opened__conversion_fro" + "m_old_format_failed____1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} does not exist.. 
/// </summary> public static string OptimizationDb_GetOptimizationDb_The_file__0__does_not_exist_ { get { return ResourceManager.GetString("OptimizationDb_GetOptimizationDb_The_file__0__does_not_exist_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} is not a valid optimization library file.. /// </summary> public static string OptimizationDb_GetOptimizationDb_The_file__0__is_not_a_valid_optimization_library_file_ { get { return ResourceManager.GetString("OptimizationDb_GetOptimizationDb_The_file__0__is_not_a_valid_optimization_library" + "_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The path containing {0} does not exist.. /// </summary> public static string OptimizationDb_GetOptimizationDb_The_path_containing__0__does_not_exist_ { get { return ResourceManager.GetString("OptimizationDb_GetOptimizationDb_The_path_containing__0__does_not_exist_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You do not have privileges to access the file {0}.. /// </summary> public static string OptimizationDb_GetOptimizationDb_You_do_not_have_privilieges_to_access_the_file__0__ { get { return ResourceManager.GetString("OptimizationDb_GetOptimizationDb_You_do_not_have_privilieges_to_access_the_file__" + "0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot compare OptimizationKey to an object of a different type. /// </summary> public static string OptimizationKey_CompareTo_Cannot_compare_OptimizationKey_to_an_object_of_a_different_type { get { return ResourceManager.GetString("OptimizationKey_CompareTo_Cannot_compare_OptimizationKey_to_an_object_of_a_differ" + "ent_type", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected use of optimization library before successful initialization.. /// </summary> public static string OptimizationLibrary_RequireUsable_Unexpected_use_of_optimization_library_before_successful_initialization_ { get { return ResourceManager.GetString("OptimizationLibrary_RequireUsable_Unexpected_use_of_optimization_library_before_s" + "uccessful_initialization_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Optimization Database:. /// </summary> public static string OptimizationLibraryList_Label_Optimization_Database { get { return ResourceManager.GetString("OptimizationLibraryList_Label_Optimization_Database", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Optimization Databases. /// </summary> public static string OptimizationLibraryList_Title_Edit_Optimization_Databases { get { return ResourceManager.GetString("OptimizationLibraryList_Title_Edit_Optimization_Databases", resourceCulture); } } /// <summary> /// Looks up a localized string similar to None. /// </summary> public static string OptimizedMethodTypeExtension_LOCALIZED_VALUES_None { get { return ResourceManager.GetString("OptimizedMethodTypeExtension_LOCALIZED_VALUES_None", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor. /// </summary> public static string OptimizedMethodTypeExtension_LOCALIZED_VALUES_Precursor { get { return ResourceManager.GetString("OptimizedMethodTypeExtension_LOCALIZED_VALUES_Precursor", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transition. 
/// </summary> public static string OptimizedMethodTypeExtension_LOCALIZED_VALUES_Transition { get { return ResourceManager.GetString("OptimizedMethodTypeExtension_LOCALIZED_VALUES_Transition", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing scan time value on scan {0}. Scan times are required for overlap-based demultiplexing.. /// </summary> public static string OverlapDeconvSolverHandler_BuildDeconvBlock_Missing_scan_time_value_on_scan__0___Scan_times_are_required_for_overlap_based_demultiplexing_ { get { return ResourceManager.GetString("OverlapDeconvSolverHandler_BuildDeconvBlock_Missing_scan_time_value_on_scan__0___" + "Scan_times_are_required_for_overlap_based_demultiplexing_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Overlap deconvolution window scheme is rank deficient at scan {2}. Rank is {0} while matrix has dimension {1}. A non-degenerate overlapping window scheme is required.. /// </summary> public static string OverlapDeconvSolverHandler_BuildDeconvBlock_Overlap_deconvolution_window_scheme_is_rank_deficient_at_scan__2___Rank_is__0__while_matrix_has_dimension__1____A_non_degenerate_overlapping_window_scheme_is_required_ { get { return ResourceManager.GetString("OverlapDeconvSolverHandler_BuildDeconvBlock_Overlap_deconvolution_window_scheme_i" + "s_rank_deficient_at_scan__2___Rank_is__0__while_matrix_has_dimension__1____A_non" + "_degenerate_overlapping_window_scheme_is_required_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempt to insert slice of deconvolution matrix failed due to out of range error.. /// </summary> public static string OverlapDemultiplexer_attempt_to_insert_slice_of_deconvolution_matrix_failed_out_of_range { get { return ResourceManager.GetString("OverlapDemultiplexer_attempt_to_insert_slice_of_deconvolution_matrix_failed_out_o" + "f_range", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempt to take slice of deconvolution matrix failed due to out of range error.. /// </summary> public static string OverlapDemultiplexer_attempt_to_take_slice_of_deconvolution_matrix_failed_out_of_range { get { return ResourceManager.GetString("OverlapDemultiplexer_attempt_to_take_slice_of_deconvolution_matrix_failed_out_of_" + "range", resourceCulture); } } /// <summary> /// Looks up a localized string similar to OverlapDemultiplexer:InitializeFile Improperly-formed overlap multiplexing file. /// </summary> public static string OverlapDemultiplexer_InitializeFile_OverlapDemultiplexer_InitializeFile_Improperly_formed_overlap_multiplexing_file { get { return ResourceManager.GetString("OverlapDemultiplexer_InitializeFile_OverlapDemultiplexer_InitializeFile_Improperl" + "y_formed_overlap_multiplexing_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Number of regions {0} in overlap demultiplexer approximation must be less than number of scans {1}.. /// </summary> public static string OverlapDemultiplexer_RowStart_Number_of_regions__0__in_overlap_demultiplexer_approximation_must_be_less_than_number_of_scans__1__ { get { return ResourceManager.GetString("OverlapDemultiplexer_RowStart_Number_of_regions__0__in_overlap_demultiplexer_appr" + "oximation_must_be_less_than_number_of_scans__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The overlap isolation window scheme is missing some contiguous isolation windows.. 
/// </summary> public static string OverlapDemultiplexer_the_isolation_window_overlap_scheme_does_not_cover_all_isolation_windows { get { return ResourceManager.GetString("OverlapDemultiplexer_the_isolation_window_overlap_scheme_does_not_cover_all_isola" + "tion_windows", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Panorama { get { object obj = ResourceManager.GetObject("Panorama", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to An unknown error occurred trying to verify access to Panorama folder {0} on the server {1}. ///{2}. /// </summary> public static string PanoramaHelper_ValidateFolder_ { get { return ResourceManager.GetString("PanoramaHelper_ValidateFolder_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An unknown error occurred trying to verify Panorama server information. ///{0}. /// </summary> public static string PanoramaHelper_ValidateServer_ { get { return ResourceManager.GetString("PanoramaHelper_ValidateServer_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap PanoramaPublish { get { object obj = ResourceManager.GetObject("PanoramaPublish", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Unknown error attempting to upload to Panorama. ///{0}. /// </summary> public static string PanoramaPublishHelper_PublishDocToPanorama_ { get { return ResourceManager.GetString("PanoramaPublishHelper_PublishDocToPanorama_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred on the Panorama server ({0}) importing the file.. /// </summary> public static string PanoramaPublishHelper_PublishDocToPanorama_An_error_occurred_on_the_Panorama_server___0___importing_the_file_ { get { return ResourceManager.GetString("PanoramaPublishHelper_PublishDocToPanorama_An_error_occurred_on_the_Panorama_serv" + "er___0___importing_the_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Document import was cancelled on the Panorama server {0}.. /// </summary> public static string PanoramaPublishHelper_PublishDocToPanorama_Document_import_was_cancelled_on_the_Panorama_server__0__ { get { return ResourceManager.GetString("PanoramaPublishHelper_PublishDocToPanorama_Document_import_was_cancelled_on_the_P" + "anorama_server__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error details can be found at {0}. /// </summary> public static string PanoramaPublishHelper_PublishDocToPanorama_Error_details_can_be_found_at__0_ { get { return ResourceManager.GetString("PanoramaPublishHelper_PublishDocToPanorama_Error_details_can_be_found_at__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Job details can be found at {0}.. /// </summary> public static string PanoramaPublishHelper_PublishDocToPanorama_Job_details_can_be_found_at__0__ { get { return ResourceManager.GetString("PanoramaPublishHelper_PublishDocToPanorama_Job_details_can_be_found_at__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Uploading document to Panorama. 
/// </summary> public static string PanoramaPublishHelper_PublishDocToPanorama_Uploading_document_to_Panorama { get { return ResourceManager.GetString("PanoramaPublishHelper_PublishDocToPanorama_Uploading_document_to_Panorama", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} is not a Panorama folder. /// </summary> public static string PanoramaUtil_VerifyFolder__0__is_not_a_Panorama_folder { get { return ResourceManager.GetString("PanoramaUtil_VerifyFolder__0__is_not_a_Panorama_folder", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Folder {0} does not exist on the Panorama server {1}. /// </summary> public static string PanoramaUtil_VerifyFolder_Folder__0__does_not_exist_on_the_Panorama_server__1_ { get { return ResourceManager.GetString("PanoramaUtil_VerifyFolder_Folder__0__does_not_exist_on_the_Panorama_server__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to User {0} does not have permissions to upload to the Panorama folder {1}. /// </summary> public static string PanoramaUtil_VerifyFolder_User__0__does_not_have_permissions_to_upload_to_the_Panorama_folder__1_ { get { return ResourceManager.GetString("PanoramaUtil_VerifyFolder_User__0__does_not_have_permissions_to_upload_to_the_Pan" + "orama_folder__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} at position {1}. /// </summary> public static string ParseExceptionDetail_ToString__at_position__0_ { get { return ResourceManager.GetString("ParseExceptionDetail_ToString__at_position__0_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Paste { get { object obj = ResourceManager.GetObject("Paste", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to &apos;{0}&apos; is not a capital letter that corresponds to an amino acid.. /// </summary> public static string PasteDlg_AddFasta__0__is_not_a_capital_letter_that_corresponds_to_an_amino_acid { get { return ResourceManager.GetString("PasteDlg_AddFasta__0__is_not_a_capital_letter_that_corresponds_to_an_amino_acid", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An unexpected error occurred:. /// </summary> public static string PasteDlg_AddFasta_An_unexpected_error_occurred { get { return ResourceManager.GetString("PasteDlg_AddFasta_An_unexpected_error_occurred", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An unexpected error occurred: {0} ({1}). /// </summary> public static string PasteDlg_AddFasta_An_unexpected_error_occurred__0__1__ { get { return ResourceManager.GetString("PasteDlg_AddFasta_An_unexpected_error_occurred__0__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is no name for this protein. /// </summary> public static string PasteDlg_AddFasta_There_is_no_name_for_this_protein { get { return ResourceManager.GetString("PasteDlg_AddFasta_There_is_no_name_for_this_protein", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This must start with &apos;&gt;&apos;. /// </summary> public static string PasteDlg_AddFasta_This_must_start_with { get { return ResourceManager.GetString("PasteDlg_AddFasta_This_must_start_with", resourceCulture); } } /// <summary> /// Looks up a localized string similar to OK. 
/// </summary> public static string PasteDlg_AddPeptides_OK { get { return ResourceManager.GetString("PasteDlg_AddPeptides_OK", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This peptide sequence was not found in the protein sequence. /// </summary> public static string PasteDlg_AddPeptides_This_peptide_sequence_was_not_found_in_the_protein_sequence { get { return ResourceManager.GetString("PasteDlg_AddPeptides_This_peptide_sequence_was_not_found_in_the_protein_sequence", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to interpret peptide modifications. /// </summary> public static string PasteDlg_AddPeptides_Unable_to_interpret_peptide_modifications { get { return ResourceManager.GetString("PasteDlg_AddPeptides_Unable_to_interpret_peptide_modifications", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Would you like to use the Unimod definitions for the following modifications?. /// </summary> public static string PasteDlg_AddPeptides_Would_you_like_to_use_the_Unimod_definitions_for_the_following_modifications { get { return ResourceManager.GetString("PasteDlg_AddPeptides_Would_you_like_to_use_the_Unimod_definitions_for_the_followi" + "ng_modifications", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid protein sequence: {0}. /// </summary> public static string PasteDlg_AddProteins_Invalid_protein_sequence__0__ { get { return ResourceManager.GetString("PasteDlg_AddProteins_Invalid_protein_sequence__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing protein sequence. /// </summary> public static string PasteDlg_AddProteins_Missing_protein_sequence { get { return ResourceManager.GetString("PasteDlg_AddProteins_Missing_protein_sequence", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This protein was not found in the background proteome database.. /// </summary> public static string PasteDlg_AddProteins_This_protein_was_not_found_in_the_background_proteome_database { get { return ResourceManager.GetString("PasteDlg_AddProteins_This_protein_was_not_found_in_the_background_proteome_databa" + "se", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The precursor m/z must be a number.. /// </summary> public static string PasteDlg_AddTransitionList_The_precursor_m_z_must_be_a_number_ { get { return ResourceManager.GetString("PasteDlg_AddTransitionList_The_precursor_m_z_must_be_a_number_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The product m/z must be a number.. /// </summary> public static string PasteDlg_AddTransitionList_The_product_m_z_must_be_a_number_ { get { return ResourceManager.GetString("PasteDlg_AddTransitionList_The_product_m_z_must_be_a_number_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The easiest way to use this window is to copy from Excel (or any text editor if the data is CSV formatted) and paste into the grid. /// ///Note that you can adjust column order in Skyline by dragging the column headers left or right. /// ///Most peptide transition lists can be imported with the &quot;File|Import|Transition List...&quot; menu item or even pasted directly into the Targets window.. 
/// </summary> public static string PasteDlg_btnTransitionListHelp_Click_ { get { return ResourceManager.GetString("PasteDlg_btnTransitionListHelp_Click_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Notes on molecule ion formulas: /// ///If your transition list format combines formulas and adducts in a single column (e.g. &quot;C8H10N4O2[M+Na]&quot;) then use the &quot;Ion Formula&quot; columns, and disregard the &quot;Adduct&quot; columns. If your transition list puts the neutral formula and adduct in separate columns, then use the &quot;Ion Formula&quot; columns for neutral formulas, and the &quot;Adduct&quot; columns for adducts.. /// </summary> public static string PasteDlg_btnTransitionListHelp_Click_2_ { get { return ResourceManager.GetString("PasteDlg_btnTransitionListHelp_Click_2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The easiest way to use this window is to copy from Excel (or any text editor if the data is CSV formatted) and paste into the grid. /// ///Note that you can adjust column order in Skyline by dragging the column headers left or right. For molecules, you can also select which columns to enable with the &quot;Columns...&quot; button. /// ///Most peptide transition lists can be imported with the &quot;File|Import|Transition List...&quot; menu item or even pasted directly into the Targets window. You can also do this with molecule tra [rest of string was truncated]&quot;;. /// </summary> public static string PasteDlg_btnTransitionListHelp_Click_SmallMol_ { get { return ResourceManager.GetString("PasteDlg_btnTransitionListHelp_Click_SmallMol_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Supported values for {0} are: {1}. /// </summary> public static string PasteDlg_btnTransitionListHelp_Click_Supported_values_for__0__are___1_ { get { return ResourceManager.GetString("PasteDlg_btnTransitionListHelp_Click_Supported_values_for__0__are___1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transition List Help. /// </summary> public static string PasteDlg_btnTransitionListHelp_Click_Transition_List_Help { get { return ResourceManager.GetString("PasteDlg_btnTransitionListHelp_Click_Transition_List_Help", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There is no sequence for this protein. /// </summary> public static string PasteDlg_CheckSequence_There_is_no_sequence_for_this_protein { get { return ResourceManager.GetString("PasteDlg_CheckSequence_There_is_no_sequence_for_this_protein", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Insert. /// </summary> public static string PasteDlg_Description_Insert { get { return ResourceManager.GetString("PasteDlg_Description_Insert", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Insert FASTA. /// </summary> public static string PasteDlg_Description_Insert_FASTA { get { return ResourceManager.GetString("PasteDlg_Description_Insert_FASTA", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Insert peptide list. /// </summary> public static string PasteDlg_Description_Insert_peptide_list { get { return ResourceManager.GetString("PasteDlg_Description_Insert_peptide_list", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Insert protein list. 
/// </summary> public static string PasteDlg_Description_Insert_protein_list { get { return ResourceManager.GetString("PasteDlg_Description_Insert_protein_list", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Insert transition list. /// </summary> public static string PasteDlg_Description_Insert_transition_list { get { return ResourceManager.GetString("PasteDlg_Description_Insert_transition_list", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Molecule List {0}. /// </summary> public static string PasteDlg_GetMoleculePeptideGroup_Molecule_List__0_ { get { return ResourceManager.GetString("PasteDlg_GetMoleculePeptideGroup_Molecule_List__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product m/z {0} isn&apos;t close enough to the nearest possible m/z {1} (delta {2}).. /// </summary> public static string PasteDlg_GetMoleculeTransition_Product_m_z__0__isn_t_close_enough_to_the_nearest_possible_m_z__1___delta_2___ { get { return ResourceManager.GetString("PasteDlg_GetMoleculeTransition_Product_m_z__0__isn_t_close_enough_to_the_nearest_" + "possible_m_z__1___delta_2___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The precursor m/z {0} is not measurable with your current instrument settings.. /// </summary> public static string PasteDlg_GetMoleculeTransitionGroup_The_precursor_m_z__0__is_not_measureable_with_your_current_instrument_settings_ { get { return ResourceManager.GetString("PasteDlg_GetMoleculeTransitionGroup_The_precursor_m_z__0__is_not_measureable_with" + "_your_current_instrument_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The peptide sequence cannot be blank.. /// </summary> public static string PasteDlg_ListPeptideSequences_The_peptide_sequence_cannot_be_blank { get { return ResourceManager.GetString("PasteDlg_ListPeptideSequences_The_peptide_sequence_cannot_be_blank", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The structure of this crosslinked peptide is not supported by Skyline. /// </summary> public static string PasteDlg_ListPeptideSequences_The_structure_of_this_crosslinked_peptide_is_not_supported_by_Skyline { get { return ResourceManager.GetString("PasteDlg_ListPeptideSequences_The_structure_of_this_crosslinked_peptide_is_not_su" + "pported_by_Skyline", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This peptide sequence contains invalid characters.. /// </summary> public static string PasteDlg_ListPeptideSequences_This_peptide_sequence_contains_invalid_characters { get { return ResourceManager.GetString("PasteDlg_ListPeptideSequences_This_peptide_sequence_contains_invalid_characters", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error on line {0}: Precursor m/z {1} does not agree with value {2} as calculated from ion formula and charge state (delta = {3}, Transition Settings | Instrument | Method match tolerance m/z = {4}). Correct the m/z value in the table, or leave it blank and Skyline will calculate it for you.. 
/// </summary> public static string PasteDlg_ReadPrecursorOrProductColumns_Error_on_line__0___Precursor_m_z__1__does_not_agree_with_value__2__as_calculated_from_ion_formula_and_charge_state__delta____3___Transition_Settings___Instrument___Method_match_tolerance_m_z____4_____Correct_the_m_z_value_in_the_table__or_leave_it_blank_and_Skyline_will_calculate_it_for_you_ { get { return ResourceManager.GetString(@"PasteDlg_ReadPrecursorOrProductColumns_Error_on_line__0___Precursor_m_z__1__does_not_agree_with_value__2__as_calculated_from_ion_formula_and_charge_state__delta____3___Transition_Settings___Instrument___Method_match_tolerance_m_z____4_____Correct_the_m_z_value_in_the_table__or_leave_it_blank_and_Skyline_will_calculate_it_for_you_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error on line {0}: Product m/z {1} does not agree with value {2} as calculated from ion formula and charge state (delta = {3}, Transition Settings | Instrument | Method match tolerance m/z = {4}). Correct the m/z value in the table, or leave it blank and Skyline will calculate it for you.. /// </summary> public static string PasteDlg_ReadPrecursorOrProductColumns_Error_on_line__0___Product_m_z__1__does_not_agree_with_value__2__as_calculated_from_ion_formula_and_charge_state__delta____3___Transition_Settings___Instrument___Method_match_tolerance_m_z____4_____Correct_the_m_z_value_in_the_table__or_leave_it_blank_and_Skyline_will_calculate_it_for_you_ { get { return ResourceManager.GetString(@"PasteDlg_ReadPrecursorOrProductColumns_Error_on_line__0___Product_m_z__1__does_not_agree_with_value__2__as_calculated_from_ion_formula_and_charge_state__delta____3___Transition_Settings___Instrument___Method_match_tolerance_m_z____4_____Correct_the_m_z_value_in_the_table__or_leave_it_blank_and_Skyline_will_calculate_it_for_you_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid charge value {0}. /// </summary> public static string PasteDlg_ReadPrecursorOrProductColumns_Invalid_charge_value__0_ { get { return ResourceManager.GetString("PasteDlg_ReadPrecursorOrProductColumns_Invalid_charge_value__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid collision energy value {0}. /// </summary> public static string PasteDlg_ReadPrecursorOrProductColumns_Invalid_collision_energy_value__0_ { get { return ResourceManager.GetString("PasteDlg_ReadPrecursorOrProductColumns_Invalid_collision_energy_value__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid compensation voltage {0}. /// </summary> public static string PasteDlg_ReadPrecursorOrProductColumns_Invalid_compensation_voltage__0_ { get { return ResourceManager.GetString("PasteDlg_ReadPrecursorOrProductColumns_Invalid_compensation_voltage__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid cone voltage value {0}. /// </summary> public static string PasteDlg_ReadPrecursorOrProductColumns_Invalid_cone_voltage_value__0_ { get { return ResourceManager.GetString("PasteDlg_ReadPrecursorOrProductColumns_Invalid_cone_voltage_value__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid declustering potential {0}. 
/// </summary> public static string PasteDlg_ReadPrecursorOrProductColumns_Invalid_declustering_potential__0_ { get { return ResourceManager.GetString("PasteDlg_ReadPrecursorOrProductColumns_Invalid_declustering_potential__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid drift time high energy offset value {0}. /// </summary> public static string PasteDlg_ReadPrecursorOrProductColumns_Invalid_drift_time_high_energy_offset_value__0_ { get { return ResourceManager.GetString("PasteDlg_ReadPrecursorOrProductColumns_Invalid_drift_time_high_energy_offset_valu" + "e__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid drift time value {0}. /// </summary> public static string PasteDlg_ReadPrecursorOrProductColumns_Invalid_drift_time_value__0_ { get { return ResourceManager.GetString("PasteDlg_ReadPrecursorOrProductColumns_Invalid_drift_time_value__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid m/z value {0}. /// </summary> public static string PasteDlg_ReadPrecursorOrProductColumns_Invalid_m_z_value__0_ { get { return ResourceManager.GetString("PasteDlg_ReadPrecursorOrProductColumns_Invalid_m_z_value__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid retention time value {0}. /// </summary> public static string PasteDlg_ReadPrecursorOrProductColumns_Invalid_retention_time_value__0_ { get { return ResourceManager.GetString("PasteDlg_ReadPrecursorOrProductColumns_Invalid_retention_time_value__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid retention time window value {0}. /// </summary> public static string PasteDlg_ReadPrecursorOrProductColumns_Invalid_retention_time_window_value__0_ { get { return ResourceManager.GetString("PasteDlg_ReadPrecursorOrProductColumns_Invalid_retention_time_window_value__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid S-Lens value {0}. /// </summary> public static string PasteDlg_ReadPrecursorOrProductColumns_Invalid_S_Lens_value__0_ { get { return ResourceManager.GetString("PasteDlg_ReadPrecursorOrProductColumns_Invalid_S_Lens_value__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No errors. /// </summary> public static string PasteDlg_ShowNoErrors_No_errors { get { return ResourceManager.GetString("PasteDlg_ShowNoErrors_No_errors", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Collisional Cross Section (sq A). /// </summary> public static string PasteDlg_UpdateMoleculeType_Collisional_Cross_Section__sq_A_ { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Collisional_Cross_Section__sq_A_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cone Voltage. /// </summary> public static string PasteDlg_UpdateMoleculeType_Cone_Voltage { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Cone_Voltage", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Explicit Collision Energy. /// </summary> public static string PasteDlg_UpdateMoleculeType_Explicit_Collision_Energy { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Explicit_Collision_Energy", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Explicit Compensation Voltage. 
/// </summary> public static string PasteDlg_UpdateMoleculeType_Explicit_Compensation_Voltage { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Explicit_Compensation_Voltage", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Explicit Declustering Potential. /// </summary> public static string PasteDlg_UpdateMoleculeType_Explicit_Declustering_Potential { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Explicit_Declustering_Potential", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Explicit Drift Time (msec). /// </summary> public static string PasteDlg_UpdateMoleculeType_Explicit_Drift_Time__msec_ { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Explicit_Drift_Time__msec_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Explicit Drift Time High Energy Offset (msec). /// </summary> public static string PasteDlg_UpdateMoleculeType_Explicit_Drift_Time_High_Energy_Offset__msec_ { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Explicit_Drift_Time_High_Energy_Offset__msec_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Explicit Ion Mobility. /// </summary> public static string PasteDlg_UpdateMoleculeType_Explicit_Ion_Mobility { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Explicit_Ion_Mobility", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Explicit Ion Mobility High Energy Offset. /// </summary> public static string PasteDlg_UpdateMoleculeType_Explicit_Ion_Mobility_High_Energy_Offset { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Explicit_Ion_Mobility_High_Energy_Offset", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Explicit Ion Mobility Units. /// </summary> public static string PasteDlg_UpdateMoleculeType_Explicit_Ion_Mobility_Units { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Explicit_Ion_Mobility_Units", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Explicit Retention Time. /// </summary> public static string PasteDlg_UpdateMoleculeType_Explicit_Retention_Time { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Explicit_Retention_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Explicit Retention Time Window. /// </summary> public static string PasteDlg_UpdateMoleculeType_Explicit_Retention_Time_Window { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Explicit_Retention_Time_Window", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Label Type. /// </summary> public static string PasteDlg_UpdateMoleculeType_Label_Type { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Label_Type", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Molecule List Name. /// </summary> public static string PasteDlg_UpdateMoleculeType_Molecule_List_Name { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Molecule_List_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Note. /// </summary> public static string PasteDlg_UpdateMoleculeType_Note { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Note", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptide. 
/// </summary> public static string PasteDlg_UpdateMoleculeType_Peptide { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Peptide", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Possible loss of data could occur if you switch to {0}. Do you want to continue?. /// </summary> public static string PasteDlg_UpdateMoleculeType_Possible_loss_of_data_could_occur_if_you_switch_to__0___Do_you_want_to_continue_ { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Possible_loss_of_data_could_occur_if_you_switch_to__0" + "___Do_you_want_to_continue_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor Adduct. /// </summary> public static string PasteDlg_UpdateMoleculeType_Precursor_Adduct { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Precursor_Adduct", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor Charge. /// </summary> public static string PasteDlg_UpdateMoleculeType_Precursor_Charge { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Precursor_Charge", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor Formula. /// </summary> public static string PasteDlg_UpdateMoleculeType_Precursor_Formula { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Precursor_Formula", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor m/z. /// </summary> public static string PasteDlg_UpdateMoleculeType_Precursor_m_z { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Precursor_m_z", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor Name. /// </summary> public static string PasteDlg_UpdateMoleculeType_Precursor_Name { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Precursor_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product Adduct. /// </summary> public static string PasteDlg_UpdateMoleculeType_Product_Adduct { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Product_Adduct", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product Charge. /// </summary> public static string PasteDlg_UpdateMoleculeType_Product_Charge { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Product_Charge", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product Formula. /// </summary> public static string PasteDlg_UpdateMoleculeType_Product_Formula { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Product_Formula", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product m/z. /// </summary> public static string PasteDlg_UpdateMoleculeType_Product_m_z { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Product_m_z", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product Name. /// </summary> public static string PasteDlg_UpdateMoleculeType_Product_Name { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Product_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product Neutral Loss. 
/// </summary> public static string PasteDlg_UpdateMoleculeType_Product_Neutral_Loss { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Product_Neutral_Loss", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Protein description. /// </summary> public static string PasteDlg_UpdateMoleculeType_Protein_description { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Protein_description", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Protein name. /// </summary> public static string PasteDlg_UpdateMoleculeType_Protein_name { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_Protein_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to S-Lens. /// </summary> public static string PasteDlg_UpdateMoleculeType_S_Lens { get { return ResourceManager.GetString("PasteDlg_UpdateMoleculeType_S_Lens", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error on line {0}: Precursor formula and m/z value do not agree for any charge state.. /// </summary> public static string PasteDlg_ValidateEntry_Error_on_line__0___Precursor_formula_and_m_z_value_do_not_agree_for_any_charge_state_ { get { return ResourceManager.GetString("PasteDlg_ValidateEntry_Error_on_line__0___Precursor_formula_and_m_z_value_do_not_" + "agree_for_any_charge_state_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error on line {0}: Precursor needs values for any two of: Formula, m/z or Charge.. /// </summary> public static string PasteDlg_ValidateEntry_Error_on_line__0___Precursor_needs_values_for_any_two_of__Formula__m_z_or_Charge_ { get { return ResourceManager.GetString("PasteDlg_ValidateEntry_Error_on_line__0___Precursor_needs_values_for_any_two_of__" + "Formula__m_z_or_Charge_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error on line {0}: Product formula and m/z value do not agree for any charge state.. /// </summary> public static string PasteDlg_ValidateEntry_Error_on_line__0___Product_formula_and_m_z_value_do_not_agree_for_any_charge_state_ { get { return ResourceManager.GetString("PasteDlg_ValidateEntry_Error_on_line__0___Product_formula_and_m_z_value_do_not_ag" + "ree_for_any_charge_state_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error on line {0}: Product needs values for any two of: Formula, m/z or Charge.. /// </summary> public static string PasteDlg_ValidateEntry_Error_on_line__0___Product_needs_values_for_any_two_of__Formula__m_z_or_Charge_ { get { return ResourceManager.GetString("PasteDlg_ValidateEntry_Error_on_line__0___Product_needs_values_for_any_two_of__Fo" + "rmula__m_z_or_Charge_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you want to filter them from the pasted list?. /// </summary> public static string PasteFilteredPeptidesDlg_Peptides_Do_you_want_to_filter_them_from_the_pasted_list { get { return ResourceManager.GetString("PasteFilteredPeptidesDlg_Peptides_Do_you_want_to_filter_them_from_the_pasted_list" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following peptides did not meet the current filter criteria:. 
/// </summary> public static string PasteFilteredPeptidesDlg_Peptides_The_following_peptides_did_not_meet_the_current_filter_criteria_ { get { return ResourceManager.GetString("PasteFilteredPeptidesDlg_Peptides_The_following_peptides_did_not_meet_the_current" + "_filter_criteria_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You must select an empty directory for the tutorial files.. /// </summary> public static string PathChooserDlg_OkDialog_You_must_select_an_empty_directory_for_the_tutorial_files_ { get { return ResourceManager.GetString("PathChooserDlg_OkDialog_You_must_select_an_empty_directory_for_the_tutorial_files" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Across . /// </summary> public static string PcaPlot_UpdateGraph__Across_ { get { return ResourceManager.GetString("PcaPlot_UpdateGraph__Across_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to PCA on rows. /// </summary> public static string PcaPlot_UpdateGraph_PCA_on_rows { get { return ResourceManager.GetString("PcaPlot_UpdateGraph_PCA_on_rows", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Principal Component {0}. /// </summary> public static string PcaPlot_UpdateGraph_Principal_Component__0_ { get { return ResourceManager.GetString("PcaPlot_UpdateGraph_Principal_Component__0_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Peak { get { object obj = ResourceManager.GetObject("Peak", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap PeakBlank { get { object obj = ResourceManager.GetObject("PeakBlank", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Must select a model for comparison.. /// </summary> public static string PeakBoundaryCompareTest_DoTest_Must_select_a_model_for_comparison_ { get { return ResourceManager.GetString("PeakBoundaryCompareTest_DoTest_Must_select_a_model_for_comparison_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The first line does not contain any of the possible separators comma, tab or space.. /// </summary> public static string PeakBoundaryImporter_DetermineCorrectSeparator_The_first_line_does_not_contain_any_of_the_possible_separators_comma__tab_or_space_ { get { return ResourceManager.GetString("PeakBoundaryImporter_DetermineCorrectSeparator_The_first_line_does_not_contain_an" + "y_of_the_possible_separators_comma__tab_or_space_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The first line does not contain any of the possible separators semicolon, tab or space.. /// </summary> public static string PeakBoundaryImporter_DetermineCorrectSeparator_The_first_line_does_not_contain_any_of_the_possible_separators_semicolon__tab_or_space_ { get { return ResourceManager.GetString("PeakBoundaryImporter_DetermineCorrectSeparator_The_first_line_does_not_contain_an" + "y_of_the_possible_separators_semicolon__tab_or_space_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to find the necessary headers {0} in the first line. 
        /// </summary>
        public static string PeakBoundaryImporter_Import_Failed_to_find_the_necessary_headers__0__in_the_first_line {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_Import_Failed_to_find_the_necessary_headers__0__in_the_first" +
                        "_line", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Failed to read the first line of the file.
        /// </summary>
        public static string PeakBoundaryImporter_Import_Failed_to_read_the_first_line_of_the_file {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_Import_Failed_to_read_the_first_line_of_the_file", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Importing Peak Boundaries.
        /// </summary>
        public static string PeakBoundaryImporter_Import_Importing_Peak_Boundaries {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_Import_Importing_Peak_Boundaries", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Line {0} field count {1} differs from the first line, which has {2}.
        /// </summary>
        public static string PeakBoundaryImporter_Import_Line__0__field_count__1__differs_from_the_first_line__which_has__2_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_Import_Line__0__field_count__1__differs_from_the_first_line_" +
                        "_which_has__2_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Missing end time on line {0}.
        /// </summary>
        public static string PeakBoundaryImporter_Import_Missing_end_time_on_line__0_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_Import_Missing_end_time_on_line__0_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Missing start time on line {0}.
        /// </summary>
        public static string PeakBoundaryImporter_Import_Missing_start_time_on_line__0_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_Import_Missing_start_time_on_line__0_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptide has unrecognized modifications {0} at line {1}.
        /// </summary>
        public static string PeakBoundaryImporter_Import_Peptide_has_unrecognized_modifications__0__at_line__1_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_Import_Peptide_has_unrecognized_modifications__0__at_line__1" +
                        "_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Sample {0} on line {1} does not match the file {2}..
        /// </summary>
        public static string PeakBoundaryImporter_Import_Sample__0__on_line__1__does_not_match_the_file__2__ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_Import_Sample__0__on_line__1__does_not_match_the_file__2__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The decoy value {0} on line {1} is invalid: must be 0 or 1..
        /// </summary>
        public static string PeakBoundaryImporter_Import_The_decoy_value__0__on_line__1__is_invalid__must_be_0_or_1_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_Import_The_decoy_value__0__on_line__1__is_invalid__must_be_0" +
                        "_or_1_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The value &apos;{0}&apos; on line {1} is not a valid charge state..
        /// </summary>
        public static string PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_charge_state_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_charge_sta" +
                        "te_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The value &apos;{0}&apos; on line {1} is not a valid end time..
        /// </summary>
        public static string PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_end_time_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_end_time_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The value &apos;{0}&apos; on line {1} is not a valid start time..
        /// </summary>
        public static string PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_start_time_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_start_time" +
                        "_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The value &apos;{0}&apos; on line {1} is not a valid time..
        /// </summary>
        public static string PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_time_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_time_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Continue peak boundary import ignoring these charge states?.
        /// </summary>
        public static string PeakBoundaryImporter_UnrecognizedPeptidesCancel_Continue_peak_boundary_import_ignoring_these_charge_states_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_UnrecognizedPeptidesCancel_Continue_peak_boundary_import_ign" +
                        "oring_these_charge_states_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Continue peak boundary import ignoring this charge state?.
        /// </summary>
        public static string PeakBoundaryImporter_UnrecognizedPeptidesCancel_Continue_peak_boundary_import_ignoring_this_charge_state_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_UnrecognizedPeptidesCancel_Continue_peak_boundary_import_ign" +
                        "oring_this_charge_state_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Continue peak boundary import ignoring this file?.
        /// </summary>
        public static string PeakBoundaryImporter_UnrecognizedPeptidesCancel_Continue_peak_boundary_import_ignoring_this_file_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_UnrecognizedPeptidesCancel_Continue_peak_boundary_import_ign" +
                        "oring_this_file_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Continue peak boundary import ignoring this peptide?.
        /// </summary>
        public static string PeakBoundaryImporter_UnrecognizedPeptidesCancel_Continue_peak_boundary_import_ignoring_this_peptide_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_UnrecognizedPeptidesCancel_Continue_peak_boundary_import_ign" +
                        "oring_this_peptide_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The following {0} peptide, file, and charge state combinations were not recognized:.
        /// </summary>
        public static string PeakBoundaryImporter_UnrecognizedPeptidesCancel_The_following__0__peptide__file__and_charge_state_combinations_were_not_recognized_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_UnrecognizedPeptidesCancel_The_following__0__peptide__file__" +
                        "and_charge_state_combinations_were_not_recognized_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The following file name in the peak boundaries file was not recognized:.
        /// </summary>
        public static string PeakBoundaryImporter_UnrecognizedPeptidesCancel_The_following_file_name_in_the_peak_boundaries_file_was_not_recognized_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_UnrecognizedPeptidesCancel_The_following_file_name_in_the_pe" +
                        "ak_boundaries_file_was_not_recognized_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The following peptide, file, and charge state combination was not recognized:.
        /// </summary>
        public static string PeakBoundaryImporter_UnrecognizedPeptidesCancel_The_following_peptide__file__and_charge_state_combination_was_not_recognized_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_UnrecognizedPeptidesCancel_The_following_peptide__file__and_" +
                        "charge_state_combination_was_not_recognized_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The following peptide in the peak boundaries file was not recognized:.
        /// </summary>
        public static string PeakBoundaryImporter_UnrecognizedPeptidesCancel_The_following_peptide_in_the_peak_boundaries_file_was_not_recognized_ {
            get {
                return ResourceManager.GetString("PeakBoundaryImporter_UnrecognizedPeptidesCancel_The_following_peptide_in_the_peak" +
                        "_boundaries_file_was_not_recognized_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Unable to read a score annotation for peptide {0} of file {1}.
        /// </summary>
        public static string PeakBoundsMatch_PeakBoundsMatch_Unable_to_read_a_score_annotation_for_peptide__0__of_file__1_ {
            get {
                return ResourceManager.GetString("PeakBoundsMatch_PeakBoundsMatch_Unable_to_read_a_score_annotation_for_peptide__0_" +
                        "_of_file__1_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Unable to read apex retention time value for peptide {0} of file {1}..
        /// </summary>
        public static string PeakBoundsMatch_PeakBoundsMatch_Unable_to_read_apex_retention_time_value_for_peptide__0__of_file__1__ {
            get {
                return ResourceManager.GetString("PeakBoundsMatch_PeakBoundsMatch_Unable_to_read_apex_retention_time_value_for_pept" +
                        "ide__0__of_file__1__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Unable to read q value annotation for peptide {0} of file {1}.
        /// </summary>
        public static string PeakBoundsMatch_QValue_Unable_to_read_q_value_annotation_for_peptide__0__of_file__1_ {
            get {
                return ResourceManager.GetString("PeakBoundsMatch_QValue_Unable_to_read_q_value_annotation_for_peptide__0__of_file_" +
                        "_1_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to On line {0}, {1}.
        /// </summary>
        public static string PeakCalculatorGridViewDriver_ValidateRow_On_line__0____1_ {
            get {
                return ResourceManager.GetString("PeakCalculatorGridViewDriver_ValidateRow_On_line__0____1_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to &apos;{0}&apos; is not a known name for a peak feature calculator.
        /// </summary>
        public static string PeakCalculatorWeight_Validate___0___is_not_a_known_name_for_a_peak_feature_calculator {
            get {
                return ResourceManager.GetString("PeakCalculatorWeight_Validate___0___is_not_a_known_name_for_a_peak_feature_calcul" +
                        "ator", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Calculating peak group scores.
        /// </summary>
        public static string PeakFeatureEnumerator_GetPeakFeatures_Calculating_peak_group_scores {
            get {
                return ResourceManager.GetString("PeakFeatureEnumerator_GetPeakFeatures_Calculating_peak_group_scores", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peak Scoring Models.
        /// </summary>
        public static string PeakScoringModelList_Label_Peak_Scoring_Models {
            get {
                return ResourceManager.GetString("PeakScoringModelList_Label_Peak_Scoring_Models", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Edit Peak Scoring Models.
        /// </summary>
        public static string PeakScoringModelList_Title_Edit_Peak_Scoring_Models {
            get {
                return ResourceManager.GetString("PeakScoringModelList_Title_Edit_Peak_Scoring_Models", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap Peptide {
            get {
                object obj = ResourceManager.GetObject("Peptide", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Explicit retention time window requires an explicit retention time value..
        /// </summary>
        public static string Peptide_ExplicitRetentionTimeWindow_Explicit_retention_time_window_requires_an_explicit_retention_time_value_ {
            get {
                return ResourceManager.GetString("Peptide_ExplicitRetentionTimeWindow_Explicit_retention_time_window_requires_an_ex" +
                        "plicit_retention_time_value_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Are you sure you want to delete the molecule &apos;{0}&apos;?.
        /// </summary>
        public static string Peptide_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_the_molecule___0___ {
            get {
                return ResourceManager.GetString("Peptide_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_the_molecule___0___" +
                        "", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Are you sure you want to delete the peptide &apos;{0}&apos;?.
        /// </summary>
        public static string Peptide_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_the_peptide___0___ {
            get {
                return ResourceManager.GetString("Peptide_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_the_peptide___0___", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Are you sure you want to delete these {0} molecules?.
        /// </summary>
        public static string Peptide_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__molecules_ {
            get {
                return ResourceManager.GetString("Peptide_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__molecules" +
                        "_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Are you sure you want to delete these {0} peptides?.
        /// </summary>
        public static string Peptide_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__peptides_ {
            get {
                return ResourceManager.GetString("Peptide_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__peptides_" +
                        "", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to iRT standards can only be changed by modifying the iRT calculator.
        /// </summary>
        public static string Peptide_StandardType_iRT_standards_can_only_be_changed_by_modifying_the_iRT_calculator {
            get {
                return ResourceManager.GetString("Peptide_StandardType_iRT_standards_can_only_be_changed_by_modifying_the_iRT_calcu" +
                        "lator", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Direct editing of this value is only supported for non-proteomic molecules..
        /// </summary>
        public static string Peptide_ThrowIfNotSmallMolecule_Direct_editing_of_this_value_is_only_supported_for_small_molecules_ {
            get {
                return ResourceManager.GetString("Peptide_ThrowIfNotSmallMolecule_Direct_editing_of_this_value_is_only_supported_fo" +
                        "r_small_molecules_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to (missed {5}).
        /// </summary>
        public static string Peptide_ToString__missed__5__ {
            get {
                return ResourceManager.GetString("Peptide_ToString__missed__5__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to (missed {0}).
        /// </summary>
        public static string Peptide_ToString_missed__0__ {
            get {
                return ResourceManager.GetString("Peptide_ToString_missed__0__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptide sequence exceeds the bounds of the protein sequence..
        /// </summary>
        public static string Peptide_Validate_Peptide_sequence_exceeds_the_bounds_of_the_protein_sequence {
            get {
                return ResourceManager.GetString("Peptide_Validate_Peptide_sequence_exceeds_the_bounds_of_the_protein_sequence", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptides from protein sequences must have start and end values..
        /// </summary>
        public static string Peptide_Validate_Peptides_from_protein_sequences_must_have_start_and_end_values {
            get {
                return ResourceManager.GetString("Peptide_Validate_Peptides_from_protein_sequences_must_have_start_and_end_values", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptides without a protein sequence do not support the start and end properties..
        /// </summary>
        public static string Peptide_Validate_Peptides_without_a_protein_sequence_do_not_support_the_start_and_end_properties {
            get {
                return ResourceManager.GetString("Peptide_Validate_Peptides_without_a_protein_sequence_do_not_support_the_start_and" +
                        "_end_properties", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The peptide sequence {0} does not agree with the protein sequence {1} at ({2}:{3})..
        /// </summary>
        public static string Peptide_Validate_The_peptide_sequence__0__does_not_agree_with_the_protein_sequence__1__at__2__3__ {
            get {
                return ResourceManager.GetString("Peptide_Validate_The_peptide_sequence__0__does_not_agree_with_the_protein_sequenc" +
                        "e__1__at__2__3__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptides.
        /// </summary>
        public static string PeptideAnnotationPairFinder_DisplayName_Peptides {
            get {
                return ResourceManager.GetString("PeptideAnnotationPairFinder_DisplayName_Peptides", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to {0} CV.
        /// </summary>
        public static string PeptideAnnotationPairFinder_GetDisplayText__0__CV {
            get {
                return ResourceManager.GetString("PeptideAnnotationPairFinder_GetDisplayText__0__CV", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to {0} CV in {1}.
        /// </summary>
        public static string PeptideAnnotationPairFinder_GetDisplayText__0__CV_in__1_ {
            get {
                return ResourceManager.GetString("PeptideAnnotationPairFinder_GetDisplayText__0__CV_in__1_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Unable to process chromatograms for the molecule &apos;{0}&apos; because one chromatogram ends at time &apos;{1}&apos; and the other ends at time &apos;{2}&apos;..
        /// </summary>
        public static string PeptideChromDataSets_AddDataSet_Unable_to_process_chromatograms_for_the_molecule___0___because_one_chromatogram_ends_at_time___1___and_the_other_ends_at_time___2___ {
            get {
                return ResourceManager.GetString("PeptideChromDataSets_AddDataSet_Unable_to_process_chromatograms_for_the_molecule_" +
                        "__0___because_one_chromatogram_ends_at_time___1___and_the_other_ends_at_time___2" +
                        "___", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Unexpected null peak list.
        /// </summary>
        public static string PeptideChromDataSets_MergePeakGroups_Unexpected_null_peak_list {
            get {
                return ResourceManager.GetString("PeptideChromDataSets_MergePeakGroups_Unexpected_null_peak_list", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap PeptideDecoy {
            get {
                object obj = ResourceManager.GetObject("PeptideDecoy", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }
        
        /// <summary>
        ///   Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap PeptideDecoyLib {
            get {
                object obj = ResourceManager.GetObject("PeptideDecoyLib", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to iRT.
        /// </summary>
        public static string PeptideDocNode_GetStandardTypeDisplayName_iRT {
            get {
                return ResourceManager.GetString("PeptideDocNode_GetStandardTypeDisplayName_iRT", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Global Standard.
        /// </summary>
        public static string PeptideDocNode_GetStandardTypeDisplayName_Normalization {
            get {
                return ResourceManager.GetString("PeptideDocNode_GetStandardTypeDisplayName_Normalization", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to QC.
        /// </summary>
        public static string PeptideDocNode_GetStandardTypeDisplayName_QC {
            get {
                return ResourceManager.GetString("PeptideDocNode_GetStandardTypeDisplayName_QC", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to {0} (rank {1}).
        /// </summary>
        public static string PeptideDocNodeToString__0__rank__1__ {
            get {
                return ResourceManager.GetString("PeptideDocNodeToString__0__rank__1__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to &amp;Exclusions:.
        /// </summary>
        public static string PeptideExcludeList_Label_Exclusions {
            get {
                return ResourceManager.GetString("PeptideExcludeList_Label_Exclusions", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Edit Exclusions.
        /// </summary>
        public static string PeptideExcludeList_Title_Edit_Exclusions {
            get {
                return ResourceManager.GetString("PeptideExcludeList_Title_Edit_Exclusions", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptide exclusion must have a regular expression..
        /// </summary>
        public static string PeptideExcludeRegex_Validate_Peptide_exclusion_must_have_a_regular_expression {
            get {
                return ResourceManager.GetString("PeptideExcludeRegex_Validate_Peptide_exclusion_must_have_a_regular_expression", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to excluded n-terminal amino acids.
        /// </summary>
        public static string PeptideFilter_DoValidate_excluded_n_terminal_amino_acids {
            get {
                return ResourceManager.GetString("PeptideFilter_DoValidate_excluded_n_terminal_amino_acids", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to maximum peptide length.
        /// </summary>
        public static string PeptideFilter_DoValidate_maximum_peptide_length {
            get {
                return ResourceManager.GetString("PeptideFilter_DoValidate_maximum_peptide_length", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to minimum peptide length.
        /// </summary>
        public static string PeptideFilter_DoValidate_minimum_peptide_length {
            get {
                return ResourceManager.GetString("PeptideFilter_DoValidate_minimum_peptide_length", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The peptide exclusion {0} has an invalid regular expression &apos;{1}&apos;..
        /// </summary>
        public static string PeptideFilter_DoValidate_The_peptide_exclusion__0__has_an_invalid_regular_expression__1__ {
            get {
                return ResourceManager.GetString("PeptideFilter_DoValidate_The_peptide_exclusion__0__has_an_invalid_regular_express" +
                        "ion__1__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Invalid exclusion list..
        /// </summary>
        public static string PeptideFilter_ExcludeExprToRegEx_Invalid_exclusion_list {
            get {
                return ResourceManager.GetString("PeptideFilter_ExcludeExprToRegEx_Invalid_exclusion_list", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The value {1} for {0} must be between {2} and {3}..
        /// </summary>
        public static string PeptideFilter_ValidateIntRange_The_value__1__for__0__must_be_between__2__and__3__ {
            get {
                return ResourceManager.GetString("PeptideFilter_ValidateIntRange_The_value__1__for__0__must_be_between__2__and__3__" +
                        "", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The sequence &apos;{0}&apos; is already present in the list..
        /// </summary>
        public static string PeptideGridViewDriver_DoCellValidating_The_sequence__0__is_already_present_in_the_list {
            get {
                return ResourceManager.GetString("PeptideGridViewDriver_DoCellValidating_The_sequence__0__is_already_present_in_the" +
                        "_list", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Invalid decimal number format {0} on line {1}.
        /// </summary>
        public static string PeptideGridViewDriver_ValidateRow_Invalid_decimal_number_format__0__on_line__1_ {
            get {
                return ResourceManager.GetString("PeptideGridViewDriver_ValidateRow_Invalid_decimal_number_format__0__on_line__1_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Missing peptide sequence on line {0}.
        /// </summary>
        public static string PeptideGridViewDriver_ValidateRow_Missing_peptide_sequence_on_line__0_ {
            get {
                return ResourceManager.GetString("PeptideGridViewDriver_ValidateRow_Missing_peptide_sequence_on_line__0_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Missing value on line {0}.
        /// </summary>
        public static string PeptideGridViewDriver_ValidateRow_Missing_value_on_line__0_ {
            get {
                return ResourceManager.GetString("PeptideGridViewDriver_ValidateRow_Missing_value_on_line__0_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The pasted text must have two columns..
        /// </summary>
        public static string PeptideGridViewDriver_ValidateRow_The_pasted_text_must_have_two_columns_ {
            get {
                return ResourceManager.GetString("PeptideGridViewDriver_ValidateRow_The_pasted_text_must_have_two_columns_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The text {0} is not a valid peptide sequence on line {1}.
        /// </summary>
        public static string PeptideGridViewDriver_ValidateRow_The_text__0__is_not_a_valid_peptide_sequence_on_line__1_ {
            get {
                return ResourceManager.GetString("PeptideGridViewDriver_ValidateRow_The_text__0__is_not_a_valid_peptide_sequence_on" +
                        "_line__1_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The time {0} must be greater than zero on line {1}.
        /// </summary>
        public static string PeptideGridViewDriver_ValidateRow_The_time__0__must_be_greater_than_zero_on_line__1_ {
            get {
                return ResourceManager.GetString("PeptideGridViewDriver_ValidateRow_The_time__0__must_be_greater_than_zero_on_line_" +
                        "_1_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The added lists contains {0} peptides which already appear in the {1} list..
        /// </summary>
        public static string PeptideGridViewDriver_ValidateUniquePeptides_The_added_lists_contains__0__peptides_which_already_appear_in_the__1__list {
            get {
                return ResourceManager.GetString("PeptideGridViewDriver_ValidateUniquePeptides_The_added_lists_contains__0__peptide" +
                        "s_which_already_appear_in_the__1__list", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The added lists contains {0} peptides which appear multiple times..
        /// </summary>
        public static string PeptideGridViewDriver_ValidateUniquePeptides_The_added_lists_contains__0__peptides_which_appear_multiple_times {
            get {
                return ResourceManager.GetString("PeptideGridViewDriver_ValidateUniquePeptides_The_added_lists_contains__0__peptide" +
                        "s_which_appear_multiple_times", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The following peptides already appear in the {0} list:.
        /// </summary>
        public static string PeptideGridViewDriver_ValidateUniquePeptides_The_following_peptides_already_appear_in_the__0__list {
            get {
                return ResourceManager.GetString("PeptideGridViewDriver_ValidateUniquePeptides_The_following_peptides_already_appea" +
                        "r_in_the__0__list", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The following peptides appear multiple times in the added list:.
        /// </summary>
        public static string PeptideGridViewDriver_ValidateUniquePeptides_The_following_peptides_appear_multiple_times_in_the_added_list {
            get {
                return ResourceManager.GetString("PeptideGridViewDriver_ValidateUniquePeptides_The_following_peptides_appear_multip" +
                        "le_times_in_the_added_list", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The peptide &apos;{0}&apos; already appears in the {1} list..
        /// </summary>
        public static string PeptideGridViewDriver_ValidateUniquePeptides_The_peptide__0__already_appears_in_the__1__list {
            get {
                return ResourceManager.GetString("PeptideGridViewDriver_ValidateUniquePeptides_The_peptide__0__already_appears_in_t" +
                        "he__1__list", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The peptide &apos;{0}&apos; appears multiple times in the added list..
        /// </summary>
        public static string PeptideGridViewDriver_ValidateUniquePeptides_The_peptide__0__appears_multiple_times_in_the_added_list {
            get {
                return ResourceManager.GetString("PeptideGridViewDriver_ValidateUniquePeptides_The_peptide__0__appears_multiple_tim" +
                        "es_in_the_added_list", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Failed to explain all transitions for {0} m/z {1} with a single set of modifications.
        /// </summary>
        public static string PeptideGroupBuilder_AppendTransition_Failed_to_explain_all_transitions_for_0__m_z__1__with_a_single_set_of_modifications {
            get {
                return ResourceManager.GetString("PeptideGroupBuilder_AppendTransition_Failed_to_explain_all_transitions_for_0__m_z" +
                        "__1__with_a_single_set_of_modifications", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Failed to explain all transitions for m/z {0} (peptide {1}) with a single precursor..
        /// </summary>
        public static string PeptideGroupBuilder_AppendTransition_Failed_to_explain_all_transitions_for_m_z__0___peptide__1___with_a_single_precursor {
            get {
                return ResourceManager.GetString("PeptideGroupBuilder_AppendTransition_Failed_to_explain_all_transitions_for_m_z__0" +
                        "___peptide__1___with_a_single_precursor", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The peptide {0} was not found in the sequence {1}..
        /// </summary>
        public static string PeptideGroupBuilder_AppendTransition_The_peptide__0__was_not_found_in_the_sequence__1__ {
            get {
                return ResourceManager.GetString("PeptideGroupBuilder_AppendTransition_The_peptide__0__was_not_found_in_the_sequenc" +
                        "e__1__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Missing iRT value for peptide {0}, precursor m/z {1}..
        /// </summary>
        public static string PeptideGroupBuilder_FinalizeTransitionGroups_Missing_iRT_value_for_peptide__0___precursor_m_z__1_ {
            get {
                return ResourceManager.GetString("PeptideGroupBuilder_FinalizeTransitionGroups_Missing_iRT_value_for_peptide__0___p" +
                        "recursor_m_z__1_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Two transitions of the same precursor, {0} (m/z {1}) , have different iRT values, {2} and {3}. iRT values must be assigned consistently in an imported transition list..
        /// </summary>
        public static string PeptideGroupBuilder_FinalizeTransitionGroups_Two_transitions_of_the_same_precursor___0___m_z__1_____have_different_iRT_values___2__and__3___iRT_values_must_be_assigned_consistently_in_an_imported_transition_list_ {
            get {
                return ResourceManager.GetString("PeptideGroupBuilder_FinalizeTransitionGroups_Two_transitions_of_the_same_precurso" +
                        "r___0___m_z__1_____have_different_iRT_values___2__and__3___iRT_values_must_be_as" +
                        "signed_consistently_in_an_imported_transition_list_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The current document settings would cause the number of peptides to exceed {0:n0}. The document settings must be more restrictive or add fewer proteins..
        /// </summary>
        public static string PeptideGroupDocNode_ChangeSettings_The_current_document_settings_would_cause_the_number_of_peptides_to_exceed__0_n0___The_document_settings_must_be_more_restrictive_or_add_fewer_proteins_ {
            get {
                return ResourceManager.GetString("PeptideGroupDocNode_ChangeSettings_The_current_document_settings_would_cause_the_" +
                        "number_of_peptides_to_exceed__0_n0___The_document_settings_must_be_more_restrict" +
                        "ive_or_add_fewer_proteins_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The current document settings would cause the number of targeted transitions to exceed {0:n0}. The document settings must be more restrictive or add fewer proteins..
        /// </summary>
        public static string PeptideGroupDocNode_ChangeSettings_The_current_document_settings_would_cause_the_number_of_targeted_transitions_to_exceed__0_n0___The_document_settings_must_be_more_restrictive_or_add_fewer_proteins_ {
            get {
                return ResourceManager.GetString("PeptideGroupDocNode_ChangeSettings_The_current_document_settings_would_cause_the_" +
                        "number_of_targeted_transitions_to_exceed__0_n0___The_document_settings_must_be_m" +
                        "ore_restrictive_or_add_fewer_proteins_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to {0} Peptides.
        /// </summary>
        public static string PeptideGroupTreeNode_ChildHeading__0__ {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_ChildHeading__0__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to {0} Molecules.
        /// </summary>
        public static string PeptideGroupTreeNode_ChildHeading__0__Molecules {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_ChildHeading__0__Molecules", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to {0} peptides.
        /// </summary>
        public static string PeptideGroupTreeNode_ChildUndoHeading__0__ {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_ChildUndoHeading__0__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to {0} molecules.
        /// </summary>
        public static string PeptideGroupTreeNode_ChildUndoHeading__0__molecules {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_ChildUndoHeading__0__molecules", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Molecule List.
        /// </summary>
        public static string PeptideGroupTreeNode_Heading_Molecule_List {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_Heading_Molecule_List", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptide List.
        /// </summary>
        public static string PeptideGroupTreeNode_Heading_Peptide_List {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_Heading_Peptide_List", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Protein.
        /// </summary>
        public static string PeptideGroupTreeNode_Heading_Protein {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_Heading_Protein", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to &lt;name: {0}&gt;.
        /// </summary>
        public static string PeptideGroupTreeNode_ProteinModalDisplayText__name___0__ {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_ProteinModalDisplayText__name___0__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Accession.
        /// </summary>
        public static string PeptideGroupTreeNode_RenderTip_Accession {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_RenderTip_Accession", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Description.
        /// </summary>
        public static string PeptideGroupTreeNode_RenderTip_Description {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_RenderTip_Description", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Gene.
        /// </summary>
        public static string PeptideGroupTreeNode_RenderTip_Gene {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_RenderTip_Gene", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Name.
        /// </summary>
        public static string PeptideGroupTreeNode_RenderTip_Name {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_RenderTip_Name", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Original Description.
        /// </summary>
        public static string PeptideGroupTreeNode_RenderTip_Original_Description {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_RenderTip_Original_Description", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Original Name.
        /// </summary>
        public static string PeptideGroupTreeNode_RenderTip_Original_Name {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_RenderTip_Original_Name", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Preferred Name.
        /// </summary>
        public static string PeptideGroupTreeNode_RenderTip_Preferred_Name {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_RenderTip_Preferred_Name", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Searched.
        /// </summary>
        public static string PeptideGroupTreeNode_RenderTip_Searched {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_RenderTip_Searched", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Species.
        /// </summary>
        public static string PeptideGroupTreeNode_RenderTip_Species {
            get {
                return ResourceManager.GetString("PeptideGroupTreeNode_RenderTip_Species", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap PeptideIrt {
            get {
                object obj = ResourceManager.GetObject("PeptideIrt", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }
        
        /// <summary>
        ///   Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap PeptideIrtLib {
            get {
                object obj = ResourceManager.GetObject("PeptideIrtLib", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }
        
        /// <summary>
        ///   Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap PeptideLib {
            get {
                object obj = ResourceManager.GetObject("PeptideLib", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Libraries and library specifications do not match..
        /// </summary>
        public static string PeptideLibraries_DoValidate_Libraries_and_library_specifications_do_not_match_ {
            get {
                return ResourceManager.GetString("PeptideLibraries_DoValidate_Libraries_and_library_specifications_do_not_match_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Library picked peptide count {0} must be between {1} and {2}..
        /// </summary>
        public static string PeptideLibraries_DoValidate_Library_picked_peptide_count__0__must_be_between__1__and__2__ {
            get {
                return ResourceManager.GetString("PeptideLibraries_DoValidate_Library_picked_peptide_count__0__must_be_between__1__" +
                        "and__2__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Limiting peptides per protein requires a ranking method to be specified..
        /// </summary>
        public static string PeptideLibraries_DoValidate_Limiting_peptides_per_protein_requires_a_ranking_method_to_be_specified {
            get {
                return ResourceManager.GetString("PeptideLibraries_DoValidate_Limiting_peptides_per_protein_requires_a_ranking_meth" +
                        "od_to_be_specified", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The specified method of matching library spectra does not support peptide ranking..
        /// </summary>
        public static string PeptideLibraries_DoValidate_The_specified_method_of_matching_library_spectra_does_not_support_peptide_ranking {
            get {
                return ResourceManager.GetString("PeptideLibraries_DoValidate_The_specified_method_of_matching_library_spectra_does" +
                        "_not_support_peptide_ranking", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Specified libraries do not support the &apos;{0}&apos; peptide ranking..
        /// </summary>
        public static string PeptideLibraries_EnsureRankId_Specified_libraries_do_not_support_the___0___peptide_ranking {
            get {
                return ResourceManager.GetString("PeptideLibraries_EnsureRankId_Specified_libraries_do_not_support_the___0___peptid" +
                        "e_ranking", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Attempt to serialize list containing invalid type..
        /// </summary>
        public static string PeptideLibraries_WriteXml_Attempt_to_serialize_list_containing_invalid_type {
            get {
                return ResourceManager.GetString("PeptideLibraries_WriteXml_Attempt_to_serialize_list_containing_invalid_type", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap PeptideList {
            get {
                object obj = ResourceManager.GetObject("PeptideList", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Modification type {0} not found..
        /// </summary>
        public static string PeptideModifications_ChangeModifications_Modification_type__0__not_found {
            get {
                return ResourceManager.GetString("PeptideModifications_ChangeModifications_Modification_type__0__not_found", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Maximum neutral losses {0} must be between {1} and {2}.
        /// </summary>
        public static string PeptideModifications_DoValidate_Maximum_neutral_losses__0__must_be_between__1__and__2__ {
            get {
                return ResourceManager.GetString("PeptideModifications_DoValidate_Maximum_neutral_losses__0__must_be_between__1__an" +
                        "d__2__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Maximum variable modifications {0} must be between {1} and {2}.
        /// </summary>
        public static string PeptideModifications_DoValidate_Maximum_variable_modifications__0__must_be_between__1__and__2__ {
            get {
                return ResourceManager.GetString("PeptideModifications_DoValidate_Maximum_variable_modifications__0__must_be_betwee" +
                        "n__1__and__2__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Heavy modifications found without &apos;{0}&apos; attribute..
        /// </summary>
        public static string PeptideModifications_ReadXml_Heavy_modifications_found_without__0__attribute {
            get {
                return ResourceManager.GetString("PeptideModifications_ReadXml_Heavy_modifications_found_without__0__attribute", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Internal standard type {0} not found..
        /// </summary>
        public static string PeptideModifications_ReadXml_Internal_standard_type__0__not_found {
            get {
                return ResourceManager.GetString("PeptideModifications_ReadXml_Internal_standard_type__0__not_found", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Calculating scheduling from trends requires a retention time window for measured data..
        /// </summary>
        public static string PeptidePrediction_CalcMaxTrendReplicates_Calculating_scheduling_from_trends_requires_a_retention_time_window_for_measured_data {
            get {
                return ResourceManager.GetString("PeptidePrediction_CalcMaxTrendReplicates_Calculating_scheduling_from_trends_requi" +
                        "res_a_retention_time_window_for_measured_data", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The retention time window {0} for a scheduled method based on measured results must be between {1} and {2}..
        /// </summary>
        public static string PeptidePrediction_DoValidate_The_retention_time_window__0__for_a_scheduled_method_based_on_measured_results_must_be_between__1__and__2__ {
            get {
                return ResourceManager.GetString("PeptidePrediction_DoValidate_The_retention_time_window__0__for_a_scheduled_method" +
                        "_based_on_measured_results_must_be_between__1__and__2__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap PeptideQc {
            get {
                object obj = ResourceManager.GetObject("PeptideQc", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }
        
        /// <summary>
        ///   Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap PeptideQcLib {
            get {
                object obj = ResourceManager.GetObject("PeptideQcLib", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptide.
        /// </summary>
        public static string PeptideRegressionTipProvider_RenderTip_Peptide {
            get {
                return ResourceManager.GetString("PeptideRegressionTipProvider_RenderTip_Peptide", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The document must have imported results in order to train a model..
        /// </summary>
        public static string PeptideSettingsUI_comboPeakScoringModel_SelectedIndexChanged_The_document_must_have_imported_results_in_order_to_train_a_model_ {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_comboPeakScoringModel_SelectedIndexChanged_The_document_must_ha" +
                        "ve_imported_results_in_order_to_train_a_model_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Unrecognized Model.
        /// </summary>
        public static string PeptideSettingsUI_ComboPeakScoringModelSelected_Unrecognized_Model {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_ComboPeakScoringModelSelected_Unrecognized_Model", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Do you want to uncheck the ones that do not?.
        /// </summary>
        public static string PeptideSettingsUI_comboRank_SelectedIndexChanged_Do_you_want_to_uncheck_the_ones_that_do_not {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_comboRank_SelectedIndexChanged_Do_you_want_to_uncheck_the_ones_" +
                        "that_do_not", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Not all libraries chosen support the &apos;{0}&apos; ranking for peptides..
        /// </summary>
        public static string PeptideSettingsUI_comboRank_SelectedIndexChanged_Not_all_libraries_chosen_support_the__0__ranking_for_peptides {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_comboRank_SelectedIndexChanged_Not_all_libraries_chosen_support" +
                        "_the__0__ranking_for_peptides", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Changing peptide settings.
        /// </summary>
        public static string PeptideSettingsUI_OkDialog_Changing_peptide_settings {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_OkDialog_Changing_peptide_settings", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Finishing up building library.
        /// </summary>
        public static string PeptideSettingsUI_ShowBuildLibraryDlg_Finishing_up_building_library {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_ShowBuildLibraryDlg_Finishing_up_building_library", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Loading {0}.
        /// </summary>
        public static string PeptideSettingsUI_ShowFilterMidasDlg_Loading__0_ {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_ShowFilterMidasDlg_Loading__0_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Loading MIDAS Library.
        /// </summary>
        public static string PeptideSettingsUI_ShowFilterMidasDlg_Loading_MIDAS_Library {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_ShowFilterMidasDlg_Loading_MIDAS_Library", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Multiple MIDAS libraries in document. Select only one before filtering..
        /// </summary>
        public static string PeptideSettingsUI_ShowFilterMidasDlg_Multiple_MIDAS_libraries_in_document__Select_only_one_before_filtering_ {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_ShowFilterMidasDlg_Multiple_MIDAS_libraries_in_document__Select" +
                        "_only_one_before_filtering_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptide settings have been changed. Save changes?.
        /// </summary>
        public static string PeptideSettingsUI_ShowViewLibraryDlg_Peptide_settings_have_been_changed_Save_changes {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_ShowViewLibraryDlg_Peptide_settings_have_been_changed_Save_chan" +
                        "ges", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Choose at least one internal standard type..
        /// </summary>
        public static string PeptideSettingsUI_ValidateNewSettings_Choose_at_least_one_internal_standard_type {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_ValidateNewSettings_Choose_at_least_one_internal_standard_type", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Failed to load background proteome {0}..
        /// </summary>
        public static string PeptideSettingsUI_ValidateNewSettings_Failed_to_load_background_proteome__0__ {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_ValidateNewSettings_Failed_to_load_background_proteome__0__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to In order to use the &apos;Bilinear turning point&apos; method of LOD calculation, &apos;Regression fit&apos; must be set to &apos;Bilinear&apos;..
        /// </summary>
        public static string PeptideSettingsUI_ValidateNewSettings_In_order_to_use_the__Bilinear_turning_point__method_of_LOD_calculation___Regression_fit__must_be_set_to__Bilinear__ {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_ValidateNewSettings_In_order_to_use_the__Bilinear_turning_point" +
                        "__method_of_LOD_calculation___Regression_fit__must_be_set_to__Bilinear__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The file {0} is missing..
        /// </summary>
        public static string PeptideSettingsUI_ValidateNewSettings_The_file__0__is_missing_ {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_ValidateNewSettings_The_file__0__is_missing_", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to The file {0} may not be a valid proteome file..
        /// </summary>
        public static string PeptideSettingsUI_ValidateNewSettings_The_file__0__may_not_be_a_valid_proteome_file {
            get {
                return ResourceManager.GetString("PeptideSettingsUI_ValidateNewSettings_The_file__0__may_not_be_a_valid_proteome_fi" +
                        "le", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Calculating....
        /// </summary>
        public static string PeptidesPerProteinDlg_UpdateRemaining_Calculating___ {
            get {
                return ResourceManager.GetString("PeptidesPerProteinDlg_UpdateRemaining_Calculating___", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap PeptideStandard {
            get {
                object obj = ResourceManager.GetObject("PeptideStandard", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }
        
        /// <summary>
        ///   Looks up a localized resource of type System.Drawing.Bitmap.
        /// </summary>
        public static System.Drawing.Bitmap PeptideStandardLib {
            get {
                object obj = ResourceManager.GetObject("PeptideStandardLib", resourceCulture);
                return ((System.Drawing.Bitmap)(obj));
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Precursor m/z.
        /// </summary>
        public static string PeptideTipProvider_RenderTip_Precursor_m_z {
            get {
                return ResourceManager.GetString("PeptideTipProvider_RenderTip_Precursor_m_z", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Ion adducts.
        /// </summary>
        public static string PeptideToMoleculeText_Ion_adducts {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Ion_adducts", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Ion charges.
        /// </summary>
        public static string PeptideToMoleculeText_Ion_charges {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Ion_charges", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Modified Peptide Sequence.
        /// </summary>
        public static string PeptideToMoleculeText_Modified_Peptide_Sequence {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Modified_Peptide_Sequence", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Modified Sequence.
        /// </summary>
        public static string PeptideToMoleculeText_Modified_Sequence {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Modified_Sequence", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Molecule.
        /// </summary>
        public static string PeptideToMoleculeText_Molecule {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Molecule", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Molecule List.
        /// </summary>
        public static string PeptideToMoleculeText_Molecule_List {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Molecule_List", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Molecule Lists.
        /// </summary>
        public static string PeptideToMoleculeText_Molecule_Lists {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Molecule_Lists", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Molecules.
        /// </summary>
        public static string PeptideToMoleculeText_Molecules {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Molecules", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptide.
        /// </summary>
        public static string PeptideToMoleculeText_Peptide {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Peptide", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptide List.
        /// </summary>
        public static string PeptideToMoleculeText_Peptide_List {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Peptide_List", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptide Sequence.
        /// </summary>
        public static string PeptideToMoleculeText_Peptide_Sequence {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Peptide_Sequence", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptides.
        /// </summary>
        public static string PeptideToMoleculeText_Peptides {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Peptides", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Protein.
        /// </summary>
        public static string PeptideToMoleculeText_Protein {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Protein", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Proteins.
        /// </summary>
        public static string PeptideToMoleculeText_Proteins {
            get {
                return ResourceManager.GetString("PeptideToMoleculeText_Proteins", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to {0} Precursors.
        /// </summary>
        public static string PeptideTreeNode_ChildHeading__0__ {
            get {
                return ResourceManager.GetString("PeptideTreeNode_ChildHeading__0__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to {0} precursors.
        /// </summary>
        public static string PeptideTreeNode_ChildUndoHeading__0__ {
            get {
                return ResourceManager.GetString("PeptideTreeNode_ChildUndoHeading__0__", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Peptide.
        /// </summary>
        public static string PeptideTreeNode_Heading_Title {
            get {
                return ResourceManager.GetString("PeptideTreeNode_Heading_Title", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Molecule.
        /// </summary>
        public static string PeptideTreeNode_Heading_Title_Molecule {
            get {
                return ResourceManager.GetString("PeptideTreeNode_Heading_Title_Molecule", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to First.
        /// </summary>
        public static string PeptideTreeNode_RenderTip_First {
            get {
                return ResourceManager.GetString("PeptideTreeNode_RenderTip_First", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Last.
        /// </summary>
        public static string PeptideTreeNode_RenderTip_Last {
            get {
                return ResourceManager.GetString("PeptideTreeNode_RenderTip_Last", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Neutral Mass.
        /// </summary>
        public static string PeptideTreeNode_RenderTip_Neutral_Mass {
            get {
                return ResourceManager.GetString("PeptideTreeNode_RenderTip_Neutral_Mass", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Next.
        /// </summary>
        public static string PeptideTreeNode_RenderTip_Next {
            get {
                return ResourceManager.GetString("PeptideTreeNode_RenderTip_Next", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Previous.
        /// </summary>
        public static string PeptideTreeNode_RenderTip_Previous {
            get {
                return ResourceManager.GetString("PeptideTreeNode_RenderTip_Previous", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Rank.
        /// </summary>
        public static string PeptideTreeNode_RenderTip_Rank {
            get {
                return ResourceManager.GetString("PeptideTreeNode_RenderTip_Rank", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Source.
        /// </summary>
        public static string PeptideTreeNode_RenderTip_Source {
            get {
                return ResourceManager.GetString("PeptideTreeNode_RenderTip_Source", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Permute isotope modifications.
        /// </summary>
        public static string PermuteIsotopeModificationsDlg_OkDialog_Permute_isotope_modifications {
            get {
                return ResourceManager.GetString("PermuteIsotopeModificationsDlg_OkDialog_Permute_isotope_modifications", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Permuting Isotope Modifications.
        /// </summary>
        public static string PermuteIsotopeModificationsDlg_OkDialog_Permuting_Isotope_Modifications {
            get {
                return ResourceManager.GetString("PermuteIsotopeModificationsDlg_OkDialog_Permuting_Isotope_Modifications", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Updating settings.
        /// </summary>
        public static string PermuteIsotopeModificationsDlg_OkDialog_Updating_settings {
            get {
                return ResourceManager.GetString("PermuteIsotopeModificationsDlg_OkDialog_Updating_settings", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to External Tools.
        /// </summary>
        public static string PersistedViews_ExternalToolsGroup_External_Tools {
            get {
                return ResourceManager.GetString("PersistedViews_ExternalToolsGroup_External_Tools", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to Main.
        /// </summary>
        public static string PersistedViews_MainGroup_Main {
            get {
                return ResourceManager.GetString("PersistedViews_MainGroup_Main", resourceCulture);
            }
        }
        
        /// <summary>
        ///   Looks up a localized string similar to piecwise linear functions.
/// </summary> public static string PiecewiseLinearRegressionFunction_GetRegressionDescription_piecwise_linear_functions { get { return ResourceManager.GetString("PiecewiseLinearRegressionFunction_GetRegressionDescription_piecwise_linear_functi" + "ons", resourceCulture); } } /// <summary> /// Looks up a localized string similar to AreaRatio. /// </summary> public static string Pivoter_QualifyColumnInfo_AreaRatio { get { return ResourceManager.GetString("Pivoter_QualifyColumnInfo_AreaRatio", resourceCulture); } } /// <summary> /// Looks up a localized string similar to AreaRatioTo. /// </summary> public static string Pivoter_QualifyColumnInfo_AreaRatioTo { get { return ResourceManager.GetString("Pivoter_QualifyColumnInfo_AreaRatioTo", resourceCulture); } } /// <summary> /// Looks up a localized string similar to TotalAreaRatio. /// </summary> public static string Pivoter_QualifyColumnInfo_TotalAreaRatio { get { return ResourceManager.GetString("Pivoter_QualifyColumnInfo_TotalAreaRatio", resourceCulture); } } /// <summary> /// Looks up a localized string similar to TotalAreaRatioTo. /// </summary> public static string Pivoter_QualifyColumnInfo_TotalAreaRatioTo { get { return ResourceManager.GetString("Pivoter_QualifyColumnInfo_TotalAreaRatioTo", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A report must have at least one column.. /// </summary> public static string PivotReportDlg_OkDialog_A_report_must_have_at_least_one_column { get { return ResourceManager.GetString("PivotReportDlg_OkDialog_A_report_must_have_at_least_one_column", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A report must have at least one column.. /// </summary> public static string PivotReportDlg_ShowPreview_A_report_must_have_at_least_one_column_ { get { return ResourceManager.GetString("PivotReportDlg_ShowPreview_A_report_must_have_at_least_one_column_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} has been modified, since it was first opened.. /// </summary> public static string PooledFileStream_Connect_The_file__0__has_been_modified_since_it_was_first_opened { get { return ResourceManager.GetString("PooledFileStream_Connect_The_file__0__has_been_modified_since_it_was_first_opened" + "", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap PopupBtn { get { object obj = ResourceManager.GetObject("PopupBtn", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Auto-select filtered {0}. /// </summary> public static string PopupPickList_UpdateAutoManageUI_Auto_select_filtered__0_ { get { return ResourceManager.GetString("PopupPickList_UpdateAutoManageUI_Auto_select_filtered__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to off. /// </summary> public static string PopupPickList_UpdateAutoManageUI_off { get { return ResourceManager.GetString("PopupPickList_UpdateAutoManageUI_off", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to delete the precursor &apos;{0}&apos;?. 
/// </summary> public static string Precursor_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_the_precursor___0___ { get { return ResourceManager.GetString("Precursor_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_the_precursor___0" + "___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to delete these {0} precursors?. /// </summary> public static string Precursor_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__precursors_ { get { return ResourceManager.GetString("Precursor_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__precurs" + "ors_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Print { get { object obj = ResourceManager.GetObject("Print", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to A standard peptide was missing when trying to recalibrate.. /// </summary> public static string ProcessedIrtAverages_RecalibrateStandards_A_standard_peptide_was_missing_when_trying_to_recalibrate_ { get { return ResourceManager.GetString("ProcessedIrtAverages_RecalibrateStandards_A_standard_peptide_was_missing_when_try" + "ing_to_recalibrate_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error copying external tools from previous installation. /// </summary> public static string Program_CopyOldTools_Error_copying_external_tools_from_previous_installation { get { return ResourceManager.GetString("Program_CopyOldTools_Error_copying_external_tools_from_previous_installation", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Copying external tools from a previous installation. /// </summary> public static string Program_Main_Copying_external_tools_from_a_previous_installation { get { return ResourceManager.GetString("Program_Main_Copying_external_tools_from_a_previous_installation", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Install 32-bit {0}. /// </summary> public static string Program_Main_Install_32_bit__0__ { get { return ResourceManager.GetString("Program_Main_Install_32_bit__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You are attempting to run a 64-bit version of {0} on a 32-bit OS. Please install the 32-bit version.. /// </summary> public static string Program_Main_You_are_attempting_to_run_a_64_bit_version_of__0__on_a_32_bit_OS_Please_install_the_32_bit_version { get { return ResourceManager.GetString("Program_Main_You_are_attempting_to_run_a_64_bit_version_of__0__on_a_32_bit_OS_Ple" + "ase_install_the_32_bit_version", resourceCulture); } } /// <summary> /// Looks up a localized string similar to ProgramPathCollectors must have a program name. /// </summary> public static string ProgramPathContainer_Validate_ProgramPathCollectors_must_have_a_program_name { get { return ResourceManager.GetString("ProgramPathContainer_Validate_ProgramPathCollectors_must_have_a_program_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Property: . /// </summary> public static string Property_DisambiguationPrefix_Property__ { get { return ResourceManager.GetString("Property_DisambiguationPrefix_Property__", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. 
/// </summary> public static System.Drawing.Bitmap prosit_logo_dark_blue { get { object obj = ResourceManager.GetObject("prosit_logo_dark_blue", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Protein { get { object obj = ResourceManager.GetObject("Protein", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to delete the molecule list &apos;{0}&apos;?. /// </summary> public static string Protein_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_the_molecule_list___0___ { get { return ResourceManager.GetString("Protein_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_the_molecule_list__" + "_0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to delete the protein &apos;{0}&apos;?. /// </summary> public static string Protein_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_the_protein___0___ { get { return ResourceManager.GetString("Protein_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_the_protein___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to delete these {0} molecule lists?. /// </summary> public static string Protein_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__molecule_lists_ { get { return ResourceManager.GetString("Protein_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__molecule_" + "lists_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to delete these {0} proteins?. /// </summary> public static string Protein_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__proteins_ { get { return ResourceManager.GetString("Protein_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__proteins_" + "", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap ProteinDecoy { get { object obj = ResourceManager.GetObject("ProteinDecoy", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Resolving protein details. /// </summary> public static string ProteinMetadataManager_LookupProteinMetadata_resolving_protein_details { get { return ResourceManager.GetString("ProteinMetadataManager_LookupProteinMetadata_resolving_protein_details", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap ProteinUI { get { object obj = ResourceManager.GetObject("ProteinUI", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap ProteoWizard { get { object obj = ResourceManager.GetObject("ProteoWizard", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Error retrieving server folders:. /// </summary> public static string PublishDocumentDlg_addSubFolders_Error_retrieving_server_folders { get { return ResourceManager.GetString("PublishDocumentDlg_addSubFolders_Error_retrieving_server_folders", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Shared Files. 
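// Editor's note (illustrative sketch, not part of the generated designer
// output): image resources such as Protein, ProteinDecoy, and ProteoWizard
// above are fetched via ResourceManager.GetObject and cast to
// System.Drawing.Bitmap inside the accessor, so callers simply read the
// property. A minimal usage sketch, assuming this generated class is named
// Resources; the variable name is hypothetical:
//
//     System.Drawing.Bitmap proteinIcon = Resources.Protein;
//
// The cast inside the accessor is safe because the .resx entry declares the
// resource as a bitmap.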
/// </summary> public static string PublishDocumentDlg_btnBrowse_Click_Shared_Files { get { return ResourceManager.GetString("PublishDocumentDlg_btnBrowse_Click_Shared_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skyline Shared Documents. /// </summary> public static string PublishDocumentDlg_btnBrowse_Click_Skyline_Shared_Documents { get { return ResourceManager.GetString("PublishDocumentDlg_btnBrowse_Click_Skyline_Shared_Documents", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Upload Document. /// </summary> public static string PublishDocumentDlg_btnBrowse_Click_Upload_Document { get { return ResourceManager.GetString("PublishDocumentDlg_btnBrowse_Click_Upload_Document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please select a folder. /// </summary> public static string PublishDocumentDlg_OkDialog_Please_select_a_folder { get { return ResourceManager.GetString("PublishDocumentDlg_OkDialog_Please_select_a_folder", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please select a Skyline Shared file to upload.. /// </summary> public static string PublishDocumentDlg_OkDialog_Please_select_a_Skyline_Shared_file_to_upload { get { return ResourceManager.GetString("PublishDocumentDlg_OkDialog_Please_select_a_Skyline_Shared_file_to_upload", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Selected file is not a Skyline Shared file.. /// </summary> public static string PublishDocumentDlg_OkDialog_Selected_file_is_not_a_Skyline_Shared_file { get { return ResourceManager.GetString("PublishDocumentDlg_OkDialog_Selected_file_is_not_a_Skyline_Shared_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retrieving information from servers. /// </summary> public static string PublishDocumentDlg_PublishDocumentDlg_Load_Retrieving_information_on_servers { get { return ResourceManager.GetString("PublishDocumentDlg_PublishDocumentDlg_Load_Retrieving_information_on_servers", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed attempting to retrieve information from the following servers:. /// </summary> public static string PublishDocumentDlg_PublishDocumentDlgLoad_Failed_attempting_to_retrieve_information_from_the_following_servers_ { get { return ResourceManager.GetString("PublishDocumentDlg_PublishDocumentDlgLoad_Failed_attempting_to_retrieve_informati" + "on_from_the_following_servers_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Go to Tools &gt; Options &gt; Panorama tab to update the username and password.. /// </summary> public static string PublishDocumentDlg_PublishDocumentDlgLoad_Go_to_Tools___Options___Panorama_tab_to_update_the_username_and_password_ { get { return ResourceManager.GetString("PublishDocumentDlg_PublishDocumentDlgLoad_Go_to_Tools___Options___Panorama_tab_to" + "_update_the_username_and_password_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The selected server does not support version {0} of the skyd file format. ///Please contact the Panorama server administrator to upgrade the server.. /// </summary> public static string PublishDocumentDlg_ServerSupportsSkydVersion_ { get { return ResourceManager.GetString("PublishDocumentDlg_ServerSupportsSkydVersion_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Click here to view the error details.. 
/// </summary> public static string PublishDocumentDlg_UploadSharedZipFile_Click_here_to_view_the_error_details_ { get { return ResourceManager.GetString("PublishDocumentDlg_UploadSharedZipFile_Click_here_to_view_the_error_details_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error obtaining server information.. /// </summary> public static string PublishDocumentDlg_UploadSharedZipFile_Error_obtaining_server_information { get { return ResourceManager.GetString("PublishDocumentDlg_UploadSharedZipFile_Error_obtaining_server_information", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Uploading File. /// </summary> public static string PublishDocumentDlg_UploadSharedZipFile_Uploading_File { get { return ResourceManager.GetString("PublishDocumentDlg_UploadSharedZipFile_Uploading_File", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You do not have permission to upload to the given folder.. /// </summary> public static string PublishDocumentDlg_UploadSharedZipFile_You_do_not_have_permission_to_upload_to_the_given_folder { get { return ResourceManager.GetString("PublishDocumentDlg_UploadSharedZipFile_You_do_not_have_permission_to_upload_to_th" + "e_given_folder", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to download the following packages:. /// </summary> public static string PythonInstaller_DownloadPackages_Failed_to_download_the_following_packages_ { get { return ResourceManager.GetString("PythonInstaller_DownloadPackages_Failed_to_download_the_following_packages_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Download failed. Check your network connection or contact Skyline developers.. /// </summary> public static string PythonInstaller_DownloadPip_Download_failed__Check_your_network_connection_or_contact_Skyline_developers_ { get { return ResourceManager.GetString("PythonInstaller_DownloadPip_Download_failed__Check_your_network_connection_or_con" + "tact_Skyline_developers_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check your network connection or contact the tool provider for installation support.. /// </summary> public static string PythonInstaller_DownloadPython_Check_your_network_connection_or_contact_the_tool_provider_for_installation_support_ { get { return ResourceManager.GetString("PythonInstaller_DownloadPython_Check_your_network_connection_or_contact_the_tool_" + "provider_for_installation_support_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Download failed.. /// </summary> public static string PythonInstaller_DownloadPython_Download_failed_ { get { return ResourceManager.GetString("PythonInstaller_DownloadPython_Download_failed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Installing Packages. /// </summary> public static string PythonInstaller_GetPackages_Installing_Packages { get { return ResourceManager.GetString("PythonInstaller_GetPackages_Installing_Packages", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Package installation completed.. /// </summary> public static string PythonInstaller_GetPackages_Package_installation_completed_ { get { return ResourceManager.GetString("PythonInstaller_GetPackages_Package_installation_completed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Installing Pip. 
/// </summary> public static string PythonInstaller_GetPip_Installing_Pip { get { return ResourceManager.GetString("PythonInstaller_GetPip_Installing_Pip", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Installing Python. /// </summary> public static string PythonInstaller_GetPython_Installing_Python { get { return ResourceManager.GetString("PythonInstaller_GetPython_Installing_Python", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Python installation completed.. /// </summary> public static string PythonInstaller_GetPython_Python_installation_completed_ { get { return ResourceManager.GetString("PythonInstaller_GetPython_Python_installation_completed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Install. /// </summary> public static string PythonInstaller_InstallPackages_Install { get { return ResourceManager.GetString("PythonInstaller_InstallPackages_Install", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Package installation failed. Error log output in immediate window.. /// </summary> public static string PythonInstaller_InstallPackages_Package_installation_failed__Error_log_output_in_immediate_window_ { get { return ResourceManager.GetString("PythonInstaller_InstallPackages_Package_installation_failed__Error_log_output_in_" + "immediate_window_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Package Installation was not completed. Canceling tool installation.. /// </summary> public static string PythonInstaller_InstallPackages_Package_Installation_was_not_completed__Canceling_tool_installation_ { get { return ResourceManager.GetString("PythonInstaller_InstallPackages_Package_Installation_was_not_completed__Canceling" + "_tool_installation_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Pip installation complete.. /// </summary> public static string PythonInstaller_InstallPackages_Pip_installation_complete_ { get { return ResourceManager.GetString("PythonInstaller_InstallPackages_Pip_installation_complete_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Python package installation cannot continue. Canceling tool installation.. /// </summary> public static string PythonInstaller_InstallPackages_Python_package_installation_cannot_continue__Canceling_tool_installation_ { get { return ResourceManager.GetString("PythonInstaller_InstallPackages_Python_package_installation_cannot_continue__Canc" + "eling_tool_installation_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skyline uses the Python tool setuptools and the Python package manager Pip to install packages from source. Click install to begin the installation process.. /// </summary> public static string PythonInstaller_InstallPackages_Skyline_uses_the_Python_tool_setuptools_and_the_Python_package_manager_Pip_to_install_packages_from_source__Click_install_to_begin_the_installation_process_ { get { return ResourceManager.GetString("PythonInstaller_InstallPackages_Skyline_uses_the_Python_tool_setuptools_and_the_P" + "ython_package_manager_Pip_to_install_packages_from_source__Click_install_to_begi" + "n_the_installation_process_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unknown error installing packages.. 
/// </summary> public static string PythonInstaller_InstallPackages_Unknown_error_installing_packages_ { get { return ResourceManager.GetString("PythonInstaller_InstallPackages_Unknown_error_installing_packages_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Pip installation failed. Error log output in immediate window.. /// </summary> public static string PythonInstaller_InstallPip_Pip_installation_failed__Error_log_output_in_immediate_window__ { get { return ResourceManager.GetString("PythonInstaller_InstallPip_Pip_installation_failed__Error_log_output_in_immediate" + "_window__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unknown error installing pip.. /// </summary> public static string PythonInstaller_InstallPip_Unknown_error_installing_pip_ { get { return ResourceManager.GetString("PythonInstaller_InstallPip_Unknown_error_installing_pip_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Python installation failed. Canceling tool installation.. /// </summary> public static string PythonInstaller_InstallPython_Python_installation_failed__Canceling_tool_installation_ { get { return ResourceManager.GetString("PythonInstaller_InstallPython_Python_installation_failed__Canceling_tool_installa" + "tion_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires Python {0}. Click install to begin the installation process.. /// </summary> public static string PythonInstaller_PythonInstaller_Load_This_tool_requires_Python__0___Click_install_to_begin_the_installation_process_ { get { return ResourceManager.GetString("PythonInstaller_PythonInstaller_Load_This_tool_requires_Python__0___Click_install" + "_to_begin_the_installation_process_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires Python {0} and the following packages. Select packages to install and then click Install to begin the installation process.. /// </summary> public static string PythonInstaller_PythonInstaller_Load_This_tool_requires_Python__0__and_the_following_packages__Select_packages_to_install_and_then_click_Install_to_begin_the_installation_process_ { get { return ResourceManager.GetString("PythonInstaller_PythonInstaller_Load_This_tool_requires_Python__0__and_the_follow" + "ing_packages__Select_packages_to_install_and_then_click_Install_to_begin_the_ins" + "tallation_process_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires the following Python packages. Select packages to install and then click Install to begin the installation process.. /// </summary> public static string PythonInstaller_PythonInstaller_Load_This_tool_requires_the_following_Python_packages__Select_packages_to_install_and_then_click_Install_to_begin_the_installation_process_ { get { return ResourceManager.GetString("PythonInstaller_PythonInstaller_Load_This_tool_requires_the_following_Python_pack" + "ages__Select_packages_to_install_and_then_click_Install_to_begin_the_installatio" + "n_process_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ratio {0} To {1}. /// </summary> public static string RatioPropertyAccessor_PeptideProperty_Ratio__0__To__1_ { get { return ResourceManager.GetString("RatioPropertyAccessor_PeptideProperty_Ratio__0__To__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ratio {0} To Global Standards. 
/// </summary> public static string RatioPropertyAccessor_PeptideRatioProperty_Ratio__0__To_Global_Standards { get { return ResourceManager.GetString("RatioPropertyAccessor_PeptideRatioProperty_Ratio__0__To_Global_Standards", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ratio To Global Standards. /// </summary> public static string RatioPropertyAccessor_PeptideRatioProperty_Ratio_To_Global_Standards { get { return ResourceManager.GetString("RatioPropertyAccessor_PeptideRatioProperty_Ratio_To_Global_Standards", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Total Area Ratio To {0}. /// </summary> public static string RatioPropertyAccessor_PrecursorRatioProperty_Total_Area_Ratio_To__0_ { get { return ResourceManager.GetString("RatioPropertyAccessor_PrecursorRatioProperty_Total_Area_Ratio_To__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Total Area Ratio To Global Standards. /// </summary> public static string RatioPropertyAccessor_PrecursorRatioProperty_Total_Area_Ratio_To_Global_Standards { get { return ResourceManager.GetString("RatioPropertyAccessor_PrecursorRatioProperty_Total_Area_Ratio_To_Global_Standards" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Area Ratio To {0}. /// </summary> public static string RatioPropertyAccessor_TransitionRatioProperty_Area_Ratio_To__0_ { get { return ResourceManager.GetString("RatioPropertyAccessor_TransitionRatioProperty_Area_Ratio_To__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Area Ratio To Global Standards. /// </summary> public static string RatioPropertyAccessor_TransitionRatioProperty_Area_Ratio_To_Global_Standards { get { return ResourceManager.GetString("RatioPropertyAccessor_TransitionRatioProperty_Area_Ratio_To_Global_Standards", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ratio to surrogate {0}. /// </summary> public static string RatioToSurrogate_ToString_Ratio_to_surrogate__0_ { get { return ResourceManager.GetString("RatioToSurrogate_ToString_Ratio_to_surrogate__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ratio to surrogate {0} ({1}). /// </summary> public static string RatioToSurrogate_ToString_Ratio_to_surrogate__0____1__ { get { return ResourceManager.GetString("RatioToSurrogate_ToString_Ratio_to_surrogate__0____1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected use of iRT calculator before successful initialization.. /// </summary> public static string RCalcIrt_RequireUsable_Unexpected_use_of_iRT_calculator_before_successful_initialization { get { return ResourceManager.GetString("RCalcIrt_RequireUsable_Unexpected_use_of_iRT_calculator_before_successful_initial" + "ization", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected use of iRT calculator before successful initialization.. /// </summary> public static string RCalcIrt_RequireUsable_Unexpected_use_of_iRT_calculator_before_successful_initialization_ { get { return ResourceManager.GetString("RCalcIrt_RequireUsable_Unexpected_use_of_iRT_calculator_before_successful_initial" + "ization_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Dot Product {0} To {1}. 
/// </summary> public static string RDotPPropertyAccessor_PeptideProperty_Dot_Product__0__To__1_ { get { return ResourceManager.GetString("RDotPPropertyAccessor_PeptideProperty_Dot_Product__0__To__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Dot Product To {0}. /// </summary> public static string RDotPPropertyAccessor_PrecursorProperty_Dot_Product_To__0_ { get { return ResourceManager.GetString("RDotPPropertyAccessor_PrecursorProperty_Dot_Product_To__0_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap RedX { get { object obj = ResourceManager.GetObject("RedX", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap RedXTransparentBackground { get { object obj = ResourceManager.GetObject("RedXTransparentBackground", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Add la&amp;bel type:. /// </summary> public static string RefineDlg_cbAdd_CheckedChanged_Add_label_type { get { return ResourceManager.GetString("RefineDlg_cbAdd_CheckedChanged_Add_label_type", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursors of the chosen isotope label type will be added if they are missing. /// </summary> public static string RefineDlg_cbAdd_CheckedChanged_Precursors_of_the_chosen_isotope_label_type_will_be_added_if_they_are_missing { get { return ResourceManager.GetString("RefineDlg_cbAdd_CheckedChanged_Precursors_of_the_chosen_isotope_label_type_will_b" + "e_added_if_they_are_missing", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1. /// </summary> public static string RefineDlg_MSLevel_1 { get { return ResourceManager.GetString("RefineDlg_MSLevel_1", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 2. /// </summary> public static string RefineDlg_MSLevel_2 { get { return ResourceManager.GetString("RefineDlg_MSLevel_2", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Total ion current. /// </summary> public static string RefineDlg_NormalizationMethod_Total_ion_current { get { return ResourceManager.GetString("RefineDlg_NormalizationMethod_Total_ion_current", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must be less than min peak found ratio.. /// </summary> public static string RefineDlg_OkDialog__0__must_be_less_than_min_peak_found_ratio { get { return ResourceManager.GetString("RefineDlg_OkDialog__0__must_be_less_than_min_peak_found_ratio", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The label type &apos;{0}&apos; cannot be added. There are no modifications for this type.. /// </summary> public static string RefineDlg_OkDialog_The_label_type__0__cannot_be_added_There_are_no_modifications_for_this_type { get { return ResourceManager.GetString("RefineDlg_OkDialog_The_label_type__0__cannot_be_added_There_are_no_modifications_" + "for_this_type", resourceCulture); } } /// <summary> /// Looks up a localized string similar to all. /// </summary> public static string RefineDlg_RefineDlg_all { get { return ResourceManager.GetString("RefineDlg_RefineDlg_all", resourceCulture); } } /// <summary> /// Looks up a localized string similar to best. 
/// </summary> public static string RefineDlg_RefineDlg_best { get { return ResourceManager.GetString("RefineDlg_RefineDlg_best", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursors. /// </summary> public static string RefineDlg_RefineDlg_Precursors { get { return ResourceManager.GetString("RefineDlg_RefineDlg_Precursors", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Products. /// </summary> public static string RefineDlg_RefineDlg_Products { get { return ResourceManager.GetString("RefineDlg_RefineDlg_Products", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you want to continue?. /// </summary> public static string RefineListDlg_OkDialog_Do_you_want_to_continue { get { return ResourceManager.GetString("RefineListDlg_OkDialog_Do_you_want_to_continue", resourceCulture); } } /// <summary> /// Looks up a localized string similar to None of the specified peptides are in the document.. /// </summary> public static string RefineListDlg_OkDialog_None_of_the_specified_peptides_are_in_the_document { get { return ResourceManager.GetString("RefineListDlg_OkDialog_None_of_the_specified_peptides_are_in_the_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Of the specified {0} peptides {1} are not in the document. Do you want to continue?. /// </summary> public static string RefineListDlg_OkDialog_Of_the_specified__0__peptides__1__are_not_in_the_document_Do_you_want_to_continue { get { return ResourceManager.GetString("RefineListDlg_OkDialog_Of_the_specified__0__peptides__1__are_not_in_the_document_" + "Do_you_want_to_continue", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following peptides are not in the document:. /// </summary> public static string RefineListDlg_OkDialog_The_following_peptides_are_not_in_the_document { get { return ResourceManager.GetString("RefineListDlg_OkDialog_The_following_peptides_are_not_in_the_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following sequences are not valid peptides:. /// </summary> public static string RefineListDlg_OkDialog_The_following_sequences_are_not_valid_peptides { get { return ResourceManager.GetString("RefineListDlg_OkDialog_The_following_sequences_are_not_valid_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The peptide &apos;{0}&apos; is not in the document.. /// </summary> public static string RefineListDlg_OkDialog_The_peptide___0___is_not_in_the_document { get { return ResourceManager.GetString("RefineListDlg_OkDialog_The_peptide___0___is_not_in_the_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The peptide &apos;{0}&apos; is not in the document. Do you want to continue?. /// </summary> public static string RefineListDlg_OkDialog_The_peptide__0__is_not_in_the_document_Do_you_want_to_continue { get { return ResourceManager.GetString("RefineListDlg_OkDialog_The_peptide__0__is_not_in_the_document_Do_you_want_to_cont" + "inue", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The sequence &apos;{0}&apos; is not a valid peptide.. 
/// </summary> public static string RefineListDlg_OkDialog_The_sequence__0__is_not_a_valid_peptide { get { return ResourceManager.GetString("RefineListDlg_OkDialog_The_sequence__0__is_not_a_valid_peptide", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you want to continue?. /// </summary> public static string RefineListDlgProtein_OkDialog_Do_you_want_to_continue { get { return ResourceManager.GetString("RefineListDlgProtein_OkDialog_Do_you_want_to_continue", resourceCulture); } } /// <summary> /// Looks up a localized string similar to None of the specified proteins are in the document.. /// </summary> public static string RefineListDlgProtein_OkDialog_None_of_the_specified_proteins_are_in_the_document_ { get { return ResourceManager.GetString("RefineListDlgProtein_OkDialog_None_of_the_specified_proteins_are_in_the_document_" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Of the specified {0} proteins {1} are not in the document. Do you want to continue?. /// </summary> public static string RefineListDlgProtein_OkDialog_Of_the_specified__0__proteins__1__are_not_in_the_document__Do_you_want_to_continue_ { get { return ResourceManager.GetString("RefineListDlgProtein_OkDialog_Of_the_specified__0__proteins__1__are_not_in_the_do" + "cument__Do_you_want_to_continue_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following proteins are not in the document:. /// </summary> public static string RefineListDlgProtein_OkDialog_The_following_proteins_are_not_in_the_document_ { get { return ResourceManager.GetString("RefineListDlgProtein_OkDialog_The_following_proteins_are_not_in_the_document_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The protein &apos;{0}&apos; is not in the document. Do you want to continue?. /// </summary> public static string RefineListDlgProtein_OkDialog_The_protein___0___is_not_in_the_document__Do_you_want_to_continue_ { get { return ResourceManager.GetString("RefineListDlgProtein_OkDialog_The_protein___0___is_not_in_the_document__Do_you_wa" + "nt_to_continue_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Converted To Molecules. /// </summary> public static string RefinementSettings_ConvertToSmallMolecules_Converted_To_Small_Molecules { get { return ResourceManager.GetString("RefinementSettings_ConvertToSmallMolecules_Converted_To_Small_Molecules", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document does not contain the given reference type.. /// </summary> public static string RefinementSettings_GetLabelIndex_The_document_does_not_contain_the_given_reference_type_ { get { return ResourceManager.GetString("RefinementSettings_GetLabelIndex_The_document_does_not_contain_the_given_referenc" + "e_type_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document does not have a global standard to normalize by.. /// </summary> public static string RefinementSettings_Refine_The_document_does_not_have_a_global_standard_to_normalize_by_ { get { return ResourceManager.GetString("RefinementSettings_Refine_The_document_does_not_have_a_global_standard_to_normali" + "ze_by_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must contain at least 2 replicates to refine based on consistency.. 
/// </summary> public static string RefinementSettings_Refine_The_document_must_contain_at_least_2_replicates_to_refine_based_on_consistency_ { get { return ResourceManager.GetString("RefinementSettings_Refine_The_document_must_contain_at_least_2_replicates_to_refi" + "ne_based_on_consistency_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to intercept. /// </summary> public static string Regression_intercept { get { return ResourceManager.GetString("Regression_intercept", resourceCulture); } } /// <summary> /// Looks up a localized string similar to slope. /// </summary> public static string Regression_slope { get { return ResourceManager.GetString("Regression_slope", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Bilinear. /// </summary> public static string RegressionFit_BILINEAR_Bilinear { get { return ResourceManager.GetString("RegressionFit_BILINEAR_Bilinear", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} (at {1} points minimum). /// </summary> public static string RegressionGraphPane_RegressionGraphPane__0___at__1__points_minimum_ { get { return ResourceManager.GetString("RegressionGraphPane_RegressionGraphPane__0___at__1__points_minimum_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Current. /// </summary> public static string RegressionGraphPane_RegressionGraphPane_Current { get { return ResourceManager.GetString("RegressionGraphPane_RegressionGraphPane_Current", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing. /// </summary> public static string RegressionGraphPane_RegressionGraphPane_Missing { get { return ResourceManager.GetString("RegressionGraphPane_RegressionGraphPane_Missing", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Outliers. /// </summary> public static string RegressionGraphPane_RegressionGraphPane_Outliers { get { return ResourceManager.GetString("RegressionGraphPane_RegressionGraphPane_Outliers", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Regression. /// </summary> public static string RegressionGraphPane_RegressionGraphPane_Regression { get { return ResourceManager.GetString("RegressionGraphPane_RegressionGraphPane_Regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Values. /// </summary> public static string RegressionGraphPane_RegressionGraphPane_Values { get { return ResourceManager.GetString("RegressionGraphPane_RegressionGraphPane_Values", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fixed points (linear). /// </summary> public static string RegressionOption_All_Fixed_points__linear_ { get { return ResourceManager.GetString("RegressionOption_All_Fixed_points__linear_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fixed points (logarithmic). /// </summary> public static string RegressionOption_All_Fixed_points__logarithmic_ { get { return ResourceManager.GetString("RegressionOption_All_Fixed_points__logarithmic_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Score. /// </summary> public static string RegressionUnconversion_CalculatorScoreFormat { get { return ResourceManager.GetString("RegressionUnconversion_CalculatorScoreFormat", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Score ({1}). 
/// </summary> public static string RegressionUnconversion_CalculatorScoreValueFormat { get { return ResourceManager.GetString("RegressionUnconversion_CalculatorScoreValueFormat", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed attempting to reintegrate peaks.. /// </summary> public static string ReintegrateDlg_OkDialog_Failed_attempting_to_reintegrate_peaks_ { get { return ResourceManager.GetString("ReintegrateDlg_OkDialog_Failed_attempting_to_reintegrate_peaks_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reintegrating. /// </summary> public static string ReintegrateDlg_OkDialog_Reintegrating { get { return ResourceManager.GetString("ReintegrateDlg_OkDialog_Reintegrating", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The current peak scoring model is incompatible with one or more peptides in the document. Please train a new model.. /// </summary> public static string ReintegrateDlg_OkDialog_The_current_peak_scoring_model_is_incompatible_with_one_or_more_peptides_in_the_document___Please_train_a_new_model_ { get { return ResourceManager.GetString("ReintegrateDlg_OkDialog_The_current_peak_scoring_model_is_incompatible_with_one_o" + "r_more_peptides_in_the_document___Please_train_a_new_model_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You must train and select a model in order to reintegrate peaks.. /// </summary> public static string ReintegrateDlg_OkDialog_You_must_train_and_select_a_model_in_order_to_reintegrate_peaks_ { get { return ResourceManager.GetString("ReintegrateDlg_OkDialog_You_must_train_and_select_a_model_in_order_to_reintegrate" + "_peaks_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Matching. /// </summary> public static string RelativeRTExtension_LOCALIZED_VALUES_Matching { get { return ResourceManager.GetString("RelativeRTExtension_LOCALIZED_VALUES_Matching", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Overlapping. /// </summary> public static string RelativeRTExtension_LOCALIZED_VALUES_Overlapping { get { return ResourceManager.GetString("RelativeRTExtension_LOCALIZED_VALUES_Overlapping", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Preceding. /// </summary> public static string RelativeRTExtension_LOCALIZED_VALUES_Preceding { get { return ResourceManager.GetString("RelativeRTExtension_LOCALIZED_VALUES_Preceding", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unknown. /// </summary> public static string RelativeRTExtension_LOCALIZED_VALUES_Unknown { get { return ResourceManager.GetString("RelativeRTExtension_LOCALIZED_VALUES_Unknown", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remote Accounts. /// </summary> public static string RemoteAccountList_Label_Remote_Accounts { get { return ResourceManager.GetString("RemoteAccountList_Label_Remote_Accounts", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Remote Accounts. /// </summary> public static string RemoteAccountList_Title_Edit_Remote_Accounts { get { return ResourceManager.GetString("RemoteAccountList_Title_Edit_Remote_Accounts", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There was an error communicating with the server: . 
/// </summary> public static string RemoteSession_FetchContents_There_was_an_error_communicating_with_the_server__ { get { return ResourceManager.GetString("RemoteSession_FetchContents_There_was_an_error_communicating_with_the_server__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No peaks are selected. /// </summary> public static string RemovePeaksAction_RemovePeaks_No_peaks_are_selected { get { return ResourceManager.GetString("RemovePeaksAction_RemovePeaks_No_peaks_are_selected", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove peaks. /// </summary> public static string RemovePeaksAction_RemovePeaks_Remove_peaks { get { return ResourceManager.GetString("RemovePeaksAction_RemovePeaks_Remove_peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Removing Peaks. /// </summary> public static string RemovePeaksAction_RemovePeaks_Removing_Peaks { get { return ResourceManager.GetString("RemovePeaksAction_RemovePeaks_Removing_Peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to remove these {0} peaks from {1} molecules?. /// </summary> public static string RemovePeptides_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0__peaks_from__1__molecules_ { get { return ResourceManager.GetString("RemovePeptides_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0__" + "peaks_from__1__molecules_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to remove these {0} peaks from {1} peptides?. /// </summary> public static string RemovePeptides_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0__peaks_from__1__peptides_ { get { return ResourceManager.GetString("RemovePeptides_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0__" + "peaks_from__1__peptides_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to remove these {0} peaks from one molecule?. /// </summary> public static string RemovePeptides_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0__peaks_from_one_molecule_ { get { return ResourceManager.GetString("RemovePeptides_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0__" + "peaks_from_one_molecule_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to remove these {0} peaks from one peptide?. /// </summary> public static string RemovePeptides_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0__peaks_from_one_peptide_ { get { return ResourceManager.GetString("RemovePeptides_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0__" + "peaks_from_one_peptide_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to remove this molecule peak?. /// </summary> public static string RemovePeptides_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_this_molecule_peak_ { get { return ResourceManager.GetString("RemovePeptides_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_this_molec" + "ule_peak_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to remove this peptide peak?. 
/// </summary> public static string RemovePeptides_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_this_peptide_peak_ { get { return ResourceManager.GetString("RemovePeptides_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_this_pepti" + "de_peak_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove Molecule Peaks.... /// </summary> public static string RemovePeptides_MenuItemText_Remove_Molecule_Peaks___ { get { return ResourceManager.GetString("RemovePeptides_MenuItemText_Remove_Molecule_Peaks___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove Peptide Peaks.... /// </summary> public static string RemovePeptides_MenuItemText_Remove_Peptide_Peaks___ { get { return ResourceManager.GetString("RemovePeptides_MenuItemText_Remove_Peptide_Peaks___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to remove these {0} peaks from {1} precursors?. /// </summary> public static string RemovePrecursors_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0__peaks_from__1__precursors_ { get { return ResourceManager.GetString("RemovePrecursors_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0" + "__peaks_from__1__precursors_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to remove these {0} peaks from one precursor?. /// </summary> public static string RemovePrecursors_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0__peaks_from_one_precursor_ { get { return ResourceManager.GetString("RemovePrecursors_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0" + "__peaks_from_one_precursor_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to remove this precursor peak?. /// </summary> public static string RemovePrecursors_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_this_precursor_peak_ { get { return ResourceManager.GetString("RemovePrecursors_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_this_pre" + "cursor_peak_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove Precursor Peaks.... /// </summary> public static string RemovePrecursors_MenuItemText_Remove_Precursor_Peaks___ { get { return ResourceManager.GetString("RemovePrecursors_MenuItemText_Remove_Precursor_Peaks___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to remove these {0} peaks from {1} transitions?. /// </summary> public static string RemoveTransitions_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0__peaks_from__1__transitions_ { get { return ResourceManager.GetString("RemoveTransitions_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__" + "0__peaks_from__1__transitions_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to remove these {0} peaks from one transition?. /// </summary> public static string RemoveTransitions_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__0__peaks_from_one_transition_ { get { return ResourceManager.GetString("RemoveTransitions_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_these__" + "0__peaks_from_one_transition_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to remove this transition peak?. 
/// </summary> public static string RemoveTransitions_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_this_transition_peak_ { get { return ResourceManager.GetString("RemoveTransitions_GetConfirmRemoveMessage_Are_you_sure_you_want_to_remove_this_tr" + "ansition_peak_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove Transition Peaks.... /// </summary> public static string RemoveTransitions_MenuItemText_Remove_Transition_Peaks___ { get { return ResourceManager.GetString("RemoveTransitions_MenuItemText_Remove_Transition_Peaks___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add FASTA File. /// </summary> public static string RenameProteinsDlg_btnFASTA_Click_Add_FASTA_File { get { return ResourceManager.GetString("RenameProteinsDlg_btnFASTA_Click_Add_FASTA_File", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} is not a current protein. /// </summary> public static string RenameProteinsDlg_OkDialog__0__is_not_a_current_protein { get { return ResourceManager.GetString("RenameProteinsDlg_OkDialog__0__is_not_a_current_protein", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot rename {0} more than once. Please remove either {1} or {2}. /// </summary> public static string RenameProteinsDlg_OkDialog_Cannot_rename__0__more_than_once__Please_remove_either__1__or__2__ { get { return ResourceManager.GetString("RenameProteinsDlg_OkDialog_Cannot_rename__0__more_than_once__Please_remove_either" + "__1__or__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit name {0}. /// </summary> public static string RenameProteinsDlg_OkDialog_Edit_name__0__ { get { return ResourceManager.GetString("RenameProteinsDlg_OkDialog_Edit_name__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No protein metadata available. /// </summary> public static string RenameProteinsDlg_UseAccessionOrPreferredNameorGene_No_protein_metadata_available { get { return ResourceManager.GetString("RenameProteinsDlg_UseAccessionOrPreferredNameorGene_No_protein_metadata_available" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed reading the file {0}. {1}. /// </summary> public static string RenameProteinsDlg_UseFastaFile_Failed_reading_the_file__0__1__ { get { return ResourceManager.GetString("RenameProteinsDlg_UseFastaFile_Failed_reading_the_file__0__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No protein sequence matches found between the current document and the FASTA file {0}. /// </summary> public static string RenameProteinsDlg_UseFastaFile_No_protein_sequence_matches_found_between_the_current_document_and_the_FASTA_file__0_ { get { return ResourceManager.GetString("RenameProteinsDlg_UseFastaFile_No_protein_sequence_matches_found_between_the_curr" + "ent_document_and_the_FASTA_file__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document contains a naming conflict. The name {0} is currently used by multiple protein sequences.. 
/// </summary> public static string RenameProteinsDlg_UseFastaFile_The_document_contains_a_naming_conflict_The_name__0__is_currently_used_by_multiple_protein_sequences { get { return ResourceManager.GetString("RenameProteinsDlg_UseFastaFile_The_document_contains_a_naming_conflict_The_name__" + "0__is_currently_used_by_multiple_protein_sequences", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please use a different name.. /// </summary> public static string RenameResultDlg_OkDialog_Please_use_a_different_name { get { return ResourceManager.GetString("RenameResultDlg_OkDialog_Please_use_a_different_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The name {0} is already in use.. /// </summary> public static string RenameResultDlg_ReplicateName_The_name__0__is_already_in_use { get { return ResourceManager.GetString("RenameResultDlg_ReplicateName_The_name__0__is_already_in_use", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Replicate { get { object obj = ResourceManager.GetObject("Replicate", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to There is already a replicate named &apos;{0}&apos;.. /// </summary> public static string Replicate_Name_There_is_already_a_replicate_named___0___ { get { return ResourceManager.GetString("Replicate_Name_There_is_already_a_replicate_named___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Replicate. /// </summary> public static string ReplicateGroupOp_ReplicateAxisTitle { get { return ResourceManager.GetString("ReplicateGroupOp_ReplicateAxisTitle", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error report will be posted.. /// </summary> public static string ReportErrorDlg_ReportErrorDlg_An_error_report_will_be_posted { get { return ResourceManager.GetString("ReportErrorDlg_ReportErrorDlg_An_error_report_will_be_posted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An unexpected error has occurred, as shown below.. /// </summary> public static string ReportErrorDlg_ReportErrorDlg_An_unexpected_error_has_occurred_as_shown_below { get { return ResourceManager.GetString("ReportErrorDlg_ReportErrorDlg_An_unexpected_error_has_occurred_as_shown_below", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Close. /// </summary> public static string ReportErrorDlg_ReportErrorDlg_Close { get { return ResourceManager.GetString("ReportErrorDlg_ReportErrorDlg_Close", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Report the error to help improve Skyline.. /// </summary> public static string ReportShutdownDlg_ReportShutdownDlg_Report_the_error_to_help_improve_Skyline_ { get { return ResourceManager.GetString("ReportShutdownDlg_ReportShutdownDlg_Report_the_error_to_help_improve_Skyline_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skyline had an unexpected error the last time you ran it.. 
/// </summary> public static string ReportShutdownDlg_ReportShutdownDlg_Skyline_had_an_unexpected_error_the_last_time_you_ran_it_ { get { return ResourceManager.GetString("ReportShutdownDlg_ReportShutdownDlg_Skyline_had_an_unexpected_error_the_last_time" + "_you_ran_it_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected Shutdown. /// </summary> public static string ReportShutdownDlg_ReportShutdownDlg_Unexpected_Shutdown { get { return ResourceManager.GetString("ReportShutdownDlg_ReportShutdownDlg_Unexpected_Shutdown", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The name &apos;{0}&apos; is not a valid table name.. /// </summary> public static string ReportSpec_GetTable_The_name__0__is_not_a_valid_table_name { get { return ResourceManager.GetString("ReportSpec_GetTable_The_name__0__is_not_a_valid_table_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to find the table for the column {0}.. /// </summary> public static string ReportSpec_ReadColumns_Failed_to_find_the_table_for_the_column__0__ { get { return ResourceManager.GetString("ReportSpec_ReadColumns_Failed_to_find_the_table_for_the_column__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing table name.. /// </summary> public static string ReportSpec_ReadXml_Missing_table_name { get { return ResourceManager.GetString("ReportSpec_ReadXml_Missing_table_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The name &apos;{0}&apos; is not a valid table name.. /// </summary> public static string ReportSpec_ReadXml_The_name__0__is_not_a_valid_table_name { get { return ResourceManager.GetString("ReportSpec_ReadXml_The_name__0__is_not_a_valid_table_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exporting {0} report. /// </summary> public static string ReportSpec_ReportToCsvString_Exporting__0__report { get { return ResourceManager.GetString("ReportSpec_ReportToCsvString_Exporting__0__report", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An unexpected error occurred while analyzing the current document.. /// </summary> public static string ReportSpecList_EditItem_An_unexpected_error_occurred_while_analyzing_the_current_document { get { return ResourceManager.GetString("ReportSpecList_EditItem_An_unexpected_error_occurred_while_analyzing_the_current_" + "document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peak Boundaries. /// </summary> public static string ReportSpecList_GetDefaults_Peak_Boundaries { get { return ResourceManager.GetString("ReportSpecList_GetDefaults_Peak_Boundaries", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptide Ratio Results. /// </summary> public static string ReportSpecList_GetDefaults_Peptide_Ratio_Results { get { return ResourceManager.GetString("ReportSpecList_GetDefaults_Peptide_Ratio_Results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptide RT Results. /// </summary> public static string ReportSpecList_GetDefaults_Peptide_RT_Results { get { return ResourceManager.GetString("ReportSpecList_GetDefaults_Peptide_RT_Results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transition Results. 
/// </summary> public static string ReportSpecList_GetDefaults_Transition_Results { get { return ResourceManager.GetString("ReportSpecList_GetDefaults_Transition_Results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Report:. /// </summary> public static string ReportSpecList_Label_Report { get { return ResourceManager.GetString("ReportSpecList_Label_Report", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Reports. /// </summary> public static string ReportSpecList_Title_Edit_Reports { get { return ResourceManager.GetString("ReportSpecList_Title_Edit_Reports", resourceCulture); } } /// <summary> /// Looks up a localized string similar to All results must be completely imported before they can be re-scored.. /// </summary> public static string RescoreResultsDlg_Rescore_All_results_must_be_completely_imported_before_they_can_be_re_scored_ { get { return ResourceManager.GetString("RescoreResultsDlg_Rescore_All_results_must_be_completely_imported_before_they_can" + "_be_re_scored_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There are not results in this document. /// </summary> public static string RescoreResultsDlg_Rescore_There_are_not_results_in_this_document { get { return ResourceManager.GetString("RescoreResultsDlg_Rescore_There_are_not_results_in_this_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to In certain cases, you may want to have Skyline re-calculate peaks and re-score them based on the existing chromatogram data. Chromatograms will not be re-imported from raw data files, but peak integration information may change.. /// </summary> public static string RescoreResultsDlg_RescoreResultsDlg_In_certain_cases__you_may_want_to_have_Skyline_re_calculate_peaks_and_re_score_them_based_on_the_existing_chromatogram_data___Chromatograms_will_not_be_re_imported_from_raw_data_files__but_peak_integration_information_may_change_ { get { return ResourceManager.GetString(@"RescoreResultsDlg_RescoreResultsDlg_In_certain_cases__you_may_want_to_have_Skyline_re_calculate_peaks_and_re_score_them_based_on_the_existing_chromatogram_data___Chromatograms_will_not_be_re_imported_from_raw_data_files__but_peak_integration_information_may_change_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptide Quantification. /// </summary> public static string Resources_ReportSpecList_GetDefaults_Peptide_Quantification { get { return ResourceManager.GetString("Resources.ReportSpecList_GetDefaults_Peptide_Quantification", resourceCulture); } } /// <summary> /// Looks up a localized string similar to DocNode peak info found for file with no match in document results.. /// </summary> public static string Results_Validate_DocNode_peak_info_found_for_file_with_no_match_in_document_results { get { return ResourceManager.GetString("Results_Validate_DocNode_peak_info_found_for_file_with_no_match_in_document_resul" + "ts", resourceCulture); } } /// <summary> /// Looks up a localized string similar to DocNode results count {0} does not match document results count {1}.. /// </summary> public static string Results_Validate_DocNode_results_count__0__does_not_match_document_results_count__1__ { get { return ResourceManager.GetString("Results_Validate_DocNode_results_count__0__does_not_match_document_results_count_" + "_1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Element not found. 
/// </summary> public static string ResultsGrid_ChangeChromInfo_Element_not_found { get { return ResourceManager.GetString("ResultsGrid_ChangeChromInfo_Element_not_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Acquired Time. /// </summary> public static string ResultsGrid_ResultsGrid_Acquired_Time { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Acquired_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Area. /// </summary> public static string ResultsGrid_ResultsGrid_Area { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Area", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Area Ratio. /// </summary> public static string ResultsGrid_ResultsGrid_Area_Ratio { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Area_Ratio", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Average Mass Error PPM. /// </summary> public static string ResultsGrid_ResultsGrid_Average_Mass_Error_PPM { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Average_Mass_Error_PPM", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Background. /// </summary> public static string ResultsGrid_ResultsGrid_Background { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Background", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Best Retention Time. /// </summary> public static string ResultsGrid_ResultsGrid_Best_Retention_Time { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Best_Retention_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit note. /// </summary> public static string ResultsGrid_ResultsGrid_CellEndEdit_Edit_note { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_CellEndEdit_Edit_note", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Count Truncated. /// </summary> public static string ResultsGrid_ResultsGrid_Count_Truncated { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Count_Truncated", resourceCulture); } } /// <summary> /// Looks up a localized string similar to End Time. /// </summary> public static string ResultsGrid_ResultsGrid_End_Time { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_End_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File Name. /// </summary> public static string ResultsGrid_ResultsGrid_File_Name { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_File_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fwhm. /// </summary> public static string ResultsGrid_ResultsGrid_Fwhm { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Fwhm", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Height. /// </summary> public static string ResultsGrid_ResultsGrid_Height { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Height", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Identified. /// </summary> public static string ResultsGrid_ResultsGrid_Identified { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Identified", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isotope Dot Product. 
/// </summary> public static string ResultsGrid_ResultsGrid_Isotope_Dot_Product { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Isotope_Dot_Product", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library Dot Product. /// </summary> public static string ResultsGrid_ResultsGrid_Library_Dot_Product { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Library_Dot_Product", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mass Error PPM. /// </summary> public static string ResultsGrid_ResultsGrid_Mass_Error_PPM { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Mass_Error_PPM", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Max End Time. /// </summary> public static string ResultsGrid_ResultsGrid_Max_End_Time { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Max_End_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Max Fwhm. /// </summary> public static string ResultsGrid_ResultsGrid_Max_Fwhm { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Max_Fwhm", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Max Height. /// </summary> public static string ResultsGrid_ResultsGrid_Max_Height { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Max_Height", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Min Start Time. /// </summary> public static string ResultsGrid_ResultsGrid_Min_Start_Time { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Min_Start_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Modified Time. /// </summary> public static string ResultsGrid_ResultsGrid_Modified_Time { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Modified_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Opt Collision Energy. /// </summary> public static string ResultsGrid_ResultsGrid_Opt_Collision_Energy { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Opt_Collision_Energy", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Opt Compensation Voltage. /// </summary> public static string ResultsGrid_ResultsGrid_Opt_Compensation_Voltage { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Opt_Compensation_Voltage", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Opt Declustering Potential. /// </summary> public static string ResultsGrid_ResultsGrid_Opt_Declustering_Potential { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Opt_Declustering_Potential", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Opt Step. /// </summary> public static string ResultsGrid_ResultsGrid_Opt_Step { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Opt_Step", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peak Rank. /// </summary> public static string ResultsGrid_ResultsGrid_Peak_Rank { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Peak_Rank", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptide Peak Found Ratio. 
/// </summary> public static string ResultsGrid_ResultsGrid_Peptide_Peak_Found_Ratio { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Peptide_Peak_Found_Ratio", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptide Retention Time. /// </summary> public static string ResultsGrid_ResultsGrid_Peptide_Retention_Time { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Peptide_Retention_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor Peak Found Ratio. /// </summary> public static string ResultsGrid_ResultsGrid_Precursor_Peak_Found_Ratio { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Precursor_Peak_Found_Ratio", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor Replicate Note. /// </summary> public static string ResultsGrid_ResultsGrid_Precursor_Replicate_Note { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Precursor_Replicate_Note", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ratio Dot Product. /// </summary> public static string ResultsGrid_ResultsGrid_Ratio_Dot_Product { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Ratio_Dot_Product", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ratio to Global Standards. /// </summary> public static string ResultsGrid_ResultsGrid_Ratio_to_Global_Standards { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Ratio_to_Global_Standards", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ratio To Standard. /// </summary> public static string ResultsGrid_ResultsGrid_Ratio_To_Standard { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Ratio_To_Standard", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ratio To Standard Dot Product. /// </summary> public static string ResultsGrid_ResultsGrid_Ratio_To_Standard_Dot_Product { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Ratio_To_Standard_Dot_Product", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Replicate Name. /// </summary> public static string ResultsGrid_ResultsGrid_Replicate_Name { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Replicate_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retention Time. /// </summary> public static string ResultsGrid_ResultsGrid_Retention_Time { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Retention_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Sample Name. /// </summary> public static string ResultsGrid_ResultsGrid_Sample_Name { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Sample_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Start Time. /// </summary> public static string ResultsGrid_ResultsGrid_Start_Time { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Start_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Total Area. /// </summary> public static string ResultsGrid_ResultsGrid_Total_Area { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Total_Area", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Total Area Ratio. 
/// </summary> public static string ResultsGrid_ResultsGrid_Total_Area_Ratio { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Total_Area_Ratio", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Total Background. /// </summary> public static string ResultsGrid_ResultsGrid_Total_Background { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Total_Background", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transition Replicate Note. /// </summary> public static string ResultsGrid_ResultsGrid_Transition_Replicate_Note { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Transition_Replicate_Note", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Truncated. /// </summary> public static string ResultsGrid_ResultsGrid_Truncated { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_Truncated", resourceCulture); } } /// <summary> /// Looks up a localized string similar to User Set. /// </summary> public static string ResultsGrid_ResultsGrid_User_Set { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_User_Set", resourceCulture); } } /// <summary> /// Looks up a localized string similar to User Set Total. /// </summary> public static string ResultsGrid_ResultsGrid_User_Set_Total { get { return ResourceManager.GetString("ResultsGrid_ResultsGrid_User_Set_Total", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The retention time calculator {0} is not valid.. /// </summary> public static string RetentionScoreCalculator_Validate_The_retention_time_calculator__0__is_not_valid { get { return ResourceManager.GetString("RetentionScoreCalculator_Validate_The_retention_time_calculator__0__is_not_valid", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Retention Time Regression:. /// </summary> public static string RetentionTimeList_Label_Retention_Time_Regression { get { return ResourceManager.GetString("RetentionTimeList_Label_Retention_Time_Regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Retention Time Regressions. /// </summary> public static string RetentionTimeList_Title_Edit_Retention_Time_Regressions { get { return ResourceManager.GetString("RetentionTimeList_Title_Edit_Retention_Time_Regressions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Finding threshold. /// </summary> public static string RetentionTimeRegression_FindThreshold_Finding_threshold { get { return ResourceManager.GetString("RetentionTimeRegression_FindThreshold_Finding_threshold", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Recalculating regression. /// </summary> public static string RetentionTimeRegression_RecalcRegression_Recalculating_regression { get { return ResourceManager.GetString("RetentionTimeRegression_RecalcRegression_Recalculating_regression", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Slope and intercept must both have values or both not have values. 
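// --- Illustrative usage sketch (assumption: not part of the generated designer file) ---
// The ResultsGrid_ResultsGrid_* entries above are column captions; a grid would
// typically pull its header text from them so headers follow the current UI
// culture. "AddRetentionTimeColumn" and "grid" are hypothetical names.
private static void AddRetentionTimeColumn(System.Windows.Forms.DataGridView grid) {
    grid.Columns.Add(
        "retentionTime",                          // internal column name
        ResultsGrid_ResultsGrid_Retention_Time);  // localized header text
}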
/// </summary> public static string RetentionTimeRegression_RetentionTimeRegression_Slope_and_intercept_must_both_have_values_or_both_not_have_values { get { return ResourceManager.GetString("RetentionTimeRegression_RetentionTimeRegression_Slope_and_intercept_must_both_hav" + "e_values_or_both_not_have_values", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid negative retention time window {0}.. /// </summary> public static string RetentionTimeRegression_Validate_Invalid_negative_retention_time_window__0__ { get { return ResourceManager.GetString("RetentionTimeRegression_Validate_Invalid_negative_retention_time_window__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retention time regression must specify a sequence to score calculator.. /// </summary> public static string RetentionTimeRegression_Validate_Retention_time_regression_must_specify_a_sequence_to_score_calculator { get { return ResourceManager.GetString("RetentionTimeRegression_Validate_Retention_time_regression_must_specify_a_sequenc" + "e_to_score_calculator", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Check your network connection or contact the tool provider for installation support.. /// </summary> public static string RInstaller_DownloadPackages_Check_your_network_connection_or_contact_the_tool_provider_for_installation_support_ { get { return ResourceManager.GetString("RInstaller_DownloadPackages_Check_your_network_connection_or_contact_the_tool_pro" + "vider_for_installation_support_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Download failed.. /// </summary> public static string RInstaller_DownloadR_Download_failed_ { get { return ResourceManager.GetString("RInstaller_DownloadR_Download_failed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Downloading packages. /// </summary> public static string RInstaller_GetPackages_Downloading_packages { get { return ResourceManager.GetString("RInstaller_GetPackages_Downloading_packages", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Installing Packages. /// </summary> public static string RInstaller_GetPAckages_Installing_Packages { get { return ResourceManager.GetString("RInstaller_GetPAckages_Installing_Packages", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Package installation complete.. /// </summary> public static string RInstaller_GetPackages_Package_installation_complete_ { get { return ResourceManager.GetString("RInstaller_GetPackages_Package_installation_complete_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Installing R. /// </summary> public static string RInstaller_GetR_Installing_R { get { return ResourceManager.GetString("RInstaller_GetR_Installing_R", resourceCulture); } } /// <summary> /// Looks up a localized string similar to R installation complete.. /// </summary> public static string RInstaller_GetR_R_installation_complete_ { get { return ResourceManager.GetString("RInstaller_GetR_R_installation_complete_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Download Canceled. 
/// </summary> public static string RInstaller_InstallPackages_Download_Canceled { get { return ResourceManager.GetString("RInstaller_InstallPackages_Download_Canceled", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Failed to connect to the website {0}. /// </summary> public static string RInstaller_InstallPackages_Error__Failed_to_connect_to_the_website__0_ { get { return ResourceManager.GetString("RInstaller_InstallPackages_Error__Failed_to_connect_to_the_website__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: No internet connection.. /// </summary> public static string RInstaller_InstallPackages_Error__No_internet_connection_ { get { return ResourceManager.GetString("RInstaller_InstallPackages_Error__No_internet_connection_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: Package installation did not complete. Output logged to the Immediate Window.. /// </summary> public static string RInstaller_InstallPackages_Error__Package_installation_did_not_complete__Output_logged_to_the_Immediate_Window_ { get { return ResourceManager.GetString("RInstaller_InstallPackages_Error__Package_installation_did_not_complete__Output_l" + "ogged_to_the_Immediate_Window_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error Installing Packages. /// </summary> public static string RInstaller_InstallPackages_Error_Installing_Packages { get { return ResourceManager.GetString("RInstaller_InstallPackages_Error_Installing_Packages", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Installing R packages requires an internet connection. Please check your connection and try again. /// </summary> public static string RInstaller_InstallPackages_Installing_R_packages_requires_an_internet_connection__Please_check_your_connection_and_try_again { get { return ResourceManager.GetString("RInstaller_InstallPackages_Installing_R_packages_requires_an_internet_connection_" + "_Please_check_your_connection_and_try_again", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Output logged to the Immediate Window.. /// </summary> public static string RInstaller_InstallPackages_Output_logged_to_the_Immediate_Window_ { get { return ResourceManager.GetString("RInstaller_InstallPackages_Output_logged_to_the_Immediate_Window_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Package installation failed. Error log output in immediate window.. /// </summary> public static string RInstaller_InstallPackages_Package_installation_failed__Error_log_output_in_immediate_window_ { get { return ResourceManager.GetString("RInstaller_InstallPackages_Package_installation_failed__Error_log_output_in_immed" + "iate_window_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following packages failed to install:. /// </summary> public static string RInstaller_InstallPackages_The_following_packages_failed_to_install_ { get { return ResourceManager.GetString("RInstaller_InstallPackages_The_following_packages_failed_to_install_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The package {0} failed to install:. 
/// </summary> public static string RInstaller_InstallPackages_The_package__0__failed_to_install_ { get { return ResourceManager.GetString("RInstaller_InstallPackages_The_package__0__failed_to_install_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unknown Error installing packages. Output logged to the Immediate Window.. /// </summary> public static string RInstaller_InstallPackages_Unknown_Error_installing_packages__Output_logged_to_the_Immediate_Window_ { get { return ResourceManager.GetString("RInstaller_InstallPackages_Unknown_Error_installing_packages__Output_logged_to_th" + "e_Immediate_Window_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unknown error installing packages. Tool Installation Failed.. /// </summary> public static string RInstaller_InstallPackages_Unknown_error_installing_packages__Tool_Installation_Failed_ { get { return ResourceManager.GetString("RInstaller_InstallPackages_Unknown_error_installing_packages__Tool_Installation_F" + "ailed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Downloading R. /// </summary> public static string RInstaller_InstallR_Downloading_R { get { return ResourceManager.GetString("RInstaller_InstallR_Downloading_R", resourceCulture); } } /// <summary> /// Looks up a localized string similar to R installation was not completed. Cancelling tool installation.. /// </summary> public static string RInstaller_InstallR_R_installation_was_not_completed__Cancelling_tool_installation_ { get { return ResourceManager.GetString("RInstaller_InstallR_R_installation_was_not_completed__Cancelling_tool_installatio" + "n_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires the use of R {0}. Click Install to begin the installation process.. /// </summary> public static string RInstaller_RInstaller_Load_This_tool_requires_the_use_of_R__0___Click_Install_to_begin_the_installation_process_ { get { return ResourceManager.GetString("RInstaller_RInstaller_Load_This_tool_requires_the_use_of_R__0___Click_Install_to_" + "begin_the_installation_process_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires the use of R {0} and the following packages.. /// </summary> public static string RInstaller_RInstaller_Load_This_tool_requires_the_use_of_R__0__and_the_following_packages_ { get { return ResourceManager.GetString("RInstaller_RInstaller_Load_This_tool_requires_the_use_of_R__0__and_the_following_" + "packages_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This Tool requires the use of the following R Packages.. /// </summary> public static string RInstaller_RInstaller_Load_This_Tool_requires_the_use_of_the_following_R_Packages_ { get { return ResourceManager.GetString("RInstaller_RInstaller_Load_This_Tool_requires_the_use_of_the_following_R_Packages" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} aligned to {1}. /// </summary> public static string RtAlignment_AxisTitleAlignedTo { get { return ResourceManager.GetString("RtAlignment_AxisTitleAlignedTo", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed setting data to clipboard.. 
/// </summary> public static string RTDetails_gridStatistics_KeyDown_Failed_setting_data_to_clipboard { get { return ResourceManager.GetString("RTDetails_gridStatistics_KeyDown_Failed_setting_data_to_clipboard", resourceCulture); } } /// <summary> /// Looks up a localized string similar to FWB Time. /// </summary> public static string RtGraphValue_FWB_Time { get { return ResourceManager.GetString("RtGraphValue_FWB_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to FWHM Time. /// </summary> public static string RtGraphValue_FWHM_Time { get { return ResourceManager.GetString("RtGraphValue_FWHM_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retention Time. /// </summary> public static string RtGraphValue_Retention_Time { get { return ResourceManager.GetString("RtGraphValue_Retention_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Measured Time. /// </summary> public static string RTLinearRegressionGraphPane_RTLinearRegressionGraphPane_Measured_Time { get { return ResourceManager.GetString("RTLinearRegressionGraphPane_RTLinearRegressionGraphPane_Measured_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Score. /// </summary> public static string RTLinearRegressionGraphPane_RTLinearRegressionGraphPane_Score { get { return ResourceManager.GetString("RTLinearRegressionGraphPane_RTLinearRegressionGraphPane_Score", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Calculating.... /// </summary> public static string RTLinearRegressionGraphPane_UpdateGraph_Calculating___ { get { return ResourceManager.GetString("RTLinearRegressionGraphPane_UpdateGraph_Calculating___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retention Time. /// </summary> public static string RTPeptideGraphPane_UpdateAxes_Retention_Time { get { return ResourceManager.GetString("RTPeptideGraphPane_UpdateAxes_Retention_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Time. /// </summary> public static string RTPeptideGraphPane_UpdateAxes_Time { get { return ResourceManager.GetString("RTPeptideGraphPane_UpdateAxes_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Measured Time. /// </summary> public static string RTReplicateGraphPane_RTReplicateGraphPane_Measured_Time { get { return ResourceManager.GetString("RTReplicateGraphPane_RTReplicateGraphPane_Measured_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No results available. /// </summary> public static string RTReplicateGraphPane_UpdateGraph_No_results_available { get { return ResourceManager.GetString("RTReplicateGraphPane_UpdateGraph_No_results_available", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Select a peptide to see the retention time graph. /// </summary> public static string RTReplicateGraphPane_UpdateGraph_Select_a_peptide_to_see_the_retention_time_graph { get { return ResourceManager.GetString("RTReplicateGraphPane_UpdateGraph_Select_a_peptide_to_see_the_retention_time_graph" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Step {0}. 
/// </summary> public static string RTReplicateGraphPane_UpdateGraph_Step__0__ { get { return ResourceManager.GetString("RTReplicateGraphPane_UpdateGraph_Step__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} Minute Window. /// </summary> public static string RTScheduleGraphPane_AddCurve__0__Minute_Window { get { return ResourceManager.GetString("RTScheduleGraphPane_AddCurve__0__Minute_Window", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Scheduled Time. /// </summary> public static string RTScheduleGraphPane_RTScheduleGraphPane_Scheduled_Time { get { return ResourceManager.GetString("RTScheduleGraphPane_RTScheduleGraphPane_Scheduled_Time", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Concurrent Accumulations. /// </summary> public static string RTScheduleGraphPane_UpdateGraph_Concurrent_Accumulations { get { return ResourceManager.GetString("RTScheduleGraphPane_UpdateGraph_Concurrent_Accumulations", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Concurrent frames. /// </summary> public static string RTScheduleGraphPane_UpdateGraph_Concurrent_frames { get { return ResourceManager.GetString("RTScheduleGraphPane_UpdateGraph_Concurrent_frames", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Concurrent Precursors. /// </summary> public static string RTScheduleGraphPane_UpdateGraph_Concurrent_Precursors { get { return ResourceManager.GetString("RTScheduleGraphPane_UpdateGraph_Concurrent_Precursors", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Concurrent Transitions. /// </summary> public static string RTScheduleGraphPane_UpdateGraph_Concurrent_Transitions { get { return ResourceManager.GetString("RTScheduleGraphPane_UpdateGraph_Concurrent_Transitions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Max sampling times (seconds). /// </summary> public static string RTScheduleGraphPane_UpdateGraph_Max_sampling_times { get { return ResourceManager.GetString("RTScheduleGraphPane_UpdateGraph_Max_sampling_times", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mean sampling times (seconds). /// </summary> public static string RTScheduleGraphPane_UpdateGraph_Mean_sampling_times { get { return ResourceManager.GetString("RTScheduleGraphPane_UpdateGraph_Mean_sampling_times", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Redundancy of targets. /// </summary> public static string RTScheduleGraphPane_UpdateGraph_Redundancy_of_targets { get { return ResourceManager.GetString("RTScheduleGraphPane_UpdateGraph_Redundancy_of_targets", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Rank of target. /// </summary> public static string RTScheduleGraphPane_UpdateGraph_Target { get { return ResourceManager.GetString("RTScheduleGraphPane_UpdateGraph_Target", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Targets per frame. /// </summary> public static string RTScheduleGraphPane_UpdateGraph_Targets_per_frame { get { return ResourceManager.GetString("RTScheduleGraphPane_UpdateGraph_Targets_per_frame", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The regression(s):. 
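// --- Illustrative usage sketch (assumption: not part of the generated designer file) ---
// Entries whose names embed __0__ are composite format strings; callers fill the
// {0} placeholder with string.Format. "windowMinutes" is a hypothetical argument
// standing in for the scheduling window a caller would supply.
private static string FormatScheduleWindowLabel(double windowMinutes) {
    // Produces e.g. "5 Minute Window" for the English resource.
    return string.Format(RTScheduleGraphPane_AddCurve__0__Minute_Window, windowMinutes);
}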
/// </summary> public static string RTScoreCalculatorList_AcceptList_The_regressions { get { return ResourceManager.GetString("RTScoreCalculatorList_AcceptList_The_regressions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to will be deleted because the calculators they depend on have changed. Do you want to continue?. /// </summary> public static string RTScoreCalculatorList_AcceptList_will_be_deleted_because_the_calculators_they_depend_on_have_changed_Do_you_want_to_continue { get { return ResourceManager.GetString("RTScoreCalculatorList_AcceptList_will_be_deleted_because_the_calculators_they_dep" + "end_on_have_changed_Do_you_want_to_continue", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Retention Time Calculators:. /// </summary> public static string RTScoreCalculatorList_Label_Retention_Time_Calculators { get { return ResourceManager.GetString("RTScoreCalculatorList_Label_Retention_Time_Calculators", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Retention Time Calculators. /// </summary> public static string RTScoreCalculatorList_Title_Edit_Retention_Time_Calculators { get { return ResourceManager.GetString("RTScoreCalculatorList_Title_Edit_Retention_Time_Calculators", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred applying the rule &apos;{0}&apos;:. /// </summary> public static string RuleError_ToString_An_error_occurred_applying_the_rule___0___ { get { return ResourceManager.GetString("RuleError_ToString_An_error_occurred_applying_the_rule___0___", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Save { get { object obj = ResourceManager.GetObject("Save", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Do you want to overwrite the existing settings?. /// </summary> public static string SaveSettingsDlg_OnClosing_Do_you_want_to_overwrite_the_existing_settings { get { return ResourceManager.GetString("SaveSettingsDlg_OnClosing_Do_you_want_to_overwrite_the_existing_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The name {0} already exists.. /// </summary> public static string SaveSettingsDlg_OnClosing_The_name__0__already_exists { get { return ResourceManager.GetString("SaveSettingsDlg_OnClosing_The_name__0__already_exists", resourceCulture); } } /// <summary> /// Looks up a localized string similar to SavitzkyGolaySmoother: Invalid window size ({0}): argument must be positive and odd. /// </summary> public static string SavitzkyGolaySmoother_WindowSize_SavitzkyGolaySmoother__Invalid_window_size___0____argument_must_be_positive_and_odd { get { return ResourceManager.GetString("SavitzkyGolaySmoother_WindowSize_SavitzkyGolaySmoother__Invalid_window_size___0__" + "__argument_must_be_positive_and_odd", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The data file {0} could not be found, either at its original location or in the document or document parent folder.. 
/// </summary> public static string ScanProvider_GetScans_The_data_file__0__could_not_be_found__either_at_its_original_location_or_in_the_document_or_document_parent_folder_ { get { return ResourceManager.GetString("ScanProvider_GetScans_The_data_file__0__could_not_be_found__either_at_its_origina" + "l_location_or_in_the_document_or_document_parent_folder_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The scan ID {0} was not found in the file {1}.. /// </summary> public static string ScanProvider_GetScans_The_scan_ID__0__was_not_found_in_the_file__1__ { get { return ResourceManager.GetString("ScanProvider_GetScans_The_scan_ID__0__was_not_found_in_the_file__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Template file is not valid.. /// </summary> public static string SchedulingGraphPropertyDlg_OkDialog_Template_file_is_not_valid_ { get { return ResourceManager.GetString("SchedulingGraphPropertyDlg_OkDialog_Template_file_is_not_valid_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The replicate {0} contains peptides without enough information to rank transitions for triggered acquisition.. /// </summary> public static string SchedulingOptionsDlg_OkDialog_The_replicate__0__contains_peptides_without_enough_information_to_rank_transitions_for_triggered_acquisition_ { get { return ResourceManager.GetString("SchedulingOptionsDlg_OkDialog_The_replicate__0__contains_peptides_without_enough_" + "information_to_rank_transitions_for_triggered_acquisition_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Using trends in scheduling requires at least {0} replicates.. /// </summary> public static string SchedulingOptionsDlg_TrendsError_Using_trends_in_scheduling_requires_at_least__0__replicates { get { return ResourceManager.GetString("SchedulingOptionsDlg_TrendsError_Using_trends_in_scheduling_requires_at_least__0_" + "_replicates", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Additional Settings. /// </summary> public static string SearchSettingsControl_Additional_Settings { get { return ResourceManager.GetString("SearchSettingsControl_Additional_Settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you want to save your current settings before switching?. /// </summary> public static string SelectSettingsHandler_ToolStripMenuItemClick_Do_you_want_to_save_your_current_settings_before_switching { get { return ResourceManager.GetString("SelectSettingsHandler_ToolStripMenuItemClick_Do_you_want_to_save_your_current_set" + "tings_before_switching", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor isotope {0} is outside the isotope distribution {1} to {2}.. /// </summary> public static string SequenceMassCalc_GetFragmentMass_Precursor_isotope__0__is_outside_the_isotope_distribution__1__to__2__ { get { return ResourceManager.GetString("SequenceMassCalc_GetFragmentMass_Precursor_isotope__0__is_outside_the_isotope_dis" + "tribution__1__to__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No formula found for the amino acid &apos;{0}&apos;. 
/// </summary> public static string SequenceMassCalc_GetHeavyFormula_No_formula_found_for_the_amino_acid___0__ { get { return ResourceManager.GetString("SequenceMassCalc_GetHeavyFormula_No_formula_found_for_the_amino_acid___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Modification definition {0} missing close bracket.. /// </summary> public static string SequenceMassCalc_NormalizeModifiedSequence_Modification_definition__0__missing_close_bracket_ { get { return ResourceManager.GetString("SequenceMassCalc_NormalizeModifiedSequence_Modification_definition__0__missing_cl" + "ose_bracket_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The modification {0} is not valid. Expected a numeric delta mass.. /// </summary> public static string SequenceMassCalc_NormalizeModifiedSequence_The_modification__0__is_not_valid___Expected_a_numeric_delta_mass_ { get { return ResourceManager.GetString("SequenceMassCalc_NormalizeModifiedSequence_The_modification__0__is_not_valid___Ex" + "pected_a_numeric_delta_mass_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The expression &apos;{0}&apos; is not a valid chemical formula.. /// </summary> public static string SequenceMassCalc_ParseModMass_The_expression__0__is_not_a_valid_chemical_formula { get { return ResourceManager.GetString("SequenceMassCalc_ParseModMass_The_expression__0__is_not_a_valid_chemical_formula", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Targets by Accession. /// </summary> public static string SequenceTreeForm_UpdateTitle_Targets_by_Accession { get { return ResourceManager.GetString("SequenceTreeForm_UpdateTitle_Targets_by_Accession", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Targets by Gene. /// </summary> public static string SequenceTreeForm_UpdateTitle_Targets_by_Gene { get { return ResourceManager.GetString("SequenceTreeForm_UpdateTitle_Targets_by_Gene", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Targets by Preferred Name. /// </summary> public static string SequenceTreeForm_UpdateTitle_Targets_by_Preferred_Name { get { return ResourceManager.GetString("SequenceTreeForm_UpdateTitle_Targets_by_Preferred_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure loading {0}.. /// </summary> public static string SerializableSettingsList_ImportFile_Failure_loading__0__ { get { return ResourceManager.GetString("SerializableSettingsList_ImportFile_Failure_loading__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unsupported file version {0}. /// </summary> public static string Serializer_ReadHeader_Unsupported_file_version__0_ { get { return ResourceManager.GetString("Serializer_ReadHeader_Unsupported_file_version__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A Panorama server must be specified.. /// </summary> public static string Server_ReadXml_A_Panorama_server_must_be_specified { get { return ResourceManager.GetString("Server_ReadXml_A_Panorama_server_must_be_specified", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Server URL is corrupt.. /// </summary> public static string Server_ReadXml_Server_URL_is_corrupt { get { return ResourceManager.GetString("Server_ReadXml_Server_URL_is_corrupt", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Servers. 
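// --- Illustrative sketch (assumption: this is NOT Skyline's actual implementation) ---
// The SequenceMassCalc_NormalizeModifiedSequence_* messages above describe the two
// failure modes when parsing bracketed modifications such as "PEPT[+79.97]IDE":
// an unclosed bracket, or bracket contents that are not a numeric delta mass.
// The minimal check below exists only to make those conditions concrete.
private static void CheckModifiedSequence(string seq) {
    int open = seq.IndexOf('[');
    while (open >= 0) {
        int close = seq.IndexOf(']', open + 1);
        if (close < 0)
            throw new System.FormatException(string.Format(
                SequenceMassCalc_NormalizeModifiedSequence_Modification_definition__0__missing_close_bracket_, seq));
        string mod = seq.Substring(open + 1, close - open - 1);
        double ignored;
        if (!double.TryParse(mod, out ignored))
            throw new System.FormatException(string.Format(
                SequenceMassCalc_NormalizeModifiedSequence_The_modification__0__is_not_valid___Expected_a_numeric_delta_mass_, mod));
        open = seq.IndexOf('[', close + 1);
    }
}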
/// </summary> public static string ServerList_Label__Servers { get { return ResourceManager.GetString("ServerList_Label__Servers", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Servers. /// </summary> public static string ServerList_Title_Edit_Servers { get { return ResourceManager.GetString("ServerList_Title_Edit_Servers", resourceCulture); } } /// <summary> /// Looks up a localized string similar to None. /// </summary> public static string SettingsList_ELEMENT_NONE_None { get { return ResourceManager.GetString("SettingsList_ELEMENT_NONE_None", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &lt;Add...&gt;. /// </summary> public static string SettingsListComboDriver_Add { get { return ResourceManager.GetString("SettingsListComboDriver_Add", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &lt;Edit current...&gt;. /// </summary> public static string SettingsListComboDriver_Edit_current { get { return ResourceManager.GetString("SettingsListComboDriver_Edit_current", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &lt;Edit list...&gt;. /// </summary> public static string SettingsListComboDriver_Edit_list { get { return ResourceManager.GetString("SettingsListComboDriver_Edit_list", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Incorrect number of columns ({0}) found on line {1}.. /// </summary> public static string SettingsUIUtil_DoPasteText_Incorrect_number_of_columns__0__found_on_line__1__ { get { return ResourceManager.GetString("SettingsUIUtil_DoPasteText_Incorrect_number_of_columns__0__found_on_line__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you want to replace them?. /// </summary> public static string ShareListDlg_ImportFile_Do_you_want_to_replace_them { get { return ResourceManager.GetString("ShareListDlg_ImportFile_Do_you_want_to_replace_them", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure loading {0}.. /// </summary> public static string ShareListDlg_ImportFile_Failure_loading__0__ { get { return ResourceManager.GetString("ShareListDlg_ImportFile_Failure_loading__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following names already exist:. /// </summary> public static string ShareListDlg_ImportFile_The_following_names_already_exist { get { return ResourceManager.GetString("ShareListDlg_ImportFile_The_following_names_already_exist", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The name &apos;{0}&apos; already exists. Do you want to replace it?. /// </summary> public static string ShareListDlg_ImportFile_The_name__0__already_exists_Do_you_want_to_replace_it { get { return ResourceManager.GetString("ShareListDlg_ImportFile_The_name__0__already_exists_Do_you_want_to_replace_it", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Save {0}. /// </summary> public static string ShareListDlg_Label_Save__0__ { get { return ResourceManager.GetString("ShareListDlg_Label_Save__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Select the {0} you want to save to a file.. 
/// </summary>
public static string ShareListDlg_Label_Select_the__0__you_want_to_save_to_a_file { get { return ResourceManager.GetString("ShareListDlg_Label_Select_the__0__you_want_to_save_to_a_file", resourceCulture); } }

/// <summary>Looks up a localized string similar to An error occurred:.</summary>
public static string ShareListDlg_OkDialog_An_error_occurred { get { return ResourceManager.GetString("ShareListDlg_OkDialog_An_error_occurred", resourceCulture); } }

/// <summary>Looks up a localized string similar to Skyline Settings.</summary>
public static string ShareListDlg_ShareListDlg_Skyline_Settings { get { return ResourceManager.GetString("ShareListDlg_ShareListDlg_Skyline_Settings", resourceCulture); } }

/// <summary>Looks up a localized string similar to Cannot open output file..</summary>
public static string ShimadzuMethodExporter_ExportMethod_Cannot_open_output_file_ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Cannot_open_output_file_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Error copying template file {0} to destination {1}..</summary>
public static string ShimadzuMethodExporter_ExportMethod_Error_copying_template_file__0__to_destination__1__ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Error_copying_template_file__0__to_destination__1__", resourceCulture); } }

/// <summary>Looks up a localized string similar to Exception raised during output serialization..</summary>
public static string ShimadzuMethodExporter_ExportMethod_Exception_raised_during_output_serialization_ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Exception_raised_during_output_serialization_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Input events are not contiguous..</summary>
public static string ShimadzuMethodExporter_ExportMethod_Input_events_are_not_contiguous_ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Input_events_are_not_contiguous_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Input events are not in ascending order.</summary>
public static string ShimadzuMethodExporter_ExportMethod_Input_events_are_not_in_ascending_order { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Input_events_are_not_in_ascending_order", resourceCulture); } }

/// <summary>Looks up a localized string similar to Input string cannot be parsed..</summary>
public static string ShimadzuMethodExporter_ExportMethod_Input_string_cannot_be_parsed_ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Input_string_cannot_be_parsed_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Input string is empty..</summary>
public static string ShimadzuMethodExporter_ExportMethod_Input_string_is_empty_ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Input_string_is_empty_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Invalid parameter. Cannot create output method..</summary>
public static string ShimadzuMethodExporter_ExportMethod_Invalid_parameter__Cannot_create_output_method_ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Invalid_parameter__Cannot_create_output_method_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Number of events exceed the maximum allowed by LabSolutions (1000)..</summary>
public static string ShimadzuMethodExporter_ExportMethod_Number_of_events_exceed_the_maximum_allowed_by_LabSolutions__1000__ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Number_of_events_exceed_the_maximum_allowed_by_LabSolutions__1000__", resourceCulture); } }

/// <summary>Looks up a localized string similar to Output file type is not supported..</summary>
public static string ShimadzuMethodExporter_ExportMethod_Output_file_type_is_not_supported_ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Output_file_type_is_not_supported_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Output method does not contain any events..</summary>
public static string ShimadzuMethodExporter_ExportMethod_Output_method_does_not_contain_any_events_ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Output_method_does_not_contain_any_events_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Output path is not specified..</summary>
public static string ShimadzuMethodExporter_ExportMethod_Output_path_is_not_specified_ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Output_path_is_not_specified_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Shimadzu method writer encountered an error:.</summary>
public static string ShimadzuMethodExporter_ExportMethod_Shimadzu_method_writer_encountered_an_error_ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Shimadzu_method_writer_encountered_an_error_", resourceCulture); } }

/// <summary>Looks up a localized string similar to The transition count {0} exceeds the maximum allowed for this instrument type..</summary>
public static string ShimadzuMethodExporter_ExportMethod_The_transition_count__0__exceeds_the_maximum_allowed_for_this_instrument_type_ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_The_transition_count__0__exceeds_the_maximum_allowed_for_this_instrument_type_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Unexpected response {0} from Shimadzu method writer..</summary>
public static string ShimadzuMethodExporter_ExportMethod_Unexpected_response__0__from_Shimadzu_method_writer_ { get { return ResourceManager.GetString("ShimadzuMethodExporter_ExportMethod_Unexpected_response__0__from_Shimadzu_method_writer_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Cannot open output file..</summary>
public static string ShimadzuNativeMassListExporter_ExportNativeList_Cannot_open_output_file_ { get { return ResourceManager.GetString("ShimadzuNativeMassListExporter_ExportNativeList_Cannot_open_output_file_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Input events are not contiguous..</summary>
public static string ShimadzuNativeMassListExporter_ExportNativeList_Input_events_are_not_contiguous_ { get { return ResourceManager.GetString("ShimadzuNativeMassListExporter_ExportNativeList_Input_events_are_not_contiguous_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Input events are not in ascending order.</summary>
public static string ShimadzuNativeMassListExporter_ExportNativeList_Input_events_are_not_in_ascending_order { get { return ResourceManager.GetString("ShimadzuNativeMassListExporter_ExportNativeList_Input_events_are_not_in_ascending_order", resourceCulture); } }

/// <summary>Looks up a localized string similar to Input string cannot be parsed..</summary>
public static string ShimadzuNativeMassListExporter_ExportNativeList_Input_string_cannot_be_parsed_ { get { return ResourceManager.GetString("ShimadzuNativeMassListExporter_ExportNativeList_Input_string_cannot_be_parsed_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Input string is empty..</summary>
public static string ShimadzuNativeMassListExporter_ExportNativeList_Input_string_is_empty_ { get { return ResourceManager.GetString("ShimadzuNativeMassListExporter_ExportNativeList_Input_string_is_empty_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Invalid parameter. Cannot create output method..</summary>
public static string ShimadzuNativeMassListExporter_ExportNativeList_Invalid_parameter__Cannot_create_output_method_ { get { return ResourceManager.GetString("ShimadzuNativeMassListExporter_ExportNativeList_Invalid_parameter__Cannot_create_output_method_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Number of events exceed maximum allowed by LabSolutions (1000)..</summary>
public static string ShimadzuNativeMassListExporter_ExportNativeList_Number_of_events_exceed_maximum_allowed_by_LabSolutions__1000__ { get { return ResourceManager.GetString("ShimadzuNativeMassListExporter_ExportNativeList_Number_of_events_exceed_maximum_allowed_by_LabSolutions__1000__", resourceCulture); } }

/// <summary>Looks up a localized string similar to Shimadzu method converter encountered an error:.</summary>
public static string ShimadzuNativeMassListExporter_ExportNativeList_Shimadzu_method_converter_encountered_an_error_ { get { return ResourceManager.GetString("ShimadzuNativeMassListExporter_ExportNativeList_Shimadzu_method_converter_encountered_an_error_", resourceCulture); } }

/// <summary>Looks up a localized string similar to The transition count {0} exceeds the maximum allowed for this instrument type.</summary>
public static string ShimadzuNativeMassListExporter_ExportNativeList_The_transition_count__0__exceeds_the_maximum_allowed_for_this_instrument_type { get { return ResourceManager.GetString("ShimadzuNativeMassListExporter_ExportNativeList_The_transition_count__0__exceeds_the_maximum_allowed_for_this_instrument_type", resourceCulture); } }

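// The accessors above return plain .NET composite format strings, so a caller would
// typically substitute the {0} placeholder with string.Format. A minimal sketch,
// assuming the enclosing class is named Resources and a local transitionCount
// variable (both names are assumptions for illustration, not from this file):
//
//   string message = string.Format(
//       Resources.ShimadzuMethodExporter_ExportMethod_The_transition_count__0__exceeds_the_maximum_allowed_for_this_instrument_type_,
//       transitionCount);
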
/// <summary>Looks up a localized string similar to Unexpected response {0} from Shimadzu method converter.</summary>
public static string ShimadzuNativeMassListExporter_ExportNativeList_Unexpected_response__0__from_Shimadzu_method_converter { get { return ResourceManager.GetString("ShimadzuNativeMassListExporter_ExportNativeList_Unexpected_response__0__from_Shimadzu_method_converter", resourceCulture); } }

/// <summary>Looks up a localized string similar to Index is out of range.</summary>
public static string SizedSet_Add_SizedSet_index_value_is_out_of_range { get { return ResourceManager.GetString("SizedSet_Add_SizedSet_index_value_is_out_of_range", resourceCulture); } }

/// <summary>
/// Looks up a localized string similar to Please provide an email address at which we can contact you if we have any further questions regarding the error.
/// If you prefer you can choose to report anonymously..
/// </summary>
public static string SkippedReportErrorDlg_btnOK_Click_No_Email { get { return ResourceManager.GetString("SkippedReportErrorDlg_btnOK_Click_No_Email", resourceCulture); } }

/// <summary>Looks up a localized resource of type System.Drawing.Icon similar to (Icon).</summary>
public static System.Drawing.Icon Skyline {
    get {
        object obj = ResourceManager.GetObject("Skyline", resourceCulture);
        return ((System.Drawing.Icon)(obj));
    }
}

/// <summary>Looks up a localized resource of type System.Drawing.Icon similar to (Icon).</summary>
public static System.Drawing.Icon Skyline_Daily {
    get {
        object obj = ResourceManager.GetObject("Skyline_Daily", resourceCulture);
        return ((System.Drawing.Icon)(obj));
    }
}

/// <summary>Looks up a localized string similar to peptides.</summary>
public static string Skyline_peptides { get { return ResourceManager.GetString("Skyline_peptides", resourceCulture); } }

/// <summary>Looks up a localized string similar to protein.</summary>
public static string Skyline_protein { get { return ResourceManager.GetString("Skyline_protein", resourceCulture); } }

/// <summary>Looks up a localized resource of type System.Drawing.Bitmap.</summary>
public static System.Drawing.Bitmap Skyline_Release {
    get {
        object obj = ResourceManager.GetObject("Skyline_Release", resourceCulture);
        return ((System.Drawing.Bitmap)(obj));
    }
}

/// <summary>Looks up a localized resource of type System.Drawing.Icon similar to (Icon).</summary>
public static System.Drawing.Icon Skyline_Release1 {
    get {
        object obj = ResourceManager.GetObject("Skyline_Release1", resourceCulture);
        return ((System.Drawing.Icon)(obj));
    }
}

/// <summary>Looks up a localized resource of type System.Drawing.Icon similar to (Icon).</summary>
public static System.Drawing.Icon SkylineData {
    get {
        object obj = ResourceManager.GetObject("SkylineData", resourceCulture);
        return ((System.Drawing.Icon)(obj));
    }
}

/// <summary>Looks up a localized string similar to Item in list &apos;{0}&apos;.</summary>
public static string SkylineDataSchema_GetTypeDescription_Item_in_list___0__ { get { return ResourceManager.GetString("SkylineDataSchema_GetTypeDescription_Item_in_list___0__", resourceCulture); } }

/// <summary>Looks up a localized string similar to The document was modified in the middle of the operation..</summary>
public static string SkylineDataSchema_VerifyDocumentCurrent_The_document_was_modified_in_the_middle_of_the_operation_ { get { return ResourceManager.GetString("SkylineDataSchema_VerifyDocumentCurrent_The_document_was_modified_in_the_middle_of_the_operation_", resourceCulture); } }

/// <summary>Looks up a localized resource of type System.Drawing.Icon similar to (Icon).</summary>
public static System.Drawing.Icon SkylineDoc {
    get {
        object obj = ResourceManager.GetObject("SkylineDoc", resourceCulture);
        return ((System.Drawing.Icon)(obj));
    }
}

/// <summary>Looks up a localized string similar to Are you sure you want to delete these {0} things?.</summary>
public static string SkylineDocNode_GetGenericDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__things_ { get { return ResourceManager.GetString("SkylineDocNode_GetGenericDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__things_", resourceCulture); } }

/// <summary>Looks up a localized resource of type System.Drawing.Bitmap.</summary>
public static System.Drawing.Bitmap SkylineImg {
    get {
        object obj = ResourceManager.GetObject("SkylineImg", resourceCulture);
        return ((System.Drawing.Bitmap)(obj));
    }
}

/// <summary>Looks up a localized string similar to Please save a new file to work with..</summary>
public static string SkylineStartup_SaveFileDlg_Please_save_a_new_file_to_work_with_ { get { return ResourceManager.GetString("SkylineStartup_SaveFileDlg_Please_save_a_new_file_to_work_with_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Blank Document.</summary>
public static string SkylineStartup_SkylineStartup_Blank_Document { get { return ResourceManager.GetString("SkylineStartup_SkylineStartup_Blank_Document", resourceCulture); } }

/// <summary>Looks up a localized string similar to Import DDA Peptide Search.</summary>
public static string SkylineStartup_SkylineStartup_Import_DDA_Peptide_Search { get { return ResourceManager.GetString("SkylineStartup_SkylineStartup_Import_DDA_Peptide_Search", resourceCulture); } }

/// <summary>Looks up a localized string similar to Import FASTA.</summary>
public static string SkylineStartup_SkylineStartup_Import_FASTA { get { return ResourceManager.GetString("SkylineStartup_SkylineStartup_Import_FASTA", resourceCulture); } }

/// <summary>Looks up a localized string similar to Import Peptide List.</summary>
public static string SkylineStartup_SkylineStartup_Import_Peptide_List { get { return ResourceManager.GetString("SkylineStartup_SkylineStartup_Import_Peptide_List", resourceCulture); } }

/// <summary>Looks up a localized string similar to Import Protein List.</summary>
public static string SkylineStartup_SkylineStartup_Import_Protein_List { get { return ResourceManager.GetString("SkylineStartup_SkylineStartup_Import_Protein_List", resourceCulture); } }

/// <summary>Looks up a localized string similar to Import Transition List.</summary>
public static string SkylineStartup_SkylineStartup_Import_Transition_List { get { return ResourceManager.GetString("SkylineStartup_SkylineStartup_Import_Transition_List", resourceCulture); } }

/// <summary>Looks up a localized string similar to Start a new Skyline document from a complete transition list with peptide sequences, precursor m/z values, and product m/z values, which you can paste into a grid..</summary>
public static string SkylineStartup_SkylineStartup_Start_a_new_Skyline_document_from_a_complete_transition_list_with_peptide_sequences__precursor_m_z_values__and_product_m_z_values__which_you_can_paste_into_a_grid_ { get { return ResourceManager.GetString("SkylineStartup_SkylineStartup_Start_a_new_Skyline_document_from_a_complete_transition_list_with_peptide_sequences__precursor_m_z_values__and_product_m_z_values__which_you_can_paste_into_a_grid_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Start a new Skyline document with target proteins specified in a tabular list you can paste into a grid..</summary>
public static string SkylineStartup_SkylineStartup_Start_a_new_Skyline_document_with_target_proteins_specified_in_a_tabular_list_you_can_paste_into_a_grid_ { get { return ResourceManager.GetString("SkylineStartup_SkylineStartup_Start_a_new_Skyline_document_with_target_proteins_specified_in_a_tabular_list_you_can_paste_into_a_grid_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Start a new Skyline document with target proteins specified in FASTA format..</summary>
public static string SkylineStartup_SkylineStartup_Start_a_new_Skyline_document_with_target_proteins_specified_in_FASTA_format_ { get { return ResourceManager.GetString("SkylineStartup_SkylineStartup_Start_a_new_Skyline_document_with_target_proteins_specified_in_FASTA_format_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Start a new Skyline document with targets specified as a list of peptide sequences in a tabular list you can paste into a grid..</summary>
public static string SkylineStartup_SkylineStartup_Start_a_new_Skyline_document_with_targets_specified_as_a_list_of_peptide_sequences_in_a_tabular_list_you_can_paste_into_a_grid_ { get { return ResourceManager.GetString("SkylineStartup_SkylineStartup_Start_a_new_Skyline_document_with_targets_specified_as_a_list_of_peptide_sequences_in_a_tabular_list_you_can_paste_into_a_grid_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Use the Skyline Import Peptide Search wizard to build a spectral library from peptide search results on DDA data, and then import the raw data to quantify peptides using Skyline MS1 Filtering..</summary>
public static string SkylineStartup_SkylineStartup_Use_the_Skyline_Import_Peptide_Search_wizard_to_build_a_spectral_library_from_peptide_search_results_on_DDA_data__and_then_import_the_raw_data_to_quantify_peptides_using_Skyline_MS1_Filtering_ { get { return ResourceManager.GetString("SkylineStartup_SkylineStartup_Use_the_Skyline_Import_Peptide_Search_wizard_to_build_a_spectral_library_from_peptide_search_results_on_DDA_data__and_then_import_the_raw_data_to_quantify_peptides_using_Skyline_MS1_Filtering_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Current ({0}).</summary>
public static string SkylineVersion_GetCurrentVersionName_Current___0__ { get { return ResourceManager.GetString("SkylineVersion_GetCurrentVersionName_Current___0__", resourceCulture); } }

/// <summary>Looks up a localized string similar to Developer Build.</summary>
public static string SkylineVersion_GetCurrentVersionName_Developer_Build { get { return ResourceManager.GetString("SkylineVersion_GetCurrentVersionName_Developer_Build", resourceCulture); } }

/// <summary>Looks up a localized string similar to Skyline 19.1.</summary>
public static string SkylineVersion_V19_1_Skyline_19_1 { get { return ResourceManager.GetString("SkylineVersion_V19_1_Skyline_19_1", resourceCulture); } }

/// <summary>Looks up a localized string similar to Skyline 20.1.</summary>
public static string SkylineVersion_V20_1_Skyline_20_1 { get { return ResourceManager.GetString("SkylineVersion_V20_1_Skyline_20_1", resourceCulture); } }

/// <summary>Looks up a localized string similar to Skyline 20.2.</summary>
public static string SkylineVersion_V20_2_Skyline_20_2 { get { return ResourceManager.GetString("SkylineVersion_V20_2_Skyline_20_2", resourceCulture); } }

/// <summary>Looks up a localized string similar to Skyline 3.6.</summary>
public static string SkylineVersion_V3_6_Skyline_3_6 { get { return ResourceManager.GetString("SkylineVersion_V3_6_Skyline_3_6", resourceCulture); } }

/// <summary>Looks up a localized string similar to Skyline 3.7.</summary>
public static string SkylineVersion_V3_7_Skyline_3_7 { get { return ResourceManager.GetString("SkylineVersion_V3_7_Skyline_3_7", resourceCulture); } }

/// <summary>Looks up a localized string similar to Skyline 4.1.</summary>
public static string SkylineVersion_V4_1_Skyline_4_1 { get { return ResourceManager.GetString("SkylineVersion_V4_1_Skyline_4_1", resourceCulture); } }

/// <summary>Looks up a localized string similar to Skyline 4.2.</summary>
public static string SkylineVersion_V4_2_Skyline_4_2 { get { return ResourceManager.GetString("SkylineVersion_V4_2_Skyline_4_2", resourceCulture); } }

/// <summary>Looks up a localized string similar to Change Document Reports.</summary>
public static string SkylineViewContext_ChangeDocumentViewSpec_Change_Document_Reports { get { return ResourceManager.GetString("SkylineViewContext_ChangeDocumentViewSpec_Change_Document_Reports", resourceCulture); } }

/// <summary>Looks up a localized string similar to Delete items.</summary>
public static string SkylineViewContext_DeleteDocNodes_Delete_items { get { return ResourceManager.GetString("SkylineViewContext_DeleteDocNodes_Delete_items", resourceCulture); } }

/// <summary>Looks up a localized string similar to Molecule Lists.</summary>
public static string SkylineViewContext_GetDocumentGridRowSources_Molecule_Lists { get { return ResourceManager.GetString("SkylineViewContext_GetDocumentGridRowSources_Molecule_Lists", resourceCulture); } }

/// <summary>Looks up a localized string similar to Molecules.</summary>
public static string SkylineViewContext_GetDocumentGridRowSources_Molecules { get { return ResourceManager.GetString("SkylineViewContext_GetDocumentGridRowSources_Molecules", resourceCulture); } }

/// <summary>Looks up a localized string similar to Peptides.</summary>
public static string SkylineViewContext_GetDocumentGridRowSources_Peptides { get { return ResourceManager.GetString("SkylineViewContext_GetDocumentGridRowSources_Peptides", resourceCulture); } }

/// <summary>Looks up a localized string similar to Precursors.</summary>
public static string SkylineViewContext_GetDocumentGridRowSources_Precursors { get { return ResourceManager.GetString("SkylineViewContext_GetDocumentGridRowSources_Precursors", resourceCulture); } }

/// <summary>Looks up a localized string similar to Proteins.</summary>
public static string SkylineViewContext_GetDocumentGridRowSources_Proteins { get { return ResourceManager.GetString("SkylineViewContext_GetDocumentGridRowSources_Proteins", resourceCulture); } }

/// <summary>Looks up a localized string similar to Replicates.</summary>
public static string SkylineViewContext_GetDocumentGridRowSources_Replicates { get { return ResourceManager.GetString("SkylineViewContext_GetDocumentGridRowSources_Replicates", resourceCulture); } }

/// <summary>Looks up a localized string similar to Transitions.</summary>
public static string SkylineViewContext_GetDocumentGridRowSources_Transitions { get { return ResourceManager.GetString("SkylineViewContext_GetDocumentGridRowSources_Transitions", resourceCulture); } }

/// <summary>Looks up a localized string similar to Mixed Transition List.</summary>
public static string SkylineViewContext_GetTransitionListReportSpec_Mixed_Transition_List { get { return ResourceManager.GetString("SkylineViewContext_GetTransitionListReportSpec_Mixed_Transition_List", resourceCulture); } }

/// <summary>Looks up a localized string similar to Peptide Transition List.</summary>
public static string SkylineViewContext_GetTransitionListReportSpec_Peptide_Transition_List { get { return ResourceManager.GetString("SkylineViewContext_GetTransitionListReportSpec_Peptide_Transition_List", resourceCulture); } }

/// <summary>Looks up a localized string similar to Small Molecule Transition List.</summary>
public static string SkylineViewContext_GetTransitionListReportSpec_Small_Molecule_Transition_List { get { return ResourceManager.GetString("SkylineViewContext_GetTransitionListReportSpec_Small_Molecule_Transition_List", resourceCulture); } }

/// <summary>Looks up a localized string similar to Failure loading {0}..</summary>
public static string SkylineViewContext_ImportViews_Failure_loading__0__ { get { return ResourceManager.GetString("SkylineViewContext_ImportViews_Failure_loading__0__", resourceCulture); } }

/// <summary>Looks up a localized string similar to No views were found in that file..</summary>
public static string SkylineViewContext_ImportViews_No_views_were_found_in_that_file_ { get { return ResourceManager.GetString("SkylineViewContext_ImportViews_No_views_were_found_in_that_file_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Change Document Reports.</summary>
public static string SkylineViewContext_SaveViewSpecList_Change_Document_Reports { get { return ResourceManager.GetString("SkylineViewContext_SaveViewSpecList_Change_Document_Reports", resourceCulture); } }

/// <summary>Looks up a localized string similar to Accept peptides.</summary>
public static string SkylineWindow_acceptPeptidesMenuItem_Click_Accept_peptides { get { return ResourceManager.GetString("SkylineWindow_acceptPeptidesMenuItem_Click_Accept_peptides", resourceCulture); } }

/// <summary>Looks up a localized string similar to All Replicates.</summary>
public static string SkylineWindow_AddGroupByMenuItems_All_Replicates { get { return ResourceManager.GetString("SkylineWindow_AddGroupByMenuItems_All_Replicates", resourceCulture); } }

/// <summary>Looks up a localized string similar to Add Fold Change.</summary>
public static string SkylineWindow_AddGroupComparison_Add_Fold_Change { get { return ResourceManager.GetString("SkylineWindow_AddGroupComparison_Add_Fold_Change", resourceCulture); } }

/// <summary>Looks up a localized string similar to Imported peptide {0} with iRT library value is already being used as an iRT standard..</summary>
public static string SkylineWindow_AddIrtPeptides_Imported_peptide__0__with_iRT_library_value_is_already_being_used_as_an_iRT_standard_ { get { return ResourceManager.GetString("SkylineWindow_AddIrtPeptides_Imported_peptide__0__with_iRT_library_value_is_already_being_used_as_an_iRT_standard_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Add custom product ion {0}.</summary>
public static string SkylineWindow_AddMolecule_Add_custom_product_ion__0_ { get { return ResourceManager.GetString("SkylineWindow_AddMolecule_Add_custom_product_ion__0_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Add Transition.</summary>
public static string SkylineWindow_AddMolecule_Add_Transition { get { return ResourceManager.GetString("SkylineWindow_AddMolecule_Add_Transition", resourceCulture); } }

/// <summary>Looks up a localized string similar to Custom molecules cannot be added to a peptide list..</summary>
public static string SkylineWindow_AddMolecule_Custom_molecules_cannot_be_added_to_a_peptide_list_ { get { return ResourceManager.GetString("SkylineWindow_AddMolecule_Custom_molecules_cannot_be_added_to_a_peptide_list_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Custom molecules cannot be added to a protein..</summary>
public static string SkylineWindow_AddMolecule_Custom_molecules_cannot_be_added_to_a_protein_ { get { return ResourceManager.GetString("SkylineWindow_AddMolecule_Custom_molecules_cannot_be_added_to_a_protein_", resourceCulture); } }

/// <summary>Looks up a localized string similar to The precursor m/z for this molecule is out of range for your instrument settings..</summary>
public static string SkylineWindow_AddMolecule_The_precursor_m_z_for_this_molecule_is_out_of_range_for_your_instrument_settings_ { get { return ResourceManager.GetString("SkylineWindow_AddMolecule_The_precursor_m_z_for_this_molecule_is_out_of_range_for_your_instrument_settings_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Add Precursor.</summary>
public static string SkylineWindow_AddSmallMolecule_Add_Precursor { get { return ResourceManager.GetString("SkylineWindow_AddSmallMolecule_Add_Precursor", resourceCulture); } }

/// <summary>Looks up a localized string similar to Add molecule {0}.</summary>
public static string SkylineWindow_AddSmallMolecule_Add_small_molecule__0_ { get { return ResourceManager.GetString("SkylineWindow_AddSmallMolecule_Add_small_molecule__0_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Add Molecule and Precursor.</summary>
public static string SkylineWindow_AddSmallMolecule_Add_Small_Molecule_and_Precursor { get { return ResourceManager.GetString("SkylineWindow_AddSmallMolecule_Add_Small_Molecule_and_Precursor", resourceCulture); } }

/// <summary>Looks up a localized string similar to Add molecule precursor {0}.</summary>
public static string SkylineWindow_AddSmallMolecule_Add_small_molecule_precursor__0_ { get { return ResourceManager.GetString("SkylineWindow_AddSmallMolecule_Add_small_molecule_precursor__0_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Add standard peptides.</summary>
public static string SkylineWindow_AddStandardsToDocument_Add_standard_peptides { get { return ResourceManager.GetString("SkylineWindow_AddStandardsToDocument_Add_standard_peptides", resourceCulture); } }

/// <summary>Looks up a localized string similar to Align Times To {0}.</summary>
public static string SkylineWindow_AlignTimesToFileFormat { get { return ResourceManager.GetString("SkylineWindow_AlignTimesToFileFormat", resourceCulture); } }

/// <summary>Looks up a localized string similar to Applying Peak.</summary>
public static string SkylineWindow_ApplyPeak_Applying_Peak { get { return ResourceManager.GetString("SkylineWindow_ApplyPeak_Applying_Peak", resourceCulture); } }

/// <summary>Looks up a localized string similar to Failed to apply peak..</summary>
public static string SkylineWindow_ApplyPeak_Failed_to_apply_peak_ { get { return ResourceManager.GetString("SkylineWindow_ApplyPeak_Failed_to_apply_peak_", resourceCulture); } }

/// <summary>Looks up a localized string similar to No results found for the precursor {0} in the file {1}..</summary>
public static string SkylineWindow_ApplyPeak_No_results_found_for_the_precursor__0__in_the_file__1__ { get { return ResourceManager.GetString("SkylineWindow_ApplyPeak_No_results_found_for_the_precursor__0__in_the_file__1__", resourceCulture); } }

/// <summary>Looks up a localized string similar to Show Expected.</summary>
public static string SkylineWindow_BuildAreaGraphMenu_Show_Expected { get { return ResourceManager.GetString("SkylineWindow_BuildAreaGraphMenu_Show_Expected", resourceCulture); } }

/// <summary>Looks up a localized string similar to Show Library.</summary>
public static string SkylineWindow_BuildAreaGraphMenu_Show_Library { get { return ResourceManager.GetString("SkylineWindow_BuildAreaGraphMenu_Show_Library", resourceCulture); } }

/// <summary>Looks up a localized string similar to Apply Peak to .</summary>
public static string SkylineWindow_BuildChromatogramMenu_Apply_Peak_to_ { get { return ResourceManager.GetString("SkylineWindow_BuildChromatogramMenu_Apply_Peak_to_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Store Panorama upload location.</summary>
public static string SkylineWindow_ChangeDocPanoramaUri_Store_Panorama_upload_location { get { return ResourceManager.GetString("SkylineWindow_ChangeDocPanoramaUri_Store_Panorama_upload_location", resourceCulture); } }

/// <summary>Looks up a localized string similar to Change settings.</summary>
public static string SkylineWindow_ChangeSettings_Change_settings { get { return ResourceManager.GetString("SkylineWindow_ChangeSettings_Change_settings", resourceCulture); } }

/// <summary>Looks up a localized string similar to Click OK to open the document anyway.</summary>
public static string SkylineWindow_CheckResults_Click_OK_to_open_the_document_anyway { get { return ResourceManager.GetString("SkylineWindow_CheckResults_Click_OK_to_open_the_document_anyway", resourceCulture); } }

/// <summary>Looks up a localized string similar to The data file &apos;{0}&apos; is missing, and the following original instrument output could not be found:.</summary>
public static string SkylineWindow_CheckResults_The_data_file___0___is_missing__and_the_following_original_instrument_output_could_not_be_found_ { get { return ResourceManager.GetString("SkylineWindow_CheckResults_The_data_file___0___is_missing__and_the_following_original_instrument_output_could_not_be_found_", resourceCulture); } }

/// <summary>
/// Looks up a localized string similar to The Transition Settings Full-Scan retention time filter is set to use the predicted retention time, but no prediction algorithm has been specified.
/// Go to the Peptide Settings Prediction tab to fix..
/// </summary>
public static string SkylineWindow_CheckRetentionTimeFilter_NoPredictionAlgorithm { get { return ResourceManager.GetString("SkylineWindow_CheckRetentionTimeFilter_NoPredictionAlgorithm", resourceCulture); } }

/// <summary>Looks up a localized string similar to The Transition Settings Full-Scan retention time filter is set to use the predicted retention time, but the prediction algorithm has not been calibrated. Do you want to generate full gradient chromatograms?.</summary>
public static string SkylineWindow_CheckRetentionTimeFilter_NoReplicatesAvailableForPrediction { get { return ResourceManager.GetString("SkylineWindow_CheckRetentionTimeFilter_NoReplicatesAvailableForPrediction", resourceCulture); } }

/// <summary>Looks up a localized string similar to Do you want to save changes?.</summary>
public static string SkylineWindow_CheckSaveDocument_Do_you_want_to_save_changes { get { return ResourceManager.GetString("SkylineWindow_CheckSaveDocument_Do_you_want_to_save_changes", resourceCulture); } }

/// <summary>Looks up a localized string similar to No.</summary>
public static string SkylineWindow_CheckSaveDocument_No { get { return ResourceManager.GetString("SkylineWindow_CheckSaveDocument_No", resourceCulture); } }

/// <summary>Looks up a localized string similar to Yes.</summary>
public static string SkylineWindow_CheckSaveDocument_Yes { get { return ResourceManager.GetString("SkylineWindow_CheckSaveDocument_Yes", resourceCulture); } }

/// <summary>
/// Looks up a localized string similar to 
///
/// {0} ran out of memory..
/// </summary>
public static string SkylineWindow_CompleteProgressUI_Ran_Out_Of_Memory { get { return ResourceManager.GetString("SkylineWindow_CompleteProgressUI_Ran_Out_Of_Memory", resourceCulture); } }

/// <summary>
/// Looks up a localized string similar to 
///
/// You may be able to avoid this problem by installing a 64-bit version of {0}..
/// </summary>
public static string SkylineWindow_CompleteProgressUI_version_issue { get { return ResourceManager.GetString("SkylineWindow_CompleteProgressUI_version_issue", resourceCulture); } }

/// <summary>Looks up a localized string similar to Could not find the spectral library {0} for this document. Without the library, no spectrum ID information will be available..</summary>
public static string SkylineWindow_ConnectLibrarySpecs_Could_not_find_the_spectral_library__0__for_this_document__Without_the_library__no_spectrum_ID_information_will_be_available_ { get { return ResourceManager.GetString("SkylineWindow_ConnectLibrarySpecs_Could_not_find_the_spectral_library__0__for_this_document__Without_the_library__no_spectrum_ID_information_will_be_available_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Find Spectral Library.</summary>
public static string SkylineWindow_ConnectLibrarySpecs_Find_Spectral_Library { get { return ResourceManager.GetString("SkylineWindow_ConnectLibrarySpecs_Find_Spectral_Library", resourceCulture); } }

/// <summary>Looks up a localized string similar to Spectral Library.</summary>
public static string SkylineWindow_ConnectLibrarySpecs_Spectral_Library { get { return ResourceManager.GetString("SkylineWindow_ConnectLibrarySpecs_Spectral_Library", resourceCulture); } }

/// <summary>Looks up a localized string similar to Detection.</summary>
public static string SkylineWindow_CreateGraphDetections_Counts { get { return ResourceManager.GetString("SkylineWindow_CreateGraphDetections_Counts", resourceCulture); } }

/// <summary>Looks up a localized string similar to Mass Errors.</summary>
public static string SkylineWindow_CreateGraphMassError_Mass_Errors { get { return ResourceManager.GetString("SkylineWindow_CreateGraphMassError_Mass_Errors", resourceCulture); } }

/// <summary>Looks up a localized string similar to Peak Areas.</summary>
public static string SkylineWindow_CreateGraphPeakArea_Peak_Areas { get { return ResourceManager.GetString("SkylineWindow_CreateGraphPeakArea_Peak_Areas", resourceCulture); } }

/// <summary>Looks up a localized string similar to Retention Times.</summary>
public static string SkylineWindow_CreateGraphRetentionTime_Retention_Times { get { return ResourceManager.GetString("SkylineWindow_CreateGraphRetentionTime_Retention_Times", resourceCulture); } }

/// <summary>Looks up a localized string similar to Set regression {0}.</summary>
public static string SkylineWindow_CreateRegression_Set_regression__0__ { get { return ResourceManager.GetString("SkylineWindow_CreateRegression_Set_regression__0__", resourceCulture); } }

/// <summary>Looks up a localized string similar to The DocumentUI property may only be accessed on the UI thread..</summary>
public static string SkylineWindow_DocumentUI_The_DocumentUI_property_may_only_be_accessed_on_the_UI_thread { get { return ResourceManager.GetString("SkylineWindow_DocumentUI_The_DocumentUI_property_may_only_be_accessed_on_the_UI_thread", resourceCulture); } }

/// <summary>Looks up a localized string similar to Delete {0}.</summary>
public static string SkylineWindow_EditDelete_Delete__0__ { get { return ResourceManager.GetString("SkylineWindow_EditDelete_Delete__0__", resourceCulture); } }

/// <summary>Looks up a localized string similar to items.</summary>
public static string SkylineWindow_EditDelete_items { get { return ResourceManager.GetString("SkylineWindow_EditDelete_items", resourceCulture); } }

/// <summary>Looks up a localized string similar to Edit Note.</summary>
public static string SkylineWindow_EditNote_Edit_Note { get { return ResourceManager.GetString("SkylineWindow_EditNote_Edit_Note", resourceCulture); } }

/// <summary>Looks up a localized string similar to Apply Peak to Group.</summary>
public static string SkylineWindow_editToolStripMenuItem_DropDownOpening_Apply_Peak_to_Group { get { return ResourceManager.GetString("SkylineWindow_editToolStripMenuItem_DropDownOpening_Apply_Peak_to_Group", resourceCulture); } }

/// <summary>Looks up a localized string similar to &amp;Lists.</summary>
public static string SkylineWindow_expandAllMenuItem_DropDownOpening__Lists { get { return ResourceManager.GetString("SkylineWindow_expandAllMenuItem_DropDownOpening__Lists", resourceCulture); } }

/// <summary>Looks up a localized string similar to &amp;Molecules.</summary>
public static string SkylineWindow_expandAllMenuItem_DropDownOpening__Molecules { get { return ResourceManager.GetString("SkylineWindow_expandAllMenuItem_DropDownOpening__Molecules", resourceCulture); } }

/// <summary>Looks up a localized string similar to The number of targets exceeds the limit for this operation..</summary>
public static string SkylineWindow_ExpandProteins_The_number_of_targets_exceeds_the_limit_for_this_operation_ { get { return ResourceManager.GetString("SkylineWindow_ExpandProteins_The_number_of_targets_exceeds_the_limit_for_this_operation_", resourceCulture); } }

/// <summary>Looks up a localized string similar to There is no isolation list data to export..</summary>
public static string SkylineWindow_exportIsolationListMenuItem_Click_There_is_no_isolation_list_data_to_export { get { return ResourceManager.GetString("SkylineWindow_exportIsolationListMenuItem_Click_There_is_no_isolation_list_data_to_export", resourceCulture); } }

/// <summary>Looks up a localized string similar to Background Proteome.</summary>
public static string SkylineWindow_FindBackgroundProteome_Background_Proteome { get { return ResourceManager.GetString("SkylineWindow_FindBackgroundProteome_Background_Proteome", resourceCulture); } }

/// <summary>Looks up a localized string similar to Find Background Proteome.</summary>
public static string SkylineWindow_FindBackgroundProteome_Find_Background_Proteome { get { return ResourceManager.GetString("SkylineWindow_FindBackgroundProteome_Find_Background_Proteome", resourceCulture); } }

/// <summary>Looks up a localized string similar to Proteome File.</summary>
public static string SkylineWindow_FindBackgroundProteome_Proteome_File { get { return ResourceManager.GetString("SkylineWindow_FindBackgroundProteome_Proteome_File", resourceCulture); } }

/// <summary>Looks up a localized string similar to ion mobility library files.</summary>
public static string SkylineWindow_FindIonMobilityDatabase_ion_mobility_library_files { get { return ResourceManager.GetString("SkylineWindow_FindIonMobilityDatabase_ion_mobility_library_files", resourceCulture); } }

/// <summary>Looks up a localized string similar to The ion mobility library specified could not be opened:.</summary>
public static string SkylineWindow_FindIonMobilityDatabase_The_ion_mobility_library_specified_could_not_be_opened_ { get { return ResourceManager.GetString("SkylineWindow_FindIonMobilityDatabase_The_ion_mobility_library_specified_could_not_be_opened_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Find Ion Mobility Library.</summary>
public static string SkylineWindow_FindIonMobilityLibrary_Find_Ion_Mobility_Library { get { return ResourceManager.GetString("SkylineWindow_FindIonMobilityLibrary_Find_Ion_Mobility_Library", resourceCulture); } }

/// <summary>Looks up a localized string similar to Ion Mobility Library.</summary>
public static string SkylineWindow_FindIonMobilityLibrary_Ion_Mobility_Library { get { return ResourceManager.GetString("SkylineWindow_FindIonMobilityLibrary_Ion_Mobility_Library", resourceCulture); } }

/// <summary>Looks up a localized string similar to Find iRT Calculator.</summary>
public static string SkylineWindow_FindIrtDatabase_Find_iRT_Calculator { get { return ResourceManager.GetString("SkylineWindow_FindIrtDatabase_Find_iRT_Calculator", resourceCulture); } }

/// <summary>Looks up a localized string similar to iRT Calculator.</summary>
public static string SkylineWindow_FindIrtDatabase_iRT_Calculator { get { return ResourceManager.GetString("SkylineWindow_FindIrtDatabase_iRT_Calculator", resourceCulture); } }

/// <summary>Looks up a localized string similar to iRT Database Files.</summary>
public static string SkylineWindow_FindIrtDatabase_iRT_Database_Files { get { return ResourceManager.GetString("SkylineWindow_FindIrtDatabase_iRT_Database_Files", resourceCulture); } }

/// <summary>Looks up a localized string similar to The database file specified could not be opened:.</summary>
public static string SkylineWindow_FindIrtDatabase_The_database_file_specified_could_not_be_opened { get { return ResourceManager.GetString("SkylineWindow_FindIrtDatabase_The_database_file_specified_could_not_be_opened", resourceCulture); } }

/// <summary>Looks up a localized string similar to Find Optimization Library.</summary>
public static string SkylineWindow_FindOptimizationDatabase_Find_Optimization_Library { get { return ResourceManager.GetString("SkylineWindow_FindOptimizationDatabase_Find_Optimization_Library", resourceCulture); } }

/// <summary>Looks up a localized string similar to Optimization Library.</summary>
public static string SkylineWindow_FindOptimizationDatabase_Optimization_Library { get { return ResourceManager.GetString("SkylineWindow_FindOptimizationDatabase_Optimization_Library", resourceCulture); } }

/// <summary>Looks up a localized string similar to Optimization Library Files.</summary>
public static string SkylineWindow_FindOptimizationDatabase_Optimization_Library_Files { get { return ResourceManager.GetString("SkylineWindow_FindOptimizationDatabase_Optimization_Library_Files", resourceCulture); } }

/// <summary>Looks up a localized string similar to The database file specified could not be opened:.</summary>
public static string SkylineWindow_FindOptimizationDatabase_The_database_file_specified_could_not_be_opened_ { get { return ResourceManager.GetString("SkylineWindow_FindOptimizationDatabase_The_database_file_specified_could_not_be_opened_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Are you sure you want to continue?.</summary>
public static string SkylineWindow_generateDecoysMenuItem_Click_Are_you_sure_you_want_to_continue { get { return ResourceManager.GetString("SkylineWindow_generateDecoysMenuItem_Click_Are_you_sure_you_want_to_continue", resourceCulture); } }

/// <summary>Looks up a localized string similar to The document must contain peptides to generate decoys..</summary>
public static string SkylineWindow_generateDecoysMenuItem_Click_The_document_must_contain_peptides_to_generate_decoys_ { get { return ResourceManager.GetString("SkylineWindow_generateDecoysMenuItem_Click_The_document_must_contain_peptides_to_generate_decoys_", resourceCulture); } }

/// <summary>Looks up a localized string similar to This operation will replace the existing decoys..</summary>
public static string SkylineWindow_generateDecoysMenuItem_Click_This_operation_will_replace_the_existing_decoys { get { return ResourceManager.GetString("SkylineWindow_generateDecoysMenuItem_Click_This_operation_will_replace_the_existing_decoys", resourceCulture); } }

/// <summary>Looks up a localized string similar to Change peak end to {0:F01}.</summary>
public static string SkylineWindow_graphChromatogram_ChangedPeakBounds_Change_peak_end_to__0_F01_ { get { return ResourceManager.GetString("SkylineWindow_graphChromatogram_ChangedPeakBounds_Change_peak_end_to__0_F01_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Change peak start to {0:F01}.</summary>
public static string SkylineWindow_graphChromatogram_ChangedPeakBounds_Change_peak_start_to__0_F01_ { get { return ResourceManager.GetString("SkylineWindow_graphChromatogram_ChangedPeakBounds_Change_peak_start_to__0_F01_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Change peak to {0:F01}-{1:F01}.</summary>
public static string SkylineWindow_graphChromatogram_ChangedPeakBounds_Change_peak_to__0_F01___1_F01_ { get { return ResourceManager.GetString("SkylineWindow_graphChromatogram_ChangedPeakBounds_Change_peak_to__0_F01___1_F01_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Change peaks.</summary>
public static string SkylineWindow_graphChromatogram_ChangedPeakBounds_Change_peaks { get { return ResourceManager.GetString("SkylineWindow_graphChromatogram_ChangedPeakBounds_Change_peaks", resourceCulture); } }

/// <summary>Looks up a localized string similar to Remove peak.</summary>
public static string SkylineWindow_graphChromatogram_ChangedPeakBounds_Remove_peak { get { return ResourceManager.GetString("SkylineWindow_graphChromatogram_ChangedPeakBounds_Remove_peak", resourceCulture); } }

/// <summary>Looks up a localized string similar to The raw file must be re-imported in order to show full scans: {0}.</summary>
public static string SkylineWindow_graphChromatogram_ClickedChromatogram_The_raw_file_must_be_re_imported_in_order_to_show_full_scans___0_ { get { return ResourceManager.GetString("SkylineWindow_graphChromatogram_ClickedChromatogram_The_raw_file_must_be_re_imported_in_order_to_show_full_scans___0_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Pick peak {0:F01}.</summary>
public static string SkylineWindow_graphChromatogram_PickedPeak_Pick_peak__0_F01_ { get { return ResourceManager.GetString("SkylineWindow_graphChromatogram_PickedPeak_Pick_peak__0_F01_", resourceCulture); } }

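// The {0:F01} placeholders in the peak-boundary strings above are standard .NET
// numeric format specifiers (fixed-point, one decimal place). A minimal sketch of
// the intended substitution, assuming, as above, an enclosing class named Resources
// and a local startTime double (assumptions for illustration):
//
//   string undoText = string.Format(
//       Resources.SkylineWindow_graphChromatogram_ChangedPeakBounds_Change_peak_start_to__0_F01_,
//       startTime);   // e.g. 12.34 -> "Change peak start to 12.3"
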
/// <summary>Looks up a localized string similar to Opening a document inside a ZIP file is not supported..</summary>
public static string SkylineWindow_HasFileToOpen_Opening_a_document_inside_a_ZIP_file_is_not_supported_ { get { return ResourceManager.GetString("SkylineWindow_HasFileToOpen_Opening_a_document_inside_a_ZIP_file_is_not_supported_", resourceCulture); } }

/// <summary>Looks up a localized string similar to The path to the file to open is too long..</summary>
public static string SkylineWindow_HasFileToOpen_The_path_to_the_file_to_open_is_too_long_ { get { return ResourceManager.GetString("SkylineWindow_HasFileToOpen_The_path_to_the_file_to_open_is_too_long_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Unzip the file {0} first and then open the extracted file {1}..</summary>
public static string SkylineWindow_HasFileToOpen_Unzip_the_file__0__first_and_then_open_the_extracted_file__1__ { get { return ResourceManager.GetString("SkylineWindow_HasFileToOpen_Unzip_the_file__0__first_and_then_open_the_extracted_file__1__", resourceCulture); } }

/// <summary>Looks up a localized string similar to Import Annotations.</summary>
public static string SkylineWindow_ImportAnnotations_Import_Annotations { get { return ResourceManager.GetString("SkylineWindow_ImportAnnotations_Import_Annotations", resourceCulture); } }

/// <summary>Looks up a localized string similar to There is an existing library with the same name {0} as the document library to be created. Overwrite?.</summary>
public static string SkylineWindow_ImportAssayLibrary_There_is_an_existing_library_with_the_same_name__0__as_the_document_library_to_be_created__Overwrite_ { get { return ResourceManager.GetString("SkylineWindow_ImportAssayLibrary_There_is_an_existing_library_with_the_same_name__0__as_the_document_library_to_be_created__Overwrite_", resourceCulture); } }

/// <summary>Looks up a localized string similar to You must save the Skyline document in order to import an assay library..</summary>
public static string SkylineWindow_ImportAssayLibrary_You_must_save_the_Skyline_document_in_order_to_import_an_assay_library_ { get { return ResourceManager.GetString("SkylineWindow_ImportAssayLibrary_You_must_save_the_Skyline_document_in_order_to_import_an_assay_library_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Assay Library.</summary>
public static string SkylineWindow_importAssayLibraryMenuItem_Click_Assay_Library { get { return ResourceManager.GetString("SkylineWindow_importAssayLibraryMenuItem_Click_Assay_Library", resourceCulture); } }

/// <summary>Looks up a localized string similar to Import Assay Library.</summary>
public static string SkylineWindow_importAssayLibraryMenuItem_Click_Import_Assay_Library { get { return ResourceManager.GetString("SkylineWindow_importAssayLibraryMenuItem_Click_Import_Assay_Library", resourceCulture); } }

/// <summary>Looks up a localized string similar to Failed importing file {0}. {1}.</summary>
public static string SkylineWindow_importDocumentMenuItem_Click_Failed_importing_file__0__1__ { get { return ResourceManager.GetString("SkylineWindow_importDocumentMenuItem_Click_Failed_importing_file__0__1__", resourceCulture); } }

/// <summary>Looks up a localized string similar to Failed importing files:.</summary>
public static string SkylineWindow_importDocumentMenuItem_Click_Failed_importing_files { get { return ResourceManager.GetString("SkylineWindow_importDocumentMenuItem_Click_Failed_importing_files", resourceCulture); } }

/// <summary>Looks up a localized string similar to Import Skyline Document.</summary>
public static string SkylineWindow_importDocumentMenuItem_Click_Import_Skyline_Document { get { return ResourceManager.GetString("SkylineWindow_importDocumentMenuItem_Click_Import_Skyline_Document", resourceCulture); } }

/// <summary>Looks up a localized string similar to OK.</summary>
public static string SkylineWindow_ImportFasta_OK { get { return ResourceManager.GetString("SkylineWindow_ImportFasta_OK", resourceCulture); } }

/// <summary>Looks up a localized string similar to This operation discarded {0} proteins with no peptides matching the current filter settings..</summary>
public static string SkylineWindow_ImportFasta_This_operation_discarded__0__proteins_with_no_peptides_matching_the_current_filter_settings_ { get { return ResourceManager.GetString("SkylineWindow_ImportFasta_This_operation_discarded__0__proteins_with_no_peptides_matching_the_current_filter_settings_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Unexpected document change during operation..</summary>
public static string SkylineWindow_ImportFasta_Unexpected_document_change_during_operation { get { return ResourceManager.GetString("SkylineWindow_ImportFasta_Unexpected_document_change_during_operation", resourceCulture); } }

/// <summary>Looks up a localized string similar to Would you like to use the Unimod definitions for the following modifications?.</summary>
public static string SkylineWindow_ImportFasta_Would_you_like_to_use_the_Unimod_definitions_for_the_following_modifications { get { return ResourceManager.GetString("SkylineWindow_ImportFasta_Would_you_like_to_use_the_Unimod_definitions_for_the_following_modifications", resourceCulture); } }

/// <summary>Looks up a localized string similar to Failed reading the file {0}. {1}.</summary>
public static string SkylineWindow_ImportFastaFile_Failed_reading_the_file__0__1__ { get { return ResourceManager.GetString("SkylineWindow_ImportFastaFile_Failed_reading_the_file__0__1__", resourceCulture); } }

/// <summary>Looks up a localized string similar to Import FASTA.</summary>
public static string SkylineWindow_ImportFastaFile_Import_FASTA { get { return ResourceManager.GetString("SkylineWindow_ImportFastaFile_Import_FASTA", resourceCulture); } }

/// <summary>Looks up a localized string similar to Import Skyline document data.</summary>
public static string SkylineWindow_ImportFiles_Import_Skyline_document_data { get { return ResourceManager.GetString("SkylineWindow_ImportFiles_Import_Skyline_document_data", resourceCulture); } }

/// <summary>Looks up a localized string similar to Importing {0}.</summary>
public static string SkylineWindow_ImportFiles_Importing__0__ { get { return ResourceManager.GetString("SkylineWindow_ImportFiles_Importing__0__", resourceCulture); } }

/// <summary>Looks up a localized string similar to {0} transitions contained errors..</summary>
public static string SkylineWindow_ImportMassList__0__transitions_contained_errors_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList__0__transitions_contained_errors_", resourceCulture); } }

/// <summary>Looks up a localized string similar to {0} transitions contained errors. Skip these {0} transitions and import the rest?.</summary>
public static string SkylineWindow_ImportMassList__0__transitions_contained_errors__Skip_these__0__transitions_and_import_the_rest_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList__0__transitions_contained_errors__Skip_these__0__transitions_and_import_the_rest_", resourceCulture); } }

/// <summary>Looks up a localized string similar to &amp;Create....</summary>
public static string SkylineWindow_ImportMassList__Create___ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList__Create___", resourceCulture); } }

/// <summary>Looks up a localized string similar to &amp;Keep.</summary>
public static string SkylineWindow_ImportMassList__Keep { get { return ResourceManager.GetString("SkylineWindow_ImportMassList__Keep", resourceCulture); } }

/// <summary>Looks up a localized string similar to &amp;Overwrite.</summary>
public static string SkylineWindow_ImportMassList__Overwrite { get { return ResourceManager.GetString("SkylineWindow_ImportMassList__Overwrite", resourceCulture); } }

/// <summary>Looks up a localized string similar to &amp;Skip.</summary>
public static string SkylineWindow_ImportMassList__Skip { get { return ResourceManager.GetString("SkylineWindow_ImportMassList__Skip", resourceCulture); } }

/// <summary>Looks up a localized string similar to Add.</summary>
public static string SkylineWindow_ImportMassList_Add { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_Add", resourceCulture); } }

/// <summary>Looks up a localized string similar to Adding iRT values..</summary>
public static string SkylineWindow_ImportMassList_Adding_iRT_values_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_Adding_iRT_values_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Failed reading the file. Peptide {0} matches an existing iRT standard peptide..</summary>
public static string SkylineWindow_ImportMassList_Failed_reading_the_file___Peptide__0__matches_an_existing_iRT_standard_peptide_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_Failed_reading_the_file___Peptide__0__matches_an_existing_iRT_standard_peptide_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Finishing up import.</summary>
public static string SkylineWindow_ImportMassList_Finishing_up_import { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_Finishing_up_import", resourceCulture); } }

/// <summary>Looks up a localized string similar to Keep the existing iRT value or overwrite with the imported value?.</summary>
public static string SkylineWindow_ImportMassList_Keep_the_existing_iRT_value_or_overwrite_with_the_imported_value_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_Keep_the_existing_iRT_value_or_overwrite_with_the_imported_value_", resourceCulture); } }

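// The &amp; entities in the summaries above are XML doc-comment escapes for a
// literal "&" in values such as "&Keep"; in Windows Forms that ampersand marks the
// mnemonic, underlining the following letter and making it reachable with Alt. A
// minimal sketch, assuming (for illustration only) a btnKeep button and the
// Resources class name used above:
//
//   btnKeep.Text = Resources.SkylineWindow_ImportMassList__Keep;   // shows "Keep" with K underlined
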
/// <summary>Looks up a localized string similar to Keep the existing iRT values or overwrite with imported values?.</summary>
public static string SkylineWindow_ImportMassList_Keep_the_existing_iRT_values_or_overwrite_with_imported_values_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_Keep_the_existing_iRT_values_or_overwrite_with_imported_values_", resourceCulture); } }

/// <summary>Looks up a localized string similar to The file does not contain intensities. Valid column names for intensities are:.</summary>
public static string SkylineWindow_ImportMassList_The_file_does_not_contain_intensities__Valid_column_names_for_intensities_are_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_The_file_does_not_contain_intensities__Valid_column_names_for_intensities_are_", resourceCulture); } }

/// <summary>Looks up a localized string similar to The file does not contain iRTs. Valid column names for iRTs are:.</summary>
public static string SkylineWindow_ImportMassList_The_file_does_not_contain_iRTs__Valid_column_names_for_iRTs_are_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_The_file_does_not_contain_iRTs__Valid_column_names_for_iRTs_are_", resourceCulture); } }

/// <summary>Looks up a localized string similar to The iRT calculator already contains {0} of the imported peptides..</summary>
public static string SkylineWindow_ImportMassList_The_iRT_calculator_already_contains__0__of_the_imported_peptides_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_The_iRT_calculator_already_contains__0__of_the_imported_peptides_", resourceCulture); } }

/// <summary>Looks up a localized string similar to The transition list appears to contain iRT library values. Add these iRT values to the iRT calculator?.</summary>
public static string SkylineWindow_ImportMassList_The_transition_list_appears_to_contain_iRT_library_values___Add_these_iRT_values_to_the_iRT_calculator_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_The_transition_list_appears_to_contain_iRT_library_values___Add_these_iRT_values_to_the_iRT_calculator_", resourceCulture); } }

/// <summary>Looks up a localized string similar to The transition list appears to contain iRT values, but the document does not have an iRT calculator. Create a new calculator and add these iRT values?.</summary>
public static string SkylineWindow_ImportMassList_The_transition_list_appears_to_contain_iRT_values__but_the_document_does_not_have_an_iRT_calculator___Create_a_new_calculator_and_add_these_iRT_values_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_The_transition_list_appears_to_contain_iRT_values__but_the_document_does_not_have_an_iRT_calculator___Create_a_new_calculator_and_add_these_iRT_values_", resourceCulture); } }

/// <summary>Looks up a localized string similar to The transition list appears to contain spectral library intensities. Create a document library from these intensities?.</summary>
public static string SkylineWindow_ImportMassList_The_transition_list_appears_to_contain_spectral_library_intensities___Create_a_document_library_from_these_intensities_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_The_transition_list_appears_to_contain_spectral_library_intensities___Create_a_document_library_from_these_intensities_", resourceCulture); } }

/// <summary>
/// Looks up a localized string similar to There is an existing library with the same name {0} as the document library to be created.
/// Overwrite this library or skip import of library intensities?.
/// </summary>
public static string SkylineWindow_ImportMassList_There_is_an_existing_library_with_the_same_name__0__as_the_document_library_to_be_created___Overwrite_this_library_or_skip_import_of_library_intensities_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_There_is_an_existing_library_with_the_same_name__0__as_the_document_library_to_be_created___Overwrite_this_library_or_skip_import_of_library_intensities_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Unexpected document change during operation: {0}.</summary>
public static string SkylineWindow_ImportMassList_Unexpected_document_change_during_operation___0_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_Unexpected_document_change_during_operation___0_", resourceCulture); } }

/// <summary>Looks up a localized string similar to You must save the Skyline document in order to create a spectral library from a transition list..</summary>
public static string SkylineWindow_ImportMassList_You_must_save_the_Skyline_document_in_order_to_create_a_spectral_library_from_a_transition_list_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassList_You_must_save_the_Skyline_document_in_order_to_create_a_spectral_library_from_a_transition_list_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Creating Spectral Library.</summary>
public static string SkylineWindow_ImportMassListIntensities_Creating_Spectral_Library { get { return ResourceManager.GetString("SkylineWindow_ImportMassListIntensities_Creating_Spectral_Library", resourceCulture); } }

/// <summary>Looks up a localized string similar to The standard peptides do not appear to be on the iRT-C18 scale. Would you like to recalibrate them to this scale?.</summary>
public static string SkylineWindow_ImportMassListIrts_The_standard_peptides_do_not_appear_to_be_on_the_iRT_C18_scale__Would_you_like_to_recalibrate_them_to_this_scale_ { get { return ResourceManager.GetString("SkylineWindow_ImportMassListIrts_The_standard_peptides_do_not_appear_to_be_on_the_iRT_C18_scale__Would_you_like_to_recalibrate_them_to_this_scale_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Data columns not found in first line..</summary>
public static string SkylineWindow_importMassListMenuItem_Click_Data_columns_not_found_in_first_line { get { return ResourceManager.GetString("SkylineWindow_importMassListMenuItem_Click_Data_columns_not_found_in_first_line", resourceCulture); } }

/// <summary>Looks up a localized string similar to Import transition list.</summary>
public static string SkylineWindow_importMassListMenuItem_Click_Import_transition_list { get { return ResourceManager.GetString("SkylineWindow_importMassListMenuItem_Click_Import_transition_list", resourceCulture); } }

/// <summary>Looks up a localized string similar to Import Transition List.</summary>
public static string SkylineWindow_importMassListMenuItem_Click_Import_Transition_List_title { get { return ResourceManager.GetString("SkylineWindow_importMassListMenuItem_Click_Import_Transition_List_title", resourceCulture); } }

/// <summary>Looks up a localized string similar to Transition List.</summary>
public static string SkylineWindow_importMassListMenuItem_Click_Transition_List { get { return ResourceManager.GetString("SkylineWindow_importMassListMenuItem_Click_Transition_List", resourceCulture); } }

/// <summary>Looks up a localized string similar to Continue peak boundary import ignoring these files?.</summary>
public static string SkylineWindow_ImportPeakBoundaries_Continue_peak_boundary_import_ignoring_these_files_ { get { return ResourceManager.GetString("SkylineWindow_ImportPeakBoundaries_Continue_peak_boundary_import_ignoring_these_files_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Continue peak boundary import ignoring these peptides?.</summary>
public static string SkylineWindow_ImportPeakBoundaries_Continue_peak_boundary_import_ignoring_these_peptides_ { get { return ResourceManager.GetString("SkylineWindow_ImportPeakBoundaries_Continue_peak_boundary_import_ignoring_these_peptides_", resourceCulture); } }

/// <summary>Looks up a localized string similar to Import Peak Boundaries.</summary>
public static string SkylineWindow_ImportPeakBoundaries_Import_PeakBoundaries { get { return ResourceManager.GetString("SkylineWindow_ImportPeakBoundaries_Import_PeakBoundaries", resourceCulture); } }

/// <summary>Looks up a localized string similar to The following {0} file names in the peak boundaries file were not recognized:.</summary>
public static string SkylineWindow_ImportPeakBoundaries_The_following__0__file_names_in_the_peak_boundaries_file_were_not_recognized_ { get { return ResourceManager.GetString("SkylineWindow_ImportPeakBoundaries_The_following__0__file_names_in_the_peak_boundaries_file_were_not_recognized_", resourceCulture); } }

/// <summary>Looks up a localized string similar to The following {0} peptides in the peak boundaries file were not recognized:.</summary>
public static string SkylineWindow_ImportPeakBoundaries_The_following__0__peptides_in_the_peak_boundaries_file_were_not_recognized__ { get { return ResourceManager.GetString("SkylineWindow_ImportPeakBoundaries_The_following__0__peptides_in_the_peak_boundaries_file_were_not_recognized__", resourceCulture); } }

/// <summary>Looks up a localized string similar to Unexpected Document Change During Operation..</summary>
public static string SkylineWindow_ImportPeakBoundaries_Unexpected_document_change_during_operation { get { return ResourceManager.GetString("SkylineWindow_ImportPeakBoundaries_Unexpected_document_change_during_operation", resourceCulture); } }

/// <summary>Looks up a localized string similar to Failed reading the file {0}..</summary>
public static string SkylineWindow_ImportPeakBoundariesFile_Failed_reading_the_file__0__ { get { return ResourceManager.GetString("SkylineWindow_ImportPeakBoundariesFile_Failed_reading_the_file__0__", resourceCulture); } }

/// <summary>Looks up a localized string similar to {0} decoys do not have a matching target.</summary>
public static string SkylineWindow_ImportResults__0__decoys_do_not_have_a_matching_target { get { return ResourceManager.GetString("SkylineWindow_ImportResults__0__decoys_do_not_have_a_matching_target", resourceCulture); } }

/// <summary>
/// Looks up a localized string similar to {0} decoys do not have the same number of transitions as their matching targets
/// </summary> public static string SkylineWindow_ImportResults__0__decoys_do_not_have_the_same_number_of_transitions_as_their_matching_targets { get { return ResourceManager.GetString("SkylineWindow_ImportResults__0__decoys_do_not_have_the_same_number_of_transitions" + "_as_their_matching_targets", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1 decoy does not have a matching target. /// </summary> public static string SkylineWindow_ImportResults_1_decoy_does_not_have_a_matching_target { get { return ResourceManager.GetString("SkylineWindow_ImportResults_1_decoy_does_not_have_a_matching_target", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1 decoy does not have the same number of transitions as its matching target. /// </summary> public static string SkylineWindow_ImportResults_1_decoy_does_not_have_the_same_number_of_transitions_as_its_matching_target { get { return ResourceManager.GetString("SkylineWindow_ImportResults_1_decoy_does_not_have_the_same_number_of_transitions_" + "as_its_matching_target", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A maximum of {0} may be missing or outliers for a successful import.. /// </summary> public static string SkylineWindow_ImportResults_A_maximum_of__0__may_be_missing_and_or_outliers_for_a_successful_import_ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_A_maximum_of__0__may_be_missing_and_or_outliers_for_a" + "_successful_import_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A regression for declustering potential must be selected in the Prediction tab of the Transition Settings in order to import optimization data for declustering potential.. /// </summary> public static string SkylineWindow_ImportResults_A_regression_for_declustering_potention_must_be_selected_in_the_Prediction_tab_of_the_Transition_Settings_in_order_to_import_optimization_data_for_decluserting_potential { get { return ResourceManager.GetString("SkylineWindow_ImportResults_A_regression_for_declustering_potention_must_be_selec" + "ted_in_the_Prediction_tab_of_the_Transition_Settings_in_order_to_import_optimiza" + "tion_data_for_decluserting_potential", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add missing iRT standard peptides to your document or change the retention time predictor.. /// </summary> public static string SkylineWindow_ImportResults_Add_missing_iRT_standard_peptides_to_your_document_or_change_the_retention_time_predictor_ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_Add_missing_iRT_standard_peptides_to_your_document_or" + "_change_the_retention_time_predictor_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure? Peak scoring models trained with non-matching targets and decoys may produce incorrect results.. /// </summary> public static string SkylineWindow_ImportResults_Are_you_sure__Peak_scoring_models_trained_with_non_matching_targets_and_decoys_may_produce_incorrect_results_ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_Are_you_sure__Peak_scoring_models_trained_with_non_ma" + "tching_targets_and_decoys_may_produce_incorrect_results_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Continue.
/// </summary> public static string SkylineWindow_ImportResults_Continue { get { return ResourceManager.GetString("SkylineWindow_ImportResults_Continue", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you want to continue?. /// </summary> public static string SkylineWindow_ImportResults_Do_you_want_to_continue_ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_Do_you_want_to_continue_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you want to generate new decoys or continue with the current decoys?. /// </summary> public static string SkylineWindow_ImportResults_Do_you_want_to_generate_new_decoys_or_continue_with_the_current_decoys_ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_Do_you_want_to_generate_new_decoys_or_continue_with_t" + "he_current_decoys_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Generate. /// </summary> public static string SkylineWindow_ImportResults_Generate { get { return ResourceManager.GetString("SkylineWindow_ImportResults_Generate", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Import {0}. /// </summary> public static string SkylineWindow_ImportResults_Import__0__ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_Import__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Import results. /// </summary> public static string SkylineWindow_ImportResults_Import_results { get { return ResourceManager.GetString("SkylineWindow_ImportResults_Import_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to None may be missing or outliers for a successful import.. /// </summary> public static string SkylineWindow_ImportResults_None_may_be_missing_or_outliers_for_a_successful_import_ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_None_may_be_missing_or_outliers_for_a_successful_impo" + "rt_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document contains {0} of these iRT standard peptides.. /// </summary> public static string SkylineWindow_ImportResults_The_document_contains__0__of_these_iRT_standard_peptides_ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_The_document_contains__0__of_these_iRT_standard_pepti" + "des_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document contains a decoy that does not match the targets:. /// </summary> public static string SkylineWindow_ImportResults_The_document_contains_a_decoy_that_does_not_match_the_targets_ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_The_document_contains_a_decoy_that_does_not_match_the" + "_targets_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document contains decoys that do not match the targets. Out of {0} decoys:. /// </summary> public static string SkylineWindow_ImportResults_The_document_contains_decoys_that_do_not_match_the_targets__Out_of__0__decoys_ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_The_document_contains_decoys_that_do_not_match_the_ta" + "rgets__Out_of__0__decoys_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document does not contain any of these iRT standard peptides.. 
/// </summary> public static string SkylineWindow_ImportResults_The_document_does_not_contain_any_of_these_iRT_standard_peptides_ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_The_document_does_not_contain_any_of_these_iRT_standa" + "rd_peptides_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document only contains {0} of these iRT standard peptides.. /// </summary> public static string SkylineWindow_ImportResults_The_document_only_contains__0__of_these_iRT_standard_peptides_ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_The_document_only_contains__0__of_these_iRT_standard_" + "peptides_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The following iRT standard peptides are missing from the document:. /// </summary> public static string SkylineWindow_ImportResults_The_following_iRT_standard_peptides_are_missing_from_the_document_ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_The_following_iRT_standard_peptides_are_missing_from_" + "the_document_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to With {0} standard peptides, {1} are required with a correlation of {2}.. /// </summary> public static string SkylineWindow_ImportResults_With__0__standard_peptides___1__are_required_with_a_correlation_of__2__ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_With__0__standard_peptides___1__are_required_with_a_c" + "orrelation_of__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You must add at least one target transition before importing results.. /// </summary> public static string SkylineWindow_ImportResults_You_must_add_at_least_one_target_transition_before_importing_results_ { get { return ResourceManager.GetString("SkylineWindow_ImportResults_You_must_add_at_least_one_target_transition_before_im" + "porting_results_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You must save this document before importing results.. /// </summary> public static string SkylineWindow_ImportResults_You_must_save_this_document_before_importing_results { get { return ResourceManager.GetString("SkylineWindow_ImportResults_You_must_save_this_document_before_importing_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Clear integrate all. /// </summary> public static string SkylineWindow_IntegrateAll_Clear_integrate_all { get { return ResourceManager.GetString("SkylineWindow_IntegrateAll_Clear_integrate_all", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Set integrate all. /// </summary> public static string SkylineWindow_IntegrateAll_Set_integrate_all { get { return ResourceManager.GetString("SkylineWindow_IntegrateAll_Set_integrate_all", resourceCulture); } } /// <summary> /// Looks up a localized string similar to In the Peptide Settings - Prediction tab, click the calculator button to edit the current iRT calculator.. 
/// </summary> public static string SkylineWindow_irtStandardContextMenuItem_Click_In_the_Peptide_Settings___Prediction_tab__click_the_calculator_button_to_edit_the_current_iRT_calculator_ { get { return ResourceManager.GetString("SkylineWindow_irtStandardContextMenuItem_Click_In_the_Peptide_Settings___Predicti" + "on_tab__click_the_calculator_button_to_edit_the_current_iRT_calculator_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The standard peptides for an iRT calculator can only be set in the iRT calculator editor.. /// </summary> public static string SkylineWindow_irtStandardContextMenuItem_Click_The_standard_peptides_for_an_iRT_calculator_can_only_be_set_in_the_iRT_calculator_editor_ { get { return ResourceManager.GetString("SkylineWindow_irtStandardContextMenuItem_Click_The_standard_peptides_for_an_iRT_c" + "alculator_can_only_be_set_in_the_iRT_calculator_editor_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Must be called from event thread. /// </summary> public static string SkylineWindow_IsGraphUpdatePending_Must_be_called_from_event_thread { get { return ResourceManager.GetString("SkylineWindow_IsGraphUpdatePending_Must_be_called_from_event_thread", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A failure occurred attempting to re-import results.. /// </summary> public static string SkylineWindow_ManageResults_A_failure_occurred_attempting_to_reimport_results { get { return ResourceManager.GetString("SkylineWindow_ManageResults_A_failure_occurred_attempting_to_reimport_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to remove library runs from the document library.. /// </summary> public static string SkylineWindow_ManageResults_Failed_to_remove_library_runs_from_the_document_library_ { get { return ResourceManager.GetString("SkylineWindow_ManageResults_Failed_to_remove_library_runs_from_the_document_libra" + "ry_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to remove library runs from the MIDAS library.. /// </summary> public static string SkylineWindow_ManageResults_Failed_to_remove_library_runs_from_the_MIDAS_library_ { get { return ResourceManager.GetString("SkylineWindow_ManageResults_Failed_to_remove_library_runs_from_the_MIDAS_library_" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Manage results. /// </summary> public static string SkylineWindow_ManageResults_Manage_results { get { return ResourceManager.GetString("SkylineWindow_ManageResults_Manage_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must contain mass spec data to manage results.. /// </summary> public static string SkylineWindow_ManageResults_The_document_must_contain_mass_spec_data_to_manage_results_ { get { return ResourceManager.GetString("SkylineWindow_ManageResults_The_document_must_contain_mass_spec_data_to_manage_re" + "sults_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mark transitions non-quantitative. /// </summary> public static string SkylineWindow_MarkQuantitative_Mark_transitions_non_quantitative { get { return ResourceManager.GetString("SkylineWindow_MarkQuantitative_Mark_transitions_non_quantitative", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mark transitions quantitative. 
/// </summary> public static string SkylineWindow_MarkQuantitative_Mark_transitions_quantitative { get { return ResourceManager.GetString("SkylineWindow_MarkQuantitative_Mark_transitions_quantitative", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure attempting to modify the document.. /// </summary> public static string SkylineWindow_ModifyDocument_Failure_attempting_to_modify_the_document { get { return ResourceManager.GetString("SkylineWindow_ModifyDocument_Failure_attempting_to_modify_the_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Modify {0}. /// </summary> public static string SkylineWindow_ModifyPeptide_Modify__0__ { get { return ResourceManager.GetString("SkylineWindow_ModifyPeptide_Modify__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Modify Molecule. /// </summary> public static string SkylineWindow_ModifyPeptide_Modify_Small_Molecule { get { return ResourceManager.GetString("SkylineWindow_ModifyPeptide_Modify_Small_Molecule", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Modify Custom Ion Precursor. /// </summary> public static string SkylineWindow_ModifySmallMoleculeTransitionGroup_Modify_Custom_Ion_Precursor { get { return ResourceManager.GetString("SkylineWindow_ModifySmallMoleculeTransitionGroup_Modify_Custom_Ion_Precursor", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Modify product ion {0}. /// </summary> public static string SkylineWindow_ModifyTransition_Modify_product_ion__0_ { get { return ResourceManager.GetString("SkylineWindow_ModifyTransition_Modify_product_ion__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Modify Transition. /// </summary> public static string SkylineWindow_ModifyTransition_Modify_Transition { get { return ResourceManager.GetString("SkylineWindow_ModifyTransition_Modify_Transition", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An unexpected error has prevented global settings changes from this session from being saved.. /// </summary> public static string SkylineWindow_OnClosing_An_unexpected_error_has_prevented_global_settings_changes_from_this_session_from_being_saved { get { return ResourceManager.GetString("SkylineWindow_OnClosing_An_unexpected_error_has_prevented_global_settings_changes" + "_from_this_session_from_being_saved", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure opening {0}.. /// </summary> public static string SkylineWindow_OpenFile_Failure_opening__0__ { get { return ResourceManager.GetString("SkylineWindow_OpenFile_Failure_opening__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading.... /// </summary> public static string SkylineWindow_OpenFile_Loading___ { get { return ResourceManager.GetString("SkylineWindow_OpenFile_Loading___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file you are trying to open (&quot;{0}&quot;) does not appear to be a Skyline document. Skyline documents normally have a &quot;{1}&quot; or &quot;{2}&quot; filename extension and are in XML format.. 
/// </summary> public static string SkylineWindow_OpenFile_The_file_you_are_trying_to_open____0____does_not_appear_to_be_a_Skyline_document__Skyline_documents_normally_have_a___1___or___2___filename_extension_and_are_in_XML_format_ { get { return ResourceManager.GetString("SkylineWindow_OpenFile_The_file_you_are_trying_to_open____0____does_not_appear_to" + "_be_a_Skyline_document__Skyline_documents_normally_have_a___1___or___2___filenam" + "e_extension_and_are_in_XML_format_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Extracting Files. /// </summary> public static string SkylineWindow_OpenSharedFile_Extracting_Files { get { return ResourceManager.GetString("SkylineWindow_OpenSharedFile_Extracting_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure extracting Skyline document from zip file {0}.. /// </summary> public static string SkylineWindow_OpenSharedFile_Failure_extracting_Skyline_document_from_zip_file__0__ { get { return ResourceManager.GetString("SkylineWindow_OpenSharedFile_Failure_extracting_Skyline_document_from_zip_file__0" + "__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The zip file {0} cannot be read.. /// </summary> public static string SkylineWindow_OpenSharedFile_The_zip_file__0__cannot_be_read { get { return ResourceManager.GetString("SkylineWindow_OpenSharedFile_The_zip_file__0__cannot_be_read", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Could not perform transition list paste: {0}. /// </summary> public static string SkylineWindow_Paste_Could_not_perform_transition_list_paste___0_ { get { return ResourceManager.GetString("SkylineWindow_Paste_Could_not_perform_transition_list_paste___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Empty sequence found at line {0}.. /// </summary> public static string SkylineWindow_Paste_Empty_sequence_found_at_line__0__ { get { return ResourceManager.GetString("SkylineWindow_Paste_Empty_sequence_found_at_line__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed reading Skyline document from the clipboard.. /// </summary> public static string SkylineWindow_Paste_Failed_reading_Skyline_document_from_the_clipboard_ { get { return ResourceManager.GetString("SkylineWindow_Paste_Failed_reading_Skyline_document_from_the_clipboard_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Paste. /// </summary> public static string SkylineWindow_Paste_Paste { get { return ResourceManager.GetString("SkylineWindow_Paste_Paste", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Paste {0}. /// </summary> public static string SkylineWindow_Paste_Paste__0__ { get { return ResourceManager.GetString("SkylineWindow_Paste_Paste__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Paste FASTA. /// </summary> public static string SkylineWindow_Paste_Paste_FASTA { get { return ResourceManager.GetString("SkylineWindow_Paste_Paste_FASTA", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Paste peptide list. /// </summary> public static string SkylineWindow_Paste_Paste_peptide_list { get { return ResourceManager.GetString("SkylineWindow_Paste_Paste_peptide_list", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Paste proteins. 
/// </summary> public static string SkylineWindow_Paste_Paste_proteins { get { return ResourceManager.GetString("SkylineWindow_Paste_Paste_proteins", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Paste transition list. /// </summary> public static string SkylineWindow_Paste_Paste_transition_list { get { return ResourceManager.GetString("SkylineWindow_Paste_Paste_transition_list", resourceCulture); } } /// <summary> /// Looks up a localized string similar to peptides. /// </summary> public static string SkylineWindow_Paste_peptides { get { return ResourceManager.GetString("SkylineWindow_Paste_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Protein sequence not found.. /// </summary> public static string SkylineWindow_Paste_Protein_sequence_not_found { get { return ResourceManager.GetString("SkylineWindow_Paste_Protein_sequence_not_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to proteins. /// </summary> public static string SkylineWindow_Paste_proteins { get { return ResourceManager.GetString("SkylineWindow_Paste_proteins", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The protein sequence must be the last value in each line.. /// </summary> public static string SkylineWindow_Paste_The_protein_sequence_must_be_the_last_value_in_each_line { get { return ResourceManager.GetString("SkylineWindow_Paste_The_protein_sequence_must_be_the_last_value_in_each_line", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected character &apos;.&apos; found.. /// </summary> public static string SkylineWindow_Paste_Unexpected_character_period_found { get { return ResourceManager.GetString("SkylineWindow_Paste_Unexpected_character_period_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Apply picked peak. /// </summary> public static string SkylineWindow_PickPeakInChromatograms_Apply_picked_peak { get { return ResourceManager.GetString("SkylineWindow_PickPeakInChromatograms_Apply_picked_peak", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error retrieving server information: {0}. /// </summary> public static string SkylineWindow_Publish_Error_retrieving_server_information__0__ { get { return ResourceManager.GetString("SkylineWindow_Publish_Error_retrieving_server_information__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retrieving server information.. /// </summary> public static string SkylineWindow_Publish_Retrieving_server_information { get { return ResourceManager.GetString("SkylineWindow_Publish_Retrieving_server_information", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There are no Panorama servers to publish to. Please add a server under Tools.. /// </summary> public static string SkylineWindow_Publish_There_are_no_Panorama_servers_to_publish_to_Please_add_a_server_under_Tools { get { return ResourceManager.GetString("SkylineWindow_Publish_There_are_no_Panorama_servers_to_publish_to_Please_add_a_se" + "rver_under_Tools", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skyline Shared Documents. 
/// </summary> public static string SkylineWindow_publishToolStripMenuItem_Click_Skyline_Shared_Documents { get { return ResourceManager.GetString("SkylineWindow_publishToolStripMenuItem_Click_Skyline_Shared_Documents", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This file was last uploaded to: {0}. /// </summary> public static string SkylineWindow_PublishToSavedUri_This_file_was_last_uploaded_to___0_ { get { return ResourceManager.GetString("SkylineWindow_PublishToSavedUri_This_file_was_last_uploaded_to___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Upload to the same location?. /// </summary> public static string SkylineWindow_PublishToSavedUri_Upload_to_the_same_location_ { get { return ResourceManager.GetString("SkylineWindow_PublishToSavedUri_Upload_to_the_same_location_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reimporting chromatograms. /// </summary> public static string SkylineWindow_ReimportChromatograms_Reimporting_chromatograms { get { return ResourceManager.GetString("SkylineWindow_ReimportChromatograms_Reimporting_chromatograms", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove peptides above CV cutoff. /// </summary> public static string SkylineWindow_RemoveAboveCVCutoff_Remove_peptides_above_CV_cutoff { get { return ResourceManager.GetString("SkylineWindow_RemoveAboveCVCutoff_Remove_peptides_above_CV_cutoff", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove duplicate peptides. /// </summary> public static string SkylineWindow_removeDuplicatePeptidesMenuItem_Click_Remove_duplicate_peptides { get { return ResourceManager.GetString("SkylineWindow_removeDuplicatePeptidesMenuItem_Click_Remove_duplicate_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove empty peptides. /// </summary> public static string SkylineWindow_removeEmptyPeptidesMenuItem_Click_Remove_empty_peptides { get { return ResourceManager.GetString("SkylineWindow_removeEmptyPeptidesMenuItem_Click_Remove_empty_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove empty proteins. /// </summary> public static string SkylineWindow_removeEmptyProteinsMenuItem_Click_Remove_empty_proteins { get { return ResourceManager.GetString("SkylineWindow_removeEmptyProteinsMenuItem_Click_Remove_empty_proteins", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove missing results. /// </summary> public static string SkylineWindow_RemoveMissingResults_Remove_missing_results { get { return ResourceManager.GetString("SkylineWindow_RemoveMissingResults_Remove_missing_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove all peaks from {0}. /// </summary> public static string SkylineWindow_RemovePeak_Remove_all_peaks_from__0__ { get { return ResourceManager.GetString("SkylineWindow_RemovePeak_Remove_all_peaks_from__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove peak from {0}. /// </summary> public static string SkylineWindow_RemovePeak_Remove_peak_from__0__ { get { return ResourceManager.GetString("SkylineWindow_RemovePeak_Remove_peak_from__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove all peaks from {0}. 
/// </summary> public static string SkylineWindow_removePeakContextMenuItem_Click_Remove_all_peaks_from__0_ { get { return ResourceManager.GetString("SkylineWindow_removePeakContextMenuItem_Click_Remove_all_peaks_from__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to All. /// </summary> public static string SkylineWindow_removePeaksGraphMenuItem_DropDownOpening_All { get { return ResourceManager.GetString("SkylineWindow_removePeaksGraphMenuItem_DropDownOpening_All", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove repeated peptides. /// </summary> public static string SkylineWindow_removeRepeatedPeptidesMenuItem_Click_Remove_repeated_peptides { get { return ResourceManager.GetString("SkylineWindow_removeRepeatedPeptidesMenuItem_Click_Remove_repeated_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Remove retention time outliers. /// </summary> public static string SkylineWindow_RemoveRTOutliers_Remove_retention_time_outliers { get { return ResourceManager.GetString("SkylineWindow_RemoveRTOutliers_Remove_retention_time_outliers", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reset default settings. /// </summary> public static string SkylineWindow_ResetDefaultSettings_Reset_default_settings { get { return ResourceManager.GetString("SkylineWindow_ResetDefaultSettings_Reset_default_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to restore document. /// </summary> public static string SkylineWindow_RestoreDocument_Failed_to_restore_document { get { return ResourceManager.GetString("SkylineWindow_RestoreDocument_Failed_to_restore_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed writing to {0}.. /// </summary> public static string SkylineWindow_SaveDocument_Failed_writing_to__0__ { get { return ResourceManager.GetString("SkylineWindow_SaveDocument_Failed_writing_to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Optimizing data file.... /// </summary> public static string SkylineWindow_SaveDocument_Optimizing_data_file___ { get { return ResourceManager.GetString("SkylineWindow_SaveDocument_Optimizing_data_file___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Saving.... /// </summary> public static string SkylineWindow_SaveDocument_Saving___ { get { return ResourceManager.GetString("SkylineWindow_SaveDocument_Saving___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skyline Documents. /// </summary> public static string SkylineWindow_SaveDocumentAs_Skyline_Documents { get { return ResourceManager.GetString("SkylineWindow_SaveDocumentAs_Skyline_Documents", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must be fully loaded before it can be saved to a new name.. /// </summary> public static string SkylineWindow_SaveDocumentAs_The_document_must_be_fully_loaded_before_it_can_be_saved_to_a_new_name { get { return ResourceManager.GetString("SkylineWindow_SaveDocumentAs_The_document_must_be_fully_loaded_before_it_can_be_s" + "aved_to_a_new_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Name settings. 
/// </summary> public static string SkylineWindow_SaveSettings_Name_settings { get { return ResourceManager.GetString("SkylineWindow_SaveSettings_Name_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Would you like to use the Unimod definitions for the following modifications?. /// </summary> public static string SkylineWindow_sequenceTree_AfterLabelEdit_Would_you_like_to_use_the_Unimod_definitions_for_the_following_modifications { get { return ResourceManager.GetString("SkylineWindow_sequenceTree_AfterLabelEdit_Would_you_like_to_use_the_Unimod_defini" + "tions_for_the_following_modifications", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add {0}. /// </summary> public static string SkylineWindow_sequenceTree_AfterNodeEdit_Add__0__ { get { return ResourceManager.GetString("SkylineWindow_sequenceTree_AfterNodeEdit_Add__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit name {0}. /// </summary> public static string SkylineWindow_sequenceTree_AfterNodeEdit_Edit_name__0__ { get { return ResourceManager.GetString("SkylineWindow_sequenceTree_AfterNodeEdit_Edit_name__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Drag and drop. /// </summary> public static string SkylineWindow_sequenceTree_DragDrop_Drag_and_drop { get { return ResourceManager.GetString("SkylineWindow_sequenceTree_DragDrop_Drag_and_drop", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Pick {0}. /// </summary> public static string SkylineWindow_sequenceTree_PickedChildrenEvent_Pick__0__ { get { return ResourceManager.GetString("SkylineWindow_sequenceTree_PickedChildrenEvent_Pick__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Clear standard type. /// </summary> public static string SkylineWindow_SetStandardType_Clear_standard_type { get { return ResourceManager.GetString("SkylineWindow_SetStandardType_Clear_standard_type", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Set standard type to {0}. /// </summary> public static string SkylineWindow_SetStandardType_Set_standard_type_to__0_ { get { return ResourceManager.GetString("SkylineWindow_SetStandardType_Set_standard_type_to__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Auto. /// </summary> public static string SkylineWindow_SetupCalculatorChooser_Auto { get { return ResourceManager.GetString("SkylineWindow_SetupCalculatorChooser_Auto", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Compressing Files. /// </summary> public static string SkylineWindow_ShareDocument_Compressing_Files { get { return ResourceManager.GetString("SkylineWindow_ShareDocument_Compressing_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed attempting to create sharing file {0}.. /// </summary> public static string SkylineWindow_ShareDocument_Failed_attempting_to_create_sharing_file__0__ { get { return ResourceManager.GetString("SkylineWindow_ShareDocument_Failed_attempting_to_create_sharing_file__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Share Document. 
/// </summary> public static string SkylineWindow_shareDocumentMenuItem_Click_Share_Document { get { return ResourceManager.GetString("SkylineWindow_shareDocumentMenuItem_Click_Share_Document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skyline Shared Documents. /// </summary> public static string SkylineWindow_shareDocumentMenuItem_Click_Skyline_Shared_Documents { get { return ResourceManager.GetString("SkylineWindow_shareDocumentMenuItem_Click_Skyline_Shared_Documents", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must be fully loaded before it can be shared.. /// </summary> public static string SkylineWindow_shareDocumentMenuItem_Click_The_document_must_be_fully_loaded_before_it_can_be_shared { get { return ResourceManager.GetString("SkylineWindow_shareDocumentMenuItem_Click_The_document_must_be_fully_loaded_befor" + "e_it_can_be_shared", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must be saved before it can be shared.. /// </summary> public static string SkylineWindow_shareDocumentMenuItem_Click_The_document_must_be_saved_before_it_can_be_shared { get { return ResourceManager.GetString("SkylineWindow_shareDocumentMenuItem_Click_The_document_must_be_saved_before_it_ca" + "n_be_shared", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Show {0} Score. /// </summary> public static string SkylineWindow_ShowCalculatorScoreFormat { get { return ResourceManager.GetString("SkylineWindow_ShowCalculatorScoreFormat", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must have imported results.. /// </summary> public static string SkylineWindow_ShowChromatogramFeaturesDialog_The_document_must_have_imported_results_ { get { return ResourceManager.GetString("SkylineWindow_ShowChromatogramFeaturesDialog_The_document_must_have_imported_resu" + "lts_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must have targets for which to export chromatograms.. /// </summary> public static string SkylineWindow_ShowChromatogramFeaturesDialog_The_document_must_have_targets_for_which_to_export_chromatograms_ { get { return ResourceManager.GetString("SkylineWindow_ShowChromatogramFeaturesDialog_The_document_must_have_targets_for_w" + "hich_to_export_chromatograms_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must be fully loaded in order to compare model peak picking.. /// </summary> public static string SkylineWindow_ShowCompareModelsDlg_The_document_must_be_fully_loaded_in_order_to_compare_model_peak_picking_ { get { return ResourceManager.GetString("SkylineWindow_ShowCompareModelsDlg_The_document_must_be_fully_loaded_in_order_to_" + "compare_model_peak_picking_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must have targets in order to compare model peak picking.. /// </summary> public static string SkylineWindow_ShowCompareModelsDlg_The_document_must_have_targets_in_order_to_compare_model_peak_picking_ { get { return ResourceManager.GetString("SkylineWindow_ShowCompareModelsDlg_The_document_must_have_targets_in_order_to_com" + "pare_model_peak_picking_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Change document settings. 
/// </summary> public static string SkylineWindow_ShowDocumentSettingsDialog_Change_document_settings { get { return ResourceManager.GetString("SkylineWindow_ShowDocumentSettingsDialog_Change_document_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Update {0} calculator. /// </summary> public static string SkylineWindow_ShowEditCalculatorDlg_Update__0__calculator { get { return ResourceManager.GetString("SkylineWindow_ShowEditCalculatorDlg_Update__0__calculator", resourceCulture); } } /// <summary> /// Looks up a localized string similar to ESP Feature Files. /// </summary> public static string SkylineWindow_ShowExportEspFeaturesDialog_ESP_Feature_Files { get { return ResourceManager.GetString("SkylineWindow_ShowExportEspFeaturesDialog_ESP_Feature_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Export ESP Features. /// </summary> public static string SkylineWindow_ShowExportEspFeaturesDialog_Export_ESP_Features { get { return ResourceManager.GetString("SkylineWindow_ShowExportEspFeaturesDialog_Export_ESP_Features", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed attempting to save ESP features to {0}.. /// </summary> public static string SkylineWindow_ShowExportEspFeaturesDialog_Failed_attempting_to_save_ESP_features_to__0__ { get { return ResourceManager.GetString("SkylineWindow_ShowExportEspFeaturesDialog_Failed_attempting_to_save_ESP_features_" + "to__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must contain targets for which to export features.. /// </summary> public static string SkylineWindow_ShowExportEspFeaturesDialog_The_document_must_contain_targets_for_which_to_export_features_ { get { return ResourceManager.GetString("SkylineWindow_ShowExportEspFeaturesDialog_The_document_must_contain_targets_for_w" + "hich_to_export_features_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Export Spectral Library. /// </summary> public static string SkylineWindow_ShowExportSpectralLibraryDialog_Export_Spectral_Library { get { return ResourceManager.GetString("SkylineWindow_ShowExportSpectralLibraryDialog_Export_Spectral_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exporting spectral library {0}.... /// </summary> public static string SkylineWindow_ShowExportSpectralLibraryDialog_Exporting_spectral_library__0____ { get { return ResourceManager.GetString("SkylineWindow_ShowExportSpectralLibraryDialog_Exporting_spectral_library__0____", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed exporting spectral library to {0}.. /// </summary> public static string SkylineWindow_ShowExportSpectralLibraryDialog_Failed_exporting_spectral_library_to__0__ { get { return ResourceManager.GetString("SkylineWindow_ShowExportSpectralLibraryDialog_Failed_exporting_spectral_library_t" + "o__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must contain at least one peptide precursor to export a spectral library.. 
/// </summary> public static string SkylineWindow_ShowExportSpectralLibraryDialog_The_document_must_contain_at_least_one_peptide_precursor_to_export_a_spectral_library_ { get { return ResourceManager.GetString("SkylineWindow_ShowExportSpectralLibraryDialog_The_document_must_contain_at_least_" + "one_peptide_precursor_to_export_a_spectral_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must contain results to export a spectral library.. /// </summary> public static string SkylineWindow_ShowExportSpectralLibraryDialog_The_document_must_contain_results_to_export_a_spectral_library_ { get { return ResourceManager.GetString("SkylineWindow_ShowExportSpectralLibraryDialog_The_document_must_contain_results_t" + "o_export_a_spectral_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Generate Decoys. /// </summary> public static string SkylineWindow_ShowGenerateDecoysDlg_Generate_Decoys { get { return ResourceManager.GetString("SkylineWindow_ShowGenerateDecoysDlg_Generate_Decoys", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must be fully loaded before importing a peptide search.. /// </summary> public static string SkylineWindow_ShowImportPeptideSearchDlg_The_document_must_be_fully_loaded_before_importing_a_peptide_search_ { get { return ResourceManager.GetString("SkylineWindow_ShowImportPeptideSearchDlg_The_document_must_be_fully_loaded_before" + "_importing_a_peptide_search_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You must save this document before importing a peptide search.. /// </summary> public static string SkylineWindow_ShowImportPeptideSearchDlg_You_must_save_this_document_before_importing_a_peptide_search_ { get { return ResourceManager.GetString("SkylineWindow_ShowImportPeptideSearchDlg_You_must_save_this_document_before_impor" + "ting_a_peptide_search_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must contain targets for which to export features.. /// </summary> public static string SkylineWindow_ShowMProphetFeaturesDialog_The_document_must_contain_targets_for_which_to_export_features_ { get { return ResourceManager.GetString("SkylineWindow_ShowMProphetFeaturesDialog_The_document_must_contain_targets_for_wh" + "ich_to_export_features_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must have imported results.. /// </summary> public static string SkylineWindow_ShowMProphetFeaturesDialog_The_document_must_have_imported_results_ { get { return ResourceManager.GetString("SkylineWindow_ShowMProphetFeaturesDialog_The_document_must_have_imported_results_" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Do you want to continue?. /// </summary> public static string SkylineWindow_ShowProgressErrorUI_Do_you_want_to_continue_ { get { return ResourceManager.GetString("SkylineWindow_ShowProgressErrorUI_Do_you_want_to_continue_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Retry. /// </summary> public static string SkylineWindow_ShowProgressErrorUI_Retry { get { return ResourceManager.GetString("SkylineWindow_ShowProgressErrorUI_Retry", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skip. 
/// </summary> public static string SkylineWindow_ShowProgressErrorUI_Skip { get { return ResourceManager.GetString("SkylineWindow_ShowProgressErrorUI_Skip", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Continue. /// </summary> public static string SkylineWindow_ShowPublishDlg_Continue { get { return ResourceManager.GetString("SkylineWindow_ShowPublishDlg_Continue", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Press Continue to use the server of your choice.. /// </summary> public static string SkylineWindow_ShowPublishDlg_Press_Continue_to_use_the_server_of_your_choice_ { get { return ResourceManager.GetString("SkylineWindow_ShowPublishDlg_Press_Continue_to_use_the_server_of_your_choice_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Press Register to register for a project on PanoramaWeb.. /// </summary> public static string SkylineWindow_ShowPublishDlg_Press_Register_to_register_for_a_project_on_PanoramaWeb_ { get { return ResourceManager.GetString("SkylineWindow_ShowPublishDlg_Press_Register_to_register_for_a_project_on_Panorama" + "Web_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Register. /// </summary> public static string SkylineWindow_ShowPublishDlg_Register { get { return ResourceManager.GetString("SkylineWindow_ShowPublishDlg_Register", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must be fully loaded before it can be uploaded.. /// </summary> public static string SkylineWindow_ShowPublishDlg_The_document_must_be_fully_loaded_before_it_can_be_uploaded_ { get { return ResourceManager.GetString("SkylineWindow_ShowPublishDlg_The_document_must_be_fully_loaded_before_it_can_be_u" + "ploaded_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must be saved before it can be uploaded.. /// </summary> public static string SkylineWindow_ShowPublishDlg_The_document_must_be_saved_before_it_can_be_uploaded_ { get { return ResourceManager.GetString("SkylineWindow_ShowPublishDlg_The_document_must_be_saved_before_it_can_be_uploaded" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There are no Panorama servers to upload to. /// </summary> public static string SkylineWindow_ShowPublishDlg_There_are_no_Panorama_servers_to_upload_to { get { return ResourceManager.GetString("SkylineWindow_ShowPublishDlg_There_are_no_Panorama_servers_to_upload_to", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Refine. /// </summary> public static string SkylineWindow_ShowRefineDlg_Refine { get { return ResourceManager.GetString("SkylineWindow_ShowRefineDlg_Refine", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Refining document. /// </summary> public static string SkylineWindow_ShowRefineDlg_Refining_document { get { return ResourceManager.GetString("SkylineWindow_ShowRefineDlg_Refining_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reintegrate peaks. /// </summary> public static string SkylineWindow_ShowReintegrateDialog_Reintegrate_peaks { get { return ResourceManager.GetString("SkylineWindow_ShowReintegrateDialog_Reintegrate_peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reintegration of results requires a trained peak scoring model.. 
/// </summary> public static string SkylineWindow_ShowReintegrateDialog_Reintegration_of_results_requires_a_trained_peak_scoring_model_ { get { return ResourceManager.GetString("SkylineWindow_ShowReintegrateDialog_Reintegration_of_results_requires_a_trained_p" + "eak_scoring_model_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must be fully loaded before it can be re-integrated.. /// </summary> public static string SkylineWindow_ShowReintegrateDialog_The_document_must_be_fully_loaded_before_it_can_be_re_integrated_ { get { return ResourceManager.GetString("SkylineWindow_ShowReintegrateDialog_The_document_must_be_fully_loaded_before_it_c" + "an_be_re_integrated_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must have imported results.. /// </summary> public static string SkylineWindow_ShowReintegrateDialog_The_document_must_have_imported_results_ { get { return ResourceManager.GetString("SkylineWindow_ShowReintegrateDialog_The_document_must_have_imported_results_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document must have targets in order to reintegrate chromatograms.. /// </summary> public static string SkylineWindow_ShowReintegrateDialog_The_document_must_have_targets_in_order_to_reintegrate_chromatograms_ { get { return ResourceManager.GetString("SkylineWindow_ShowReintegrateDialog_The_document_must_have_targets_in_order_to_re" + "integrate_chromatograms_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected document change during operation.. /// </summary> public static string SkylineWindow_ShowReintegrateDialog_Unexpected_document_change_during_operation_ { get { return ResourceManager.GetString("SkylineWindow_ShowReintegrateDialog_Unexpected_document_change_during_operation_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Rename proteins. /// </summary> public static string SkylineWindow_ShowRenameProteinsDlg_Rename_proteins { get { return ResourceManager.GetString("SkylineWindow_ShowRenameProteinsDlg_Rename_proteins", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Choose a background proteome in the Digestions tab of the Peptide Settings.. /// </summary> public static string SkylineWindow_ShowUniquePeptidesDlg_Choose_a_background_proteome_in_the_Digestions_tab_of_the_Peptide_Settings { get { return ResourceManager.GetString("SkylineWindow_ShowUniquePeptidesDlg_Choose_a_background_proteome_in_the_Digestion" + "s_tab_of_the_Peptide_Settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Inspecting peptide uniqueness requires a background proteome.. /// </summary> public static string SkylineWindow_ShowUniquePeptidesDlg_Inspecting_peptide_uniqueness_requires_a_background_proteome { get { return ResourceManager.GetString("SkylineWindow_ShowUniquePeptidesDlg_Inspecting_peptide_uniqueness_requires_a_back" + "ground_proteome", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid file specified.. /// </summary> public static string SkylineWindow_SkylineWindow_Invalid_file_specified { get { return ResourceManager.GetString("SkylineWindow_SkylineWindow_Invalid_file_specified", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Must have a window handle to begin processing.. 
/// </summary> public static string SkylineWindow_SkylineWindow_Must_have_a_window_handle_to_begin_processing { get { return ResourceManager.GetString("SkylineWindow_SkylineWindow_Must_have_a_window_handle_to_begin_processing", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The URI {0} is not a file.. /// </summary> public static string SkylineWindow_SkylineWindow_The_URI__0__is_not_a_file { get { return ResourceManager.GetString("SkylineWindow_SkylineWindow_The_URI__0__is_not_a_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Sort proteins by accession. /// </summary> public static string SkylineWindow_sortProteinsByAccessionToolStripMenuItem_Click_Sort_proteins_by_accession { get { return ResourceManager.GetString("SkylineWindow_sortProteinsByAccessionToolStripMenuItem_Click_Sort_proteins_by_acc" + "ession", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Sort proteins by gene. /// </summary> public static string SkylineWindow_sortProteinsByGeneToolStripMenuItem_Click_Sort_proteins_by_gene { get { return ResourceManager.GetString("SkylineWindow_sortProteinsByGeneToolStripMenuItem_Click_Sort_proteins_by_gene", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Sort proteins by preferred name. /// </summary> public static string SkylineWindow_sortProteinsByPreferredNameToolStripMenuItem_Click_Sort_proteins_by_preferred_name { get { return ResourceManager.GetString("SkylineWindow_sortProteinsByPreferredNameToolStripMenuItem_Click_Sort_proteins_by" + "_preferred_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Sort proteins by name. /// </summary> public static string SkylineWindow_sortProteinsMenuItem_Click_Sort_proteins_by_name { get { return ResourceManager.GetString("SkylineWindow_sortProteinsMenuItem_Click_Sort_proteins_by_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Showing targets at 1% FDR will set the replicate display type to single. Do you want to continue?. /// </summary> public static string SkylineWindow_targetsAt1FDRToolStripMenuItem_Click_Showing_targets_at_1__FDR_will_set_the_replicate_display_type_to_single__Do_you_want_to_continue_ { get { return ResourceManager.GetString("SkylineWindow_targetsAt1FDRToolStripMenuItem_Click_Showing_targets_at_1__FDR_will" + "_set_the_replicate_display_type_to_single__Do_you_want_to_continue_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected character &apos;{0}&apos; found on line {1}.. /// </summary> public static string SkylineWindow_Unexpected_character__0__found_on_line__1__ { get { return ResourceManager.GetString("SkylineWindow_Unexpected_character__0__found_on_line__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Targets. /// </summary> public static string SkylineWindow_UpdateAreaPointsTypeMenuItems_Targets { get { return ResourceManager.GetString("SkylineWindow_UpdateAreaPointsTypeMenuItems_Targets", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Targets at {0}% FDR. /// </summary> public static string SkylineWindow_UpdateAreaPointsTypeMenuItems_Targets_at__0___FDR { get { return ResourceManager.GetString("SkylineWindow_UpdateAreaPointsTypeMenuItems_Targets_at__0___FDR", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure attempting to load the window layout file {0}.. 
/// </summary> public static string SkylineWindow_UpdateGraphUI_Failure_attempting_to_load_the_window_layout_file__0__ { get { return ResourceManager.GetString("SkylineWindow_UpdateGraphUI_Failure_attempting_to_load_the_window_layout_file__0_" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Rename or delete this file to restore the default layout.. /// </summary> public static string SkylineWindow_UpdateGraphUI_Rename_or_delete_this_file_to_restore_the_default_layout { get { return ResourceManager.GetString("SkylineWindow_UpdateGraphUI_Rename_or_delete_this_file_to_restore_the_default_lay" + "out", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skyline may also need to be restarted.. /// </summary> public static string SkylineWindow_UpdateGraphUI_Skyline_may_also_need_to_be_restarted { get { return ResourceManager.GetString("SkylineWindow_UpdateGraphUI_Skyline_may_also_need_to_be_restarted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ready. /// </summary> public static string SkylineWindow_UpdateProgressUI_Ready { get { return ResourceManager.GetString("SkylineWindow_UpdateProgressUI_Ready", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No libraries to show. Would you like to add a library?. /// </summary> public static string SkylineWindow_ViewSpectralLibraries_No_libraries_to_show_Would_you_like_to_add_a_library { get { return ResourceManager.GetString("SkylineWindow_ViewSpectralLibraries_No_libraries_to_show_Would_you_like_to_add_a_" + "library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skyline Document Pointer. /// </summary> public static string SkypFile_FILTER_SKYP_Skyline_Document_Pointer { get { return ResourceManager.GetString("SkypFile_FILTER_SKYP_Skyline_Document_Pointer", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Name of shared Skyline archive cannot be null or empty.. /// </summary> public static string SkypFile_GetNonExistentPath_Name_of_shared_Skyline_archive_cannot_be_null_or_empty_ { get { return ResourceManager.GetString("SkypFile_GetNonExistentPath_Name_of_shared_Skyline_archive_cannot_be_null_or_empt" + "y_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} is not a valid URL on a Panorama server.. /// </summary> public static string SkypFile_GetSkyFileUrl__0__is_not_a_valid_URL_on_a_Panorama_server_ { get { return ResourceManager.GetString("SkypFile_GetSkyFileUrl__0__is_not_a_valid_URL_on_a_Panorama_server_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Expected the URL of a shared Skyline document archive ({0}) in the skyp file. Found {1} instead.. /// </summary> public static string SkypFile_GetSkyFileUrl_Expected_the_URL_of_a_shared_Skyline_document_archive___0___in_the_skyp_file__Found__1__instead_ { get { return ResourceManager.GetString("SkypFile_GetSkyFileUrl_Expected_the_URL_of_a_shared_Skyline_document_archive___0_" + "__in_the_skyp_file__Found__1__instead_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File does not contain the URL of a shared Skyline archive file ({0}) on a Panorama server.. 
/// </summary> public static string SkypFile_GetSkyFileUrl_File_does_not_contain_the_URL_of_a_shared_Skyline_archive_file___0___on_a_Panorama_server_ { get { return ResourceManager.GetString("SkypFile_GetSkyFileUrl_File_does_not_contain_the_URL_of_a_shared_Skyline_archive_" + "file___0___on_a_Panorama_server_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Downloading {0}. /// </summary> public static string SkypSupport_Download_Downloading__0_ { get { return ResourceManager.GetString("SkypSupport_Download_Downloading__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There was an error downloading the Skyline document specified in the skyp file: {0}.. /// </summary> public static string SkypSupport_Download_There_was_an_error_downloading_the_Skyline_document_specified_in_the_skyp_file___0__ { get { return ResourceManager.GetString("SkypSupport_Download_There_was_an_error_downloading_the_Skyline_document_specifie" + "d_in_the_skyp_file___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You do not have permissions to download this file from {0}.. /// </summary> public static string SkypSupport_Download_You_do_not_have_permissions_to_download_this_file_from__0__ { get { return ResourceManager.GetString("SkypSupport_Download_You_do_not_have_permissions_to_download_this_file_from__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to You may have to add {0} as a Panorama server from the Tools &gt; Options menu in Skyline.. /// </summary> public static string SkypSupport_Download_You_may_have_to_add__0__as_a_Panorama_server_from_the_Tools___Options_menu_in_Skyline_ { get { return ResourceManager.GetString("SkypSupport_Download_You_may_have_to_add__0__as_a_Panorama_server_from_the_Tools_" + "__Options_menu_in_Skyline_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Downloading Skyline Document Archive. /// </summary> public static string SkypSupport_Open_Downloading_Skyline_Document_Archive { get { return ResourceManager.GetString("SkypSupport_Open_Downloading_Skyline_Document_Archive", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure opening skyp file.. /// </summary> public static string SkypSupport_Open_Failure_opening_skyp_file_ { get { return ResourceManager.GetString("SkypSupport_Open_Failure_opening_skyp_file_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Average mass. /// </summary> public static string SmallMoleculeLibraryAttributes_KeyValuePairs_Average_mass { get { return ResourceManager.GetString("SmallMoleculeLibraryAttributes_KeyValuePairs_Average_mass", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Formula. /// </summary> public static string SmallMoleculeLibraryAttributes_KeyValuePairs_Formula { get { return ResourceManager.GetString("SmallMoleculeLibraryAttributes_KeyValuePairs_Formula", resourceCulture); } } /// <summary> /// Looks up a localized string similar to InChIKey. /// </summary> public static string SmallMoleculeLibraryAttributes_KeyValuePairs_InChIKey { get { return ResourceManager.GetString("SmallMoleculeLibraryAttributes_KeyValuePairs_InChIKey", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Monoisotopic mass. 
/// </summary> public static string SmallMoleculeLibraryAttributes_KeyValuePairs_Monoisotopic_mass { get { return ResourceManager.GetString("SmallMoleculeLibraryAttributes_KeyValuePairs_Monoisotopic_mass", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Name. /// </summary> public static string SmallMoleculeLibraryAttributes_KeyValuePairs_Name { get { return ResourceManager.GetString("SmallMoleculeLibraryAttributes_KeyValuePairs_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to OtherIDs. /// </summary> public static string SmallMoleculeLibraryAttributes_KeyValuePairs_OtherIDs { get { return ResourceManager.GetString("SmallMoleculeLibraryAttributes_KeyValuePairs_OtherIDs", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A molecule is defined by a chemical formula and at least one of Name, InChiKey, or other keys (HMDB etc). /// </summary> public static string SmallMoleculeLibraryAttributes_Validate_A_small_molecule_is_defined_by_a_chemical_formula_and_at_least_one_of_Name__InChiKey__or_other_keys__HMDB_etc_ { get { return ResourceManager.GetString("SmallMoleculeLibraryAttributes_Validate_A_small_molecule_is_defined_by_a_chemical" + "_formula_and_at_least_one_of_Name__InChiKey__or_other_keys__HMDB_etc_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Compound. /// </summary> public static string SmallMoleculeTransitionListColumnHeaders_SmallMoleculeTransitionListColumnHeaders_Compound { get { return ResourceManager.GetString("SmallMoleculeTransitionListColumnHeaders_SmallMoleculeTransitionListColumnHeaders" + "_Compound", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Molecule. /// </summary> public static string SmallMoleculeTransitionListColumnHeaders_SmallMoleculeTransitionListColumnHeaders_Molecule { get { return ResourceManager.GetString("SmallMoleculeTransitionListColumnHeaders_SmallMoleculeTransitionListColumnHeaders" + "_Molecule", resourceCulture); } } /// <summary> /// Looks up a localized string similar to RT (min). /// </summary> public static string SmallMoleculeTransitionListColumnHeaders_SmallMoleculeTransitionListColumnHeaders_RT__min_ { get { return ResourceManager.GetString("SmallMoleculeTransitionListColumnHeaders_SmallMoleculeTransitionListColumnHeaders" + "_RT__min_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Inconsistent molecule description. /// </summary> public static string SmallMoleculeTransitionListReader_GetMoleculeTransitionGroup_Inconsistent_molecule_description { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_GetMoleculeTransitionGroup_Inconsistent_molecul" + "e_description", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot use product neutral loss chemical formula without a precursor chemical formula. /// </summary> public static string SmallMoleculeTransitionListReader_ProcessNeutralLoss_Cannot_use_product_neutral_loss_chemical_formula_without_a_precursor_chemical_formula { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ProcessNeutralLoss_Cannot_use_product_neutral_l" + "oss_chemical_formula_without_a_precursor_chemical_formula", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor molecular formula {0} does not contain sufficient atoms to be used with neutral loss {1}. 
/// </summary> public static string SmallMoleculeTransitionListReader_ProcessNeutralLoss_Precursor_molecular_formula__0__does_not_contain_sufficient_atoms_to_be_used_with_neutral_loss__1_ { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ProcessNeutralLoss_Precursor_molecular_formula_" + "_0__does_not_contain_sufficient_atoms_to_be_used_with_neutral_loss__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} is not a valid CAS registry number.. /// </summary> public static string SmallMoleculeTransitionListReader_ReadMoleculeIdColumns__0__is_not_a_valid_CAS_registry_number_ { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ReadMoleculeIdColumns__0__is_not_a_valid_CAS_re" + "gistry_number_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} is not a valid HMDB identifier.. /// </summary> public static string SmallMoleculeTransitionListReader_ReadMoleculeIdColumns__0__is_not_a_valid_HMDB_identifier_ { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ReadMoleculeIdColumns__0__is_not_a_valid_HMDB_i" + "dentifier_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} is not a valid InChI identifier.. /// </summary> public static string SmallMoleculeTransitionListReader_ReadMoleculeIdColumns__0__is_not_a_valid_InChI_identifier_ { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ReadMoleculeIdColumns__0__is_not_a_valid_InChI_" + "identifier_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} is not a valid InChiKey.. /// </summary> public static string SmallMoleculeTransitionListReader_ReadMoleculeIdColumns__0__is_not_a_valid_InChiKey_ { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ReadMoleculeIdColumns__0__is_not_a_valid_InChiK" + "ey_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adduct {0} charge {1} does not agree with declared charge {2}. /// </summary> public static string SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Adduct__0__charge__1__does_not_agree_with_declared_charge__2_ { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Adduct__0__charge" + "__1__does_not_agree_with_declared_charge__2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot derive charge from adduct description &quot;{0}&quot;. Use the corresponding Charge column to set this explicitly, or change the adduct description as needed.. /// </summary> public static string SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Cannot_derive_charge_from_adduct_description___0____Use_the_corresponding_Charge_column_to_set_this_explicitly__or_change_the_adduct_description_as_needed_ { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Cannot_derive_cha" + "rge_from_adduct_description___0____Use_the_corresponding_Charge_column_to_set_th" + "is_explicitly__or_change_the_adduct_description_as_needed_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Formula already contains an adduct description, and it does not match.. 
/// </summary> public static string SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Formula_already_contains_an_adduct_description__and_it_does_not_match_ { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Formula_already_c" + "ontains_an_adduct_description__and_it_does_not_match_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid collisional cross section value {0}. /// </summary> public static string SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Invalid_collisional_cross_section_value__0_ { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Invalid_collision" + "al_cross_section_value__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid ion mobility high energy offset value {0}. /// </summary> public static string SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Invalid_ion_mobility_high_energy_offset_value__0_ { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Invalid_ion_mobil" + "ity_high_energy_offset_value__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid ion mobility units value {0} (accepted values are {1}). /// </summary> public static string SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Invalid_ion_mobility_units_value__0___accepted_values_are__1__ { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Invalid_ion_mobil" + "ity_units_value__0___accepted_values_are__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid ion mobility value {0}. /// </summary> public static string SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Invalid_ion_mobility_value__0_ { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Invalid_ion_mobil" + "ity_value__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing ion mobility units. /// </summary> public static string SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Missing_ion_mobility_units { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_Missing_ion_mobil" + "ity_units", resourceCulture); } } /// <summary> /// Looks up a localized string similar to unknown error. /// </summary> public static string SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_unknown_error { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_ReadPrecursorOrProductColumns_unknown_error", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error reading molecule column headers, did not recognize: ///{0} ///Supported values include: ///{1}. /// </summary> public static string SmallMoleculeTransitionListReader_SmallMoleculeTransitionListReader_ { get { return ResourceManager.GetString("SmallMoleculeTransitionListReader_SmallMoleculeTransitionListReader_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &quot;{0}&quot; is not a valid setting for full scan special handling. 
/// </summary> public static string SpecialHandlingType_Validate___0___is_not_a_valid_setting_for_full_scan_special_handling { get { return ResourceManager.GetString("SpecialHandlingType_Validate___0___is_not_a_valid_setting_for_full_scan_special_h" + "andling", resourceCulture); } } /// <summary> /// Looks up a localized string similar to None. /// </summary> public static string SpecialHandlingType_Validate_None { get { return ResourceManager.GetString("SpecialHandlingType_Validate_None", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Scan {0} found without precursor m/z.. /// </summary> public static string SpectraChromDataProvider_SpectraChromDataProvider_Scan__0__found_without_precursor_mz { get { return ResourceManager.GetString("SpectraChromDataProvider_SpectraChromDataProvider_Scan__0__found_without_precurso" + "r_mz", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Scan {0} found without scan time.. /// </summary> public static string SpectraChromDataProvider_SpectraChromDataProvider_Scan__0__found_without_scan_time { get { return ResourceManager.GetString("SpectraChromDataProvider_SpectraChromDataProvider_Scan__0__found_without_scan_tim" + "e", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Libraries:. /// </summary> public static string SpectralLibraryList_Label_Libraries { get { return ResourceManager.GetString("SpectralLibraryList_Label_Libraries", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Libraries. /// </summary> public static string SpectralLibraryList_Title_Edit_Libraries { get { return ResourceManager.GetString("SpectralLibraryList_Title_Edit_Libraries", resourceCulture); } } /// <summary> /// Looks up a localized string similar to SpectraST Library. /// </summary> public static string SpectrastLibrary_SpecFilter_SpectraST_Library { get { return ResourceManager.GetString("SpectrastLibrary_SpecFilter_SpectraST_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to determine isolation width for the scan targeted at {0}. /// </summary> public static string SpectrumFilter_CalcDiaIsolationValues_Unable_to_determine_isolation_width_for_the_scan_targeted_at__0_ { get { return ResourceManager.GetString("SpectrumFilter_CalcDiaIsolationValues_Unable_to_determine_isolation_width_for_the" + "_scan_targeted_at__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Two isolation windows contain targets which match the isolation target {0}.. /// </summary> public static string SpectrumFilter_FindFilterPairs_Two_isolation_windows_contain_targets_which_match_the_isolation_target__0__ { get { return ResourceManager.GetString("SpectrumFilter_FindFilterPairs_Two_isolation_windows_contain_targets_which_match_" + "the_isolation_target__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0}{1}, Charge {2}. /// </summary> public static string SpectrumGraphItem_Title__0__1__Charge__2__ { get { return ResourceManager.GetString("SpectrumGraphItem_Title__0__1__Charge__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0}{1}, Charge {2} ({3}). /// </summary> public static string SpectrumGraphItem_Title__0__1__Charge__2__3__ { get { return ResourceManager.GetString("SpectrumGraphItem_Title__0__1__Charge__2__3__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} library. 
/// </summary>
public static string SpectrumLibraryInfoDlg_SetDetailsText__0__library {
    get {
        return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText__0__library", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Data files.
/// </summary>
public static string SpectrumLibraryInfoDlg_SetDetailsText_Data_files {
    get {
        return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText_Data_files", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Data files: {0}.
/// </summary>
public static string SpectrumLibraryInfoDlg_SetDetailsText_Data_files___0_ {
    get {
        return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText_Data_files___0_", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to ID.
/// </summary>
public static string SpectrumLibraryInfoDlg_SetDetailsText_ID {
    get {
        return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText_ID", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to ID: {0}.
/// </summary>
public static string SpectrumLibraryInfoDlg_SetDetailsText_ID__0__ {
    get {
        return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText_ID__0__", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Matched spectra.
/// </summary>
public static string SpectrumLibraryInfoDlg_SetDetailsText_Matched_spectra {
    get {
        return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText_Matched_spectra", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Matched spectra: {0}.
/// </summary>
public static string SpectrumLibraryInfoDlg_SetDetailsText_Matched_spectra__0__ {
    get {
        return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText_Matched_spectra__0__", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Revision.
/// </summary>
public static string SpectrumLibraryInfoDlg_SetDetailsText_Revision {
    get {
        return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText_Revision", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Revision: {0}.
/// </summary>
public static string SpectrumLibraryInfoDlg_SetDetailsText_Revision__0__ {
    get {
        return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText_Revision__0__", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Unique peptides.
/// </summary>
public static string SpectrumLibraryInfoDlg_SetDetailsText_Unique_peptides {
    get {
        return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText_Unique_peptides", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Unique peptides: {0}.
/// </summary>
public static string SpectrumLibraryInfoDlg_SetDetailsText_Unique_peptides__0__ {
    get {
        return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText_Unique_peptides__0__", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Unique precursors: {0}.
/// </summary>
public static string SpectrumLibraryInfoDlg_SetDetailsText_Unique_Precursors___0_ {
    get {
        return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText_Unique_Precursors___0_", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Version.
/// </summary> public static string SpectrumLibraryInfoDlg_SetDetailsText_Version { get { return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText_Version", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Version: {0}. /// </summary> public static string SpectrumLibraryInfoDlg_SetDetailsText_Version__0__ { get { return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetDetailsText_Version__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library source:. /// </summary> public static string SpectrumLibraryInfoDlg_SetLibraryLinks_Library_source { get { return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetLibraryLinks_Library_source", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library sources:. /// </summary> public static string SpectrumLibraryInfoDlg_SetLibraryLinks_Library_sources { get { return ResourceManager.GetString("SpectrumLibraryInfoDlg_SetLibraryLinks_Library_sources", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Two incompatible transition groups for sequence {0}, precursor m/z {1}.. /// </summary> public static string SpectrumMzInfo_CombineSpectrumInfo_Two_incompatible_transition_groups_for_sequence__0___precursor_m_z__1__ { get { return ResourceManager.GetString("SpectrumMzInfo_CombineSpectrumInfo_Two_incompatible_transition_groups_for_sequenc" + "e__0___precursor_m_z__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library spectrum for sequence {0} is missing.. /// </summary> public static string SpectrumMzInfo_GetInfoFromLibrary_Library_spectrum_for_sequence__0__is_missing_ { get { return ResourceManager.GetString("SpectrumMzInfo_GetInfoFromLibrary_Library_spectrum_for_sequence__0__is_missing_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Must have an active iRT calculator to add iRT peptides.. /// </summary> public static string SrmDocument_AddIrtPeptides_Must_have_an_active_iRT_calculator_to_add_iRT_peptides { get { return ResourceManager.GetString("SrmDocument_AddIrtPeptides_Must_have_an_active_iRT_calculator_to_add_iRT_peptides" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No replicate named {0} was found. /// </summary> public static string SrmDocument_ChangePeak_No_replicate_named__0__was_found { get { return ResourceManager.GetString("SrmDocument_ChangePeak_No_replicate_named__0__was_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No results found for the precursor {0} in the file {1}. /// </summary> public static string SrmDocument_ChangePeak_No_results_found_for_the_precursor__0__in_the_file__1__ { get { return ResourceManager.GetString("SrmDocument_ChangePeak_No_results_found_for_the_precursor__0__in_the_file__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No results found for the precursor {0} in the replicate {1}. /// </summary> public static string SrmDocument_ChangePeak_No_results_found_for_the_precursor__0__in_the_replicate__1__ { get { return ResourceManager.GetString("SrmDocument_ChangePeak_No_results_found_for_the_precursor__0__in_the_replicate__1" + "__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} was not found in the replicate {1}.. 
/// </summary> public static string SrmDocument_ChangePeak_The_file__0__was_not_found_in_the_replicate__1__ { get { return ResourceManager.GetString("SrmDocument_ChangePeak_The_file__0__was_not_found_in_the_replicate__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skyline Files. /// </summary> public static string SrmDocument_FILTER_DOC_AND_SKY_ZIP_Skyline_Files { get { return ResourceManager.GetString("SrmDocument_FILTER_DOC_AND_SKY_ZIP_Skyline_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skyline Documents. /// </summary> public static string SrmDocument_FILTER_DOC_Skyline_Documents { get { return ResourceManager.GetString("SrmDocument_FILTER_DOC_Skyline_Documents", resourceCulture); } } /// <summary> /// Looks up a localized string similar to molecules. /// </summary> public static string SrmDocument_GetPeptideGroupId_molecules { get { return ResourceManager.GetString("SrmDocument_GetPeptideGroupId_molecules", resourceCulture); } } /// <summary> /// Looks up a localized string similar to peptides. /// </summary> public static string SrmDocument_GetPeptideGroupId_peptides { get { return ResourceManager.GetString("SrmDocument_GetPeptideGroupId_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to sequence. /// </summary> public static string SrmDocument_GetPeptideGroupId_sequence { get { return ResourceManager.GetString("SrmDocument_GetPeptideGroupId_sequence", resourceCulture); } } /// <summary> /// Looks up a localized string similar to molecules. /// </summary> public static string SrmDocument_GetSmallMoleculeGroupId_molecules { get { return ResourceManager.GetString("SrmDocument_GetSmallMoleculeGroupId_molecules", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The peptide {0} was found multiple times with user modifications.. /// </summary> public static string SrmDocument_MergeMatchingPeptidesUserInfo_The_peptide__0__was_found_multiple_times_with_user_modifications { get { return ResourceManager.GetString("SrmDocument_MergeMatchingPeptidesUserInfo_The_peptide__0__was_found_multiple_time" + "s_with_user_modifications", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid move source.. /// </summary> public static string SrmDocument_MoveNode_Invalid_move_source { get { return ResourceManager.GetString("SrmDocument_MoveNode_Invalid_move_source", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid move target.. /// </summary> public static string SrmDocument_MoveNode_Invalid_move_target { get { return ResourceManager.GetString("SrmDocument_MoveNode_Invalid_move_target", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Annotation found without name.. /// </summary> public static string SrmDocument_ReadAnnotations_Annotation_found_without_name { get { return ResourceManager.GetString("SrmDocument_ReadAnnotations_Annotation_found_without_name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The isotope modification type {0} does not exist in the document settings.. 
/// </summary> public static string SrmDocument_ReadLabelType_The_isotope_modification_type__0__does_not_exist_in_the_document_settings { get { return ResourceManager.GetString("SrmDocument_ReadLabelType_The_isotope_modification_type__0__does_not_exist_in_the" + "_document_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No file with id {0} found in the replicate {1}. /// </summary> public static string SrmDocument_ReadResults_No_file_with_id__0__found_in_the_replicate__1__ { get { return ResourceManager.GetString("SrmDocument_ReadResults_No_file_with_id__0__found_in_the_replicate__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No replicate named {0} found in measured results. /// </summary> public static string SrmDocument_ReadResults_No_replicate_named__0__found_in_measured_results { get { return ResourceManager.GetString("SrmDocument_ReadResults_No_replicate_named__0__found_in_measured_results", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No results information found in the document settings. /// </summary> public static string SrmDocument_ReadResults_No_results_information_found_in_the_document_settings { get { return ResourceManager.GetString("SrmDocument_ReadResults_No_results_information_found_in_the_document_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to All transitions of decoy precursors must have a decoy mass shift.. /// </summary> public static string SrmDocument_ReadTransitionXml_All_transitions_of_decoy_precursors_must_have_a_decoy_mass_shift { get { return ResourceManager.GetString("SrmDocument_ReadTransitionXml_All_transitions_of_decoy_precursors_must_have_a_dec" + "oy_mass_shift", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document format version {0} is newer than the version {1} supported by {2}.. /// </summary> public static string SrmDocument_ReadXml_The_document_format_version__0__is_newer_than_the_version__1__supported_by__2__ { get { return ResourceManager.GetString("SrmDocument_ReadXml_The_document_format_version__0__is_newer_than_the_version__1_" + "_supported_by__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Compressing files for sharing archive {0}. /// </summary> public static string SrmDocumentSharing_DefaultMessage_Compressing_files_for_sharing_archive__0__ { get { return ResourceManager.GetString("SrmDocumentSharing_DefaultMessage_Compressing_files_for_sharing_archive__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Extracting files from sharing archive {0}. /// </summary> public static string SrmDocumentSharing_DefaultMessage_Extracting_files_from_sharing_archive__0__ { get { return ResourceManager.GetString("SrmDocumentSharing_DefaultMessage_Extracting_files_from_sharing_archive__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Shared Files. /// </summary> public static string SrmDocumentSharing_FILTER_SHARING_Shared_Files { get { return ResourceManager.GetString("SrmDocumentSharing_FILTER_SHARING_Shared_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The zip file is not a shared file.. 
/// </summary> public static string SrmDocumentSharing_FindSharedSkylineFile_The_zip_file_is_not_a_shared_file { get { return ResourceManager.GetString("SrmDocumentSharing_FindSharedSkylineFile_The_zip_file_is_not_a_shared_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The zip file is not a shared file. The file contains multiple Skyline documents.. /// </summary> public static string SrmDocumentSharing_FindSharedSkylineFile_The_zip_file_is_not_a_shared_file_The_file_contains_multiple_Skyline_documents { get { return ResourceManager.GetString("SrmDocumentSharing_FindSharedSkylineFile_The_zip_file_is_not_a_shared_file_The_fi" + "le_contains_multiple_Skyline_documents", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The zip file is not a shared file. The file does not contain any Skyline documents.. /// </summary> public static string SrmDocumentSharing_FindSharedSkylineFile_The_zip_file_is_not_a_shared_file_The_file_does_not_contain_any_Skyline_documents { get { return ResourceManager.GetString("SrmDocumentSharing_FindSharedSkylineFile_The_zip_file_is_not_a_shared_file_The_fi" + "le_does_not_contain_any_Skyline_documents", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Writing chromatograms. /// </summary> public static string SrmDocumentSharing_MinimizeToFile_Writing_chromatograms { get { return ResourceManager.GetString("SrmDocumentSharing_MinimizeToFile_Writing_chromatograms", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure removing temporary directory {0}.. /// </summary> public static string SrmDocumentSharing_ShareMinimal_Failure_removing_temporary_directory__0__ { get { return ResourceManager.GetString("SrmDocumentSharing_ShareMinimal_Failure_removing_temporary_directory__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Extracting {0}. /// </summary> public static string SrmDocumentSharing_SrmDocumentSharing_ExtractProgress_Extracting__0__ { get { return ResourceManager.GetString("SrmDocumentSharing_SrmDocumentSharing_ExtractProgress_Extracting__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Compressing {0}. /// </summary> public static string SrmDocumentSharing_SrmDocumentSharing_SaveProgress_Compressing__0__ { get { return ResourceManager.GetString("SrmDocumentSharing_SrmDocumentSharing_SaveProgress_Compressing__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Results found in document with no replicates.. /// </summary> public static string SrmDocumentValidateChromInfoResults_found_in_document_with_no_replicates { get { return ResourceManager.GetString("SrmDocumentValidateChromInfoResults_found_in_document_with_no_replicates", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Settings missing library spec.. /// </summary> public static string SrmSettings_ConnectLibrarySpecs_Settings_missing_library_spec { get { return ResourceManager.GetString("SrmSettings_ConnectLibrarySpecs_Settings_missing_library_spec", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The modification &apos;{0}&apos; already exists with a different definition.. 
/// </summary> public static string SrmSettings_UpdateDefaultModifications_The_modification__0__already_exists_with_a_different_definition { get { return ResourceManager.GetString("SrmSettings_UpdateDefaultModifications_The_modification__0__already_exists_with_a" + "_different_definition", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 3 ions. /// </summary> public static string SrmSettingsList_DEFAULT_3_ions { get { return ResourceManager.GetString("SrmSettingsList_DEFAULT_3_ions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to m/z &gt; precursor. /// </summary> public static string SrmSettingsList_DEFAULT_m_z_precursor { get { return ResourceManager.GetString("SrmSettingsList_DEFAULT_m_z_precursor", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Default. /// </summary> public static string SrmSettingsList_DefaultName_Default { get { return ResourceManager.GetString("SrmSettingsList_DefaultName_Default", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Saved Settings:. /// </summary> public static string SrmSettingsList_Label_Saved_Settings { get { return ResourceManager.GetString("SrmSettingsList_Label_Saved_Settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Edit Settings. /// </summary> public static string SrmSettingsList_Title_Edit_Settings { get { return ResourceManager.GetString("SrmSettingsList_Title_Edit_Settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to False. /// </summary> public static string SrmTreeNode_RenderTip_False { get { return ResourceManager.GetString("SrmTreeNode_RenderTip_False", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Note. /// </summary> public static string SrmTreeNode_RenderTip_Note { get { return ResourceManager.GetString("SrmTreeNode_RenderTip_Note", resourceCulture); } } /// <summary> /// Looks up a localized string similar to True. /// </summary> public static string SrmTreeNode_RenderTip_True { get { return ResourceManager.GetString("SrmTreeNode_RenderTip_True", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred creating options.. /// </summary> public static string SrmTreeNodeParent_ShowPickList_An_error_occurred_creating_options_ { get { return ResourceManager.GetString("SrmTreeNodeParent_ShowPickList_An_error_occurred_creating_options_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Surrogate Standard. /// </summary> public static string StandardType_SURROGATE_STANDARD_Surrogate_Standard { get { return ResourceManager.GetString("StandardType_SURROGATE_STANDARD_Surrogate_Standard", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Import DIA Peptide Search. /// </summary> public static string StartPage_PopulateWizardPanel_Import_DIA_Peptide_Search { get { return ResourceManager.GetString("StartPage_PopulateWizardPanel_Import_DIA_Peptide_Search", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Import PRM Peptide Search. 
/// </summary> public static string StartPage_PopulateWizardPanel_Import_PRM_Peptide_Search { get { return ResourceManager.GetString("StartPage_PopulateWizardPanel_Import_PRM_Peptide_Search", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Use the Skyline Import Peptide Search wizard to build a spectral library from peptide search results on DIA data, and then import the raw data to quantify peptides using Skyline MS1 Filtering.. /// </summary> public static string StartPage_PopulateWizardPanel_Use_the_Skyline_Import_Peptide_Search_wizard_to_build_a_spectral_library_from_peptide_search_results_on_DIA_data__and_then_import_the_raw_data_to_quantify_peptides_using_Skyline_MS1_Filtering_ { get { return ResourceManager.GetString("StartPage_PopulateWizardPanel_Use_the_Skyline_Import_Peptide_Search_wizard_to_bui" + "ld_a_spectral_library_from_peptide_search_results_on_DIA_data__and_then_import_t" + "he_raw_data_to_quantify_peptides_using_Skyline_MS1_Filtering_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Use the Skyline Import Peptide Search wizard to build a spectral library from peptide search results on PRM data, and then import the raw data to quantify peptides using Skyline MS1 Filtering.. /// </summary> public static string StartPage_PopulateWizardPanel_Use_the_Skyline_Import_Peptide_Search_wizard_to_build_a_spectral_library_from_peptide_search_results_on_PRM_data__and_then_import_the_raw_data_to_quantify_peptides_using_Skyline_MS1_Filtering_ { get { return ResourceManager.GetString("StartPage_PopulateWizardPanel_Use_the_Skyline_Import_Peptide_Search_wizard_to_bui" + "ld_a_spectral_library_from_peptide_search_results_on_PRM_data__and_then_import_t" + "he_raw_data_to_quantify_peptides_using_Skyline_MS1_Filtering_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to &amp;Folder for tutorial files:. /// </summary> public static string StartPage_Tutorial__Folder_for_tutorial_files_ { get { return ResourceManager.GetString("StartPage_Tutorial__Folder_for_tutorial_files_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to ZIP Files. /// </summary> public static string StartPage_Tutorial_Zip_Files { get { return ResourceManager.GetString("StartPage_Tutorial_Zip_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The settings have been reset to the default values.. /// </summary> public static string StartPageSettingsUI_btnResetDefaults_Click_The_settings_have_been_reset_to_the_default_values_ { get { return ResourceManager.GetString("StartPageSettingsUI_btnResetDefaults_Click_The_settings_have_been_reset_to_the_de" + "fault_values_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Integrate all: on. /// </summary> public static string StartPageSettingsUI_StartPageSettingsUI_Integrate_all__on { get { return ResourceManager.GetString("StartPageSettingsUI_StartPageSettingsUI_Integrate_all__on", resourceCulture); } } /// <summary> /// Looks up a localized string similar to List view item already has a description. /// </summary> public static string StatementCompletionForm_AddDescription_List_view_item_already_has_a_description { get { return ResourceManager.GetString("StatementCompletionForm_AddDescription_List_view_item_already_has_a_description", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Alternative Names:. 
/// </summary> public static string StatementCompletionTextBox_CreateListViewItems_Alternative_Names { get { return ResourceManager.GetString("StatementCompletionTextBox_CreateListViewItems_Alternative_Names", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Descriptions:. /// </summary> public static string StatementCompletionTextBox_CreateListViewItems_Descriptions { get { return ResourceManager.GetString("StatementCompletionTextBox_CreateListViewItems_Descriptions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid terminus &apos;{0}&apos;.. /// </summary> public static string StaticMod_ToModTerminus_Invalid_terminus__0__ { get { return ResourceManager.GetString("StaticMod_ToModTerminus_Invalid_terminus__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Formula not allowed with labeled atoms.. /// </summary> public static string StaticMod_Validate_Formula_not_allowed_with_labeled_atoms { get { return ResourceManager.GetString("StaticMod_Validate_Formula_not_allowed_with_labeled_atoms", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid amino acid &apos;{0}&apos;.. /// </summary> public static string StaticMod_Validate_Invalid_amino_acid___0___ { get { return ResourceManager.GetString("StaticMod_Validate_Invalid_amino_acid___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loss-only modifications may not be explicit.. /// </summary> public static string StaticMod_Validate_Loss_only_modifications_may_not_be_explicit { get { return ResourceManager.GetString("StaticMod_Validate_Loss_only_modifications_may_not_be_explicit", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loss-only modifications may not be variable.. /// </summary> public static string StaticMod_Validate_Loss_only_modifications_may_not_be_variable { get { return ResourceManager.GetString("StaticMod_Validate_Loss_only_modifications_may_not_be_variable", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Modification formula may not be empty.. /// </summary> public static string StaticMod_Validate_Modification_formula_may_not_be_empty { get { return ResourceManager.GetString("StaticMod_Validate_Modification_formula_may_not_be_empty", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Modification must specify a formula, labeled atoms or valid monoisotopic and average masses.. /// </summary> public static string StaticMod_Validate_Modification_must_specify_a_formula_labeled_atoms_or_valid_monoisotopic_and_average_masses { get { return ResourceManager.GetString("StaticMod_Validate_Modification_must_specify_a_formula_labeled_atoms_or_valid_mon" + "oisotopic_and_average_masses", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Modification with a formula may not specify modification masses.. /// </summary> public static string StaticMod_Validate_Modification_with_a_formula_may_not_specify_modification_masses { get { return ResourceManager.GetString("StaticMod_Validate_Modification_with_a_formula_may_not_specify_modification_masse" + "s", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Terminal modification with labeled atoms not allowed.. 
/// </summary>
public static string StaticMod_Validate_Terminal_modification_with_labeled_atoms_not_allowed {
    get {
        return ResourceManager.GetString("StaticMod_Validate_Terminal_modification_with_labeled_atoms_not_allowed", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Variable modifications must specify amino acid or terminus..
/// </summary>
public static string StaticMod_Validate_Variable_modifications_must_specify_amino_acid_or_terminus {
    get {
        return ResourceManager.GetString("StaticMod_Validate_Variable_modifications_must_specify_amino_acid_or_terminus", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to &amp;Modifications:.
/// </summary>
public static string StaticModList_Label_Modifications {
    get {
        return ResourceManager.GetString("StaticModList_Label_Modifications", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Edit Structural Modifications.
/// </summary>
public static string StaticModList_Title_Edit_Structural_Modifications {
    get {
        return ResourceManager.GetString("StaticModList_Title_Edit_Structural_Modifications", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Peptide.
/// </summary>
public static string SummaryPeptideGraphPane_SummaryPeptideGraphPane_Peptide {
    get {
        return ResourceManager.GetString("SummaryPeptideGraphPane_SummaryPeptideGraphPane_Peptide", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Log.
/// </summary>
public static string SummaryPeptideGraphPane_UpdateAxes_Log {
    get {
        return ResourceManager.GetString("SummaryPeptideGraphPane_UpdateAxes_Log", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Replicate.
/// </summary>
public static string SummaryReplicateGraphPane_SummaryReplicateGraphPane_Replicate {
    get {
        return ResourceManager.GetString("SummaryReplicateGraphPane_SummaryReplicateGraphPane_Replicate", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Unable to resolve molecule from &apos;{0}&apos;..
/// </summary>
public static string TargetResolver_TryResolveTarget_Unable_to_resolve_molecule_from___0___ {
    get {
        return ResourceManager.GetString("TargetResolver_TryResolveTarget_Unable_to_resolve_molecule_from___0___", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Unable to resolve molecule from &quot;{0}&quot;: could be any of {1}.
/// </summary>
public static string TargetResolver_TryResolveTarget_Unable_to_resolve_molecule_from___0____could_be_any_of__1_ {
    get {
        return ResourceManager.GetString("TargetResolver_TryResolveTarget_Unable_to_resolve_molecule_from___0____could_be_a" + "ny_of__1_", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to Error running process.
/// </summary>
public static string TestNamedPipeProcessRunner_RunProcess_Error_running_process {
    get {
        return ResourceManager.GetString("TestNamedPipeProcessRunner_RunProcess_Error_running_process", resourceCulture);
    }
}

/// <summary>
/// Looks up a localized string similar to The operation was canceled by the user..
/// </summary> public static string TestSkylineProcessRunner_RunProcess_The_operation_was_canceled_by_the_user_ { get { return ResourceManager.GetString("TestSkylineProcessRunner_RunProcess_The_operation_was_canceled_by_the_user_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot find a file with that identifier.. /// </summary> public static string TestToolStoreClient_GetToolZipFile_Cannot_find_a_file_with_that_identifier_ { get { return ResourceManager.GetString("TestToolStoreClient_GetToolZipFile_Cannot_find_a_file_with_that_identifier_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error downloading tool. /// </summary> public static string TestToolStoreClient_GetToolZipFile_Error_downloading_tool { get { return ResourceManager.GetString("TestToolStoreClient_GetToolZipFile_Error_downloading_tool", resourceCulture); } } /// <summary> /// Looks up a localized string similar to CSV (Comma delimited). /// </summary> public static string TextUtil_DESCRIPTION_CSV_CSV__Comma_delimited_ { get { return ResourceManager.GetString("TextUtil_DESCRIPTION_CSV_CSV__Comma_delimited_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to TSV (Tab delimited). /// </summary> public static string TextUtil_DESCRIPTION_TSV_TSV__Tab_delimited_ { get { return ResourceManager.GetString("TextUtil_DESCRIPTION_TSV_TSV__Tab_delimited_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to All Files. /// </summary> public static string TextUtil_FileDialogFiltersAll_All_Files { get { return ResourceManager.GetString("TextUtil_FileDialogFiltersAll_All_Files", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to find a valid Thermo instrument installation.. /// </summary> public static string ThermoMassListExporter_EnsureLibraries_Failed_to_find_a_valid_Thermo_instrument_installation_ { get { return ResourceManager.GetString("ThermoMassListExporter_EnsureLibraries_Failed_to_find_a_valid_Thermo_instrument_i" + "nstallation_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Thermo instrument software may not be installed correctly. The library {0} could not be found.. /// </summary> public static string ThermoMassListExporter_EnsureLibraries_Thermo_instrument_software_may_not_be_installed_correctly__The_library__0__could_not_be_found_ { get { return ResourceManager.GetString("ThermoMassListExporter_EnsureLibraries_Thermo_instrument_software_may_not_be_inst" + "alled_correctly__The_library__0__could_not_be_found_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Thermo method creation software may not be installed correctly.. /// </summary> public static string ThermoMassListExporter_EnsureLibraries_Thermo_method_creation_software_may_not_be_installed_correctly_ { get { return ResourceManager.GetString("ThermoMassListExporter_EnsureLibraries_Thermo_method_creation_software_may_not_be" + "_installed_correctly_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error loading report from the temporary file {0}. /// </summary> public static string ToolDescription_CallArgsCollector_Error_loading_report_from_the_temporary_file__0_ { get { return ResourceManager.GetString("ToolDescription_CallArgsCollector_Error_loading_report_from_the_temporary_file__0" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error running the installed tool {0}. 
The method &apos;{1}&apos; has the wrong signature.. /// </summary> public static string ToolDescription_CallArgsCollector_Error_running_the_installed_tool__0___The_method___1___has_the_wrong_signature_ { get { return ResourceManager.GetString("ToolDescription_CallArgsCollector_Error_running_the_installed_tool__0___The_metho" + "d___1___has_the_wrong_signature_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to find any CollectArgs method to call on class &apos;{0}&apos;.. /// </summary> public static string ToolDescription_FindArgsCollectorMethod_Unable_to_find_any_CollectArgs_method_to_call_on_class___0___ { get { return ResourceManager.GetString("ToolDescription_FindArgsCollectorMethod_Unable_to_find_any_CollectArgs_method_to_" + "call_on_class___0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error running the installed tool {0}. It seems to have an error in one of its files. Please reinstall the tool and try again. /// </summary> public static string ToolDescription_RunExecutableBackground_Error_running_the_installed_tool__0___It_seems_to_have_an_error_in_one_of_its_files__Please_reinstall_the_tool_and_try_again { get { return ResourceManager.GetString("ToolDescription_RunExecutableBackground_Error_running_the_installed_tool__0___It_" + "seems_to_have_an_error_in_one_of_its_files__Please_reinstall_the_tool_and_try_ag" + "ain", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error running the installed tool {0}. It seems to be missing a file. Please reinstall the tool and try again.. /// </summary> public static string ToolDescription_RunExecutableBackground_Error_running_the_installed_tool_0_It_seems_to_be_missing_a_file__Please_reinstall_the_tool_and_try_again_ { get { return ResourceManager.GetString("ToolDescription_RunExecutableBackground_Error_running_the_installed_tool_0_It_see" + "ms_to_be_missing_a_file__Please_reinstall_the_tool_and_try_again_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The tool {0} had an error.. /// </summary> public static string ToolDescription_RunExecutableBackground_The_tool__0__had_an_error_ { get { return ResourceManager.GetString("ToolDescription_RunExecutableBackground_The_tool__0__had_an_error_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The tool {0} had an error, it returned the message:. /// </summary> public static string ToolDescription_RunExecutableBackground_The_tool__0__had_an_error__it_returned_the_message_ { get { return ResourceManager.GetString("ToolDescription_RunExecutableBackground_The_tool__0__had_an_error__it_returned_th" + "e_message_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File not found.. /// </summary> public static string ToolDescription_RunTool_File_not_found_ { get { return ResourceManager.GetString("ToolDescription_RunTool_File_not_found_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please check the command location is correct for this tool.. 
/// </summary> public static string ToolDescription_RunTool_Please_check_the_command_location_is_correct_for_this_tool_ { get { return ResourceManager.GetString("ToolDescription_RunTool_Please_check_the_command_location_is_correct_for_this_too" + "l_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please check the External Tools Store on the Skyline web site for the most recent version of the QuaSAR external tool.. /// </summary> public static string ToolDescription_RunTool_Please_check_the_External_Tools_Store_on_the_Skyline_web_site_for_the_most_recent_version_of_the_QuaSAR_external_tool_ { get { return ResourceManager.GetString("ToolDescription_RunTool_Please_check_the_External_Tools_Store_on_the_Skyline_web_" + "site_for_the_most_recent_version_of_the_QuaSAR_external_tool_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please reconfigure that tool, it failed to execute. . /// </summary> public static string ToolDescription_RunTool_Please_reconfigure_that_tool__it_failed_to_execute__ { get { return ResourceManager.GetString("ToolDescription_RunTool_Please_reconfigure_that_tool__it_failed_to_execute__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Support for the GenePattern version of QuaSAR has been discontinued.. /// </summary> public static string ToolDescription_RunTool_Support_for_the_GenePattern_version_of_QuaSAR_has_been_discontinued_ { get { return ResourceManager.GetString("ToolDescription_RunTool_Support_for_the_GenePattern_version_of_QuaSAR_has_been_di" + "scontinued_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Tools must have a command line. /// </summary> public static string ToolDescription_Validate_Tools_must_have_a_command_line { get { return ResourceManager.GetString("ToolDescription_Validate_Tools_must_have_a_command_line", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Tools must have a title. /// </summary> public static string ToolDescription_Validate_Tools_must_have_a_title { get { return ResourceManager.GetString("ToolDescription_Validate_Tools_must_have_a_title", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please enable these annotations and fill in the appropriate data in order to use the tool.. /// </summary> public static string ToolDescription_VerifyAnnotations_Please_enable_these_annotations_and_fill_in_the_appropriate_data_in_order_to_use_the_tool_ { get { return ResourceManager.GetString("ToolDescription_VerifyAnnotations_Please_enable_these_annotations_and_fill_in_the" + "_appropriate_data_in_order_to_use_the_tool_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please re-install the tool and try again.. /// </summary> public static string ToolDescription_VerifyAnnotations_Please_re_install_the_tool_and_try_again_ { get { return ResourceManager.GetString("ToolDescription_VerifyAnnotations_Please_re_install_the_tool_and_try_again_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires the use of the following annotations which are missing or improperly formatted. 
/// </summary> public static string ToolDescription_VerifyAnnotations_This_tool_requires_the_use_of_the_following_annotations_which_are_missing_or_improperly_formatted { get { return ResourceManager.GetString("ToolDescription_VerifyAnnotations_This_tool_requires_the_use_of_the_following_ann" + "otations_which_are_missing_or_improperly_formatted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires the use of the following annotations which are not enabled for this document. /// </summary> public static string ToolDescription_VerifyAnnotations_This_tool_requires_the_use_of_the_following_annotations_which_are_not_enabled_for_this_document { get { return ResourceManager.GetString("ToolDescription_VerifyAnnotations_This_tool_requires_the_use_of_the_following_ann" + "otations_which_are_not_enabled_for_this_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: {0} requires a report titled {1} which no longer exists. Please select a new report or import the report format. /// </summary> public static string ToolDescriptionHelpers_GetReport_Error_0_requires_a_report_titled_1_which_no_longer_exists__Please_select_a_new_report_or_import_the_report_format { get { return ResourceManager.GetString("ToolDescriptionHelpers_GetReport_Error_0_requires_a_report_titled_1_which_no_long" + "er_exists__Please_select_a_new_report_or_import_the_report_format", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing the file {0}. Tool {1} import failed. /// </summary> public static string ToolInstaller_AddToolFromProperties_Missing_the_file__0___Tool__1__import_failed { get { return ResourceManager.GetString("ToolInstaller_AddToolFromProperties_Missing_the_file__0___Tool__1__import_failed", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: It does not contain the required {0} in the {1} directory.. /// </summary> public static string ToolInstaller_GetToolInfo_Error__It_does_not_contain_the_required__0__in_the__1__directory_ { get { return ResourceManager.GetString("ToolInstaller_GetToolInfo_Error__It_does_not_contain_the_required__0__in_the__1__" + "directory_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to process the {0} file. /// </summary> public static string ToolInstaller_GetToolInfo_Failed_to_process_the__0__file { get { return ResourceManager.GetString("ToolInstaller_GetToolInfo_Failed_to_process_the__0__file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: It does not contain the required {0} directory.. /// </summary> public static string ToolInstaller_UnpackZipTool_Error__It_does_not_contain_the_required__0__directory_ { get { return ResourceManager.GetString("ToolInstaller_UnpackZipTool_Error__It_does_not_contain_the_required__0__directory" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: The {0} does not contain a valid {1} attribute.. /// </summary> public static string ToolInstaller_UnpackZipTool_Error__The__0__does_not_contain_a_valid__1__attribute_ { get { return ResourceManager.GetString("ToolInstaller_UnpackZipTool_Error__The__0__does_not_contain_a_valid__1__attribute" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error: There is a file missing the {0}.zip. 
/// </summary> public static string ToolInstaller_UnpackZipTool_Error__There_is_a_file_missing_the__0__zip { get { return ResourceManager.GetString("ToolInstaller_UnpackZipTool_Error__There_is_a_file_missing_the__0__zip", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The selected zip file is not a valid installable tool.. /// </summary> public static string ToolInstaller_UnpackZipTool_The_selected_zip_file_is_not_a_valid_installable_tool_ { get { return ResourceManager.GetString("ToolInstaller_UnpackZipTool_The_selected_zip_file_is_not_a_valid_installable_tool" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The selected zip file is not an installable tool.. /// </summary> public static string ToolInstaller_UnpackZipTool_The_selected_zip_file_is_not_an_installable_tool_ { get { return ResourceManager.GetString("ToolInstaller_UnpackZipTool_The_selected_zip_file_is_not_an_installable_tool_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Tool Uses R and specifies Packages without an {0} file in the tool-inf directory.. /// </summary> public static string ToolInstaller_UnpackZipTool_Tool_Uses_R_and_specifies_Packages_without_an__0__file_in_the_tool_inf_directory_ { get { return ResourceManager.GetString("ToolInstaller_UnpackZipTool_Tool_Uses_R_and_specifies_Packages_without_an__0__fil" + "e_in_the_tool_inf_directory_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The server returned an invalid response. It might be down for maintenance. Please check the Tool Store on the skyline.ms website.. /// </summary> public static string ToolInstallUI_InstallZipFromWeb_The_server_returned_an_invalid_response__It_might_be_down_for_maintenance__Please_check_the_Tool_Store_on_the_skyline_ms_website_ { get { return ResourceManager.GetString("ToolInstallUI_InstallZipFromWeb_The_server_returned_an_invalid_response__It_might" + "_be_down_for_maintenance__Please_check_the_Tool_Store_on_the_skyline_ms_website_" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Active Replicate Name. /// </summary> public static string ToolMacros__listArguments_Active_Replicate_Name { get { return ResourceManager.GetString("ToolMacros__listArguments_Active_Replicate_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Collected Arguments. /// </summary> public static string ToolMacros__listArguments_Collected_Arguments { get { return ResourceManager.GetString("ToolMacros__listArguments_Collected_Arguments", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Document Directory. /// </summary> public static string ToolMacros__listArguments_Document_Directory { get { return ResourceManager.GetString("ToolMacros__listArguments_Document_Directory", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Document File Name. /// </summary> public static string ToolMacros__listArguments_Document_File_Name { get { return ResourceManager.GetString("ToolMacros__listArguments_Document_File_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Document File Name Without Extension. 
/// </summary> public static string ToolMacros__listArguments_Document_File_Name_Without_Extension { get { return ResourceManager.GetString("ToolMacros__listArguments_Document_File_Name_Without_Extension", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Document Path. /// </summary> public static string ToolMacros__listArguments_Document_Path { get { return ResourceManager.GetString("ToolMacros__listArguments_Document_Path", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Input Report Temp Path. /// </summary> public static string ToolMacros__listArguments_Input_Report_Temp_Path { get { return ResourceManager.GetString("ToolMacros__listArguments_Input_Report_Temp_Path", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please select a peptide sequence before running this tool.. /// </summary> public static string ToolMacros__listArguments_Please_select_a_peptide_sequence_before_running_this_tool_ { get { return ResourceManager.GetString("ToolMacros__listArguments_Please_select_a_peptide_sequence_before_running_this_to" + "ol_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please select a protein before running this tool.. /// </summary> public static string ToolMacros__listArguments_Please_select_a_protein_before_running_this_tool_ { get { return ResourceManager.GetString("ToolMacros__listArguments_Please_select_a_protein_before_running_this_tool_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Selected Peptide Sequence. /// </summary> public static string ToolMacros__listArguments_Selected_Peptide_Sequence { get { return ResourceManager.GetString("ToolMacros__listArguments_Selected_Peptide_Sequence", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Selected Precursor. /// </summary> public static string ToolMacros__listArguments_Selected_Precursor { get { return ResourceManager.GetString("ToolMacros__listArguments_Selected_Precursor", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Selected Protein Name. /// </summary> public static string ToolMacros__listArguments_Selected_Protein_Name { get { return ResourceManager.GetString("ToolMacros__listArguments_Selected_Protein_Name", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skyline Connection. /// </summary> public static string ToolMacros__listArguments_Skyline_Connection { get { return ResourceManager.GetString("ToolMacros__listArguments_Skyline_Connection", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool does not provide the functionality for the Collected Arguments macro. Please edit the tool.. /// </summary> public static string ToolMacros__listArguments_This_tool_does_not_provide_the_functionality_for_the_Collected_Arguments_macro__Please_edit_the_tool_ { get { return ResourceManager.GetString("ToolMacros__listArguments_This_tool_does_not_provide_the_functionality_for_the_Co" + "llected_Arguments_macro__Please_edit_the_tool_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool is not an installed tool so $(ToolDir) cannot be used as a macro. Please edit the tool.. 
/// </summary> public static string ToolMacros__listArguments_This_tool_is_not_an_installed_tool_so_ToolDir_cannot_be_used_as_a_macro__Please_edit_the_tool_ { get { return ResourceManager.GetString("ToolMacros__listArguments_This_tool_is_not_an_installed_tool_so_ToolDir_cannot_be" + "_used_as_a_macro__Please_edit_the_tool_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires a Document Directory to run. /// </summary> public static string ToolMacros__listArguments_This_tool_requires_a_Document_Directory_to_run { get { return ResourceManager.GetString("ToolMacros__listArguments_This_tool_requires_a_Document_Directory_to_run", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires a Document File Name to run.. /// </summary> public static string ToolMacros__listArguments_This_tool_requires_a_Document_File_Name__to_run_ { get { return ResourceManager.GetString("ToolMacros__listArguments_This_tool_requires_a_Document_File_Name__to_run_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires a Document File Name to run. /// </summary> public static string ToolMacros__listArguments_This_tool_requires_a_Document_File_Name_to_run { get { return ResourceManager.GetString("ToolMacros__listArguments_This_tool_requires_a_Document_File_Name_to_run", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires a Document Path to run. /// </summary> public static string ToolMacros__listArguments_This_tool_requires_a_Document_Path_to_run { get { return ResourceManager.GetString("ToolMacros__listArguments_This_tool_requires_a_Document_Path_to_run", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires a Selected Peptide Sequence to run. /// </summary> public static string ToolMacros__listArguments_This_tool_requires_a_Selected_Peptide_Sequence_to_run { get { return ResourceManager.GetString("ToolMacros__listArguments_This_tool_requires_a_Selected_Peptide_Sequence_to_run", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires a Selected Protein to run.. /// </summary> public static string ToolMacros__listArguments_This_tool_requires_a_Selected_Protein_to_run_ { get { return ResourceManager.GetString("ToolMacros__listArguments_This_tool_requires_a_Selected_Protein_to_run_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Tool Directory. /// </summary> public static string ToolMacros__listArguments_Tool_Directory { get { return ResourceManager.GetString("ToolMacros__listArguments_Tool_Directory", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No Path Provided. Tool execution canceled.. /// </summary> public static string ToolMacros__listCommand__No_Path_Provided__Tool_execution_cancled_ { get { return ResourceManager.GetString("ToolMacros__listCommand__No_Path_Provided__Tool_execution_cancled_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Program Path. /// </summary> public static string ToolMacros__listCommand_Program_Path { get { return ResourceManager.GetString("ToolMacros__listCommand_Program_Path", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires a Program Path to run..
/// </summary> public static string ToolMacros__listCommand_This_tool_requires_a_Program_Path_to_run_ { get { return ResourceManager.GetString("ToolMacros__listCommand_This_tool_requires_a_Program_Path_to_run_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error exporting the report, tool execution canceled.. /// </summary> public static string ToolMacros_GetReportTempPath_Error_exporting_the_report__tool_execution_canceled_ { get { return ResourceManager.GetString("ToolMacros_GetReportTempPath_Error_exporting_the_report__tool_execution_canceled_" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The selected tool ( {0} ) requires a selected report. Please select a report for this tool.. /// </summary> public static string ToolMacros_GetReportTempPath_The_selected_tool_0_requires_a_selected_report_Please_select_a_report_for_this_tool_ { get { return ResourceManager.GetString("ToolMacros_GetReportTempPath_The_selected_tool_0_requires_a_selected_report_Pleas" + "e_select_a_report_for_this_tool_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please select a precursor before running this tool.. /// </summary> public static string ToolMacros_listArguments_Please_select_a_precursor_before_running_this_tool_ { get { return ResourceManager.GetString("ToolMacros_listArguments_Please_select_a_precursor_before_running_this_tool_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires a Selected Precursor to run. /// </summary> public static string ToolMacros_listArguments_This_tool_requires_a_Selected_Precursor_to_run { get { return ResourceManager.GetString("ToolMacros_listArguments_This_tool_requires_a_Selected_Precursor_to_run", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires a selected report. /// </summary> public static string ToolMacros_listArguments_This_tool_requires_a_selected_report { get { return ResourceManager.GetString("ToolMacros_listArguments_This_tool_requires_a_selected_report", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This tool requires an Active Replicate Name to run. /// </summary> public static string ToolMacros_listArguments_This_tool_requires_an_Active_Replicate_Name_to_run { get { return ResourceManager.GetString("ToolMacros_listArguments_This_tool_requires_an_Active_Replicate_Name_to_run", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to clear all saved settings? This will immediately return {0} to its original configuration and cannot be undone.. /// </summary> public static string ToolOptionsUI_btnResetSettings_Click_Are_you_sure_you_want_to_clear_all_saved_settings__This_will_immediately_return__0__to_its_original_configuration_and_cannot_be_undone_ { get { return ResourceManager.GetString("ToolOptionsUI_btnResetSettings_Click_Are_you_sure_you_want_to_clear_all_saved_set" + "tings__This_will_immediately_return__0__to_its_original_configuration_and_cannot" + "_be_undone_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Default ({0}). /// </summary> public static string ToolOptionsUI_ToolOptionsUI_Default___0__ { get { return ResourceManager.GetString("ToolOptionsUI_ToolOptionsUI_Default___0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Insert proteins. 
/// </summary> public static string ToolService_ImportFasta_Insert_proteins { get { return ResourceManager.GetString("ToolService_ImportFasta_Insert_proteins", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Insert Molecule Transition List. /// </summary> public static string ToolService_InsertSmallMoleculeTransitionList_Insert_Small_Molecule_Transition_List { get { return ResourceManager.GetString("ToolService_InsertSmallMoleculeTransitionList_Insert_Small_Molecule_Transition_Li" + "st", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Downloading {0}. /// </summary> public static string ToolStoreDlg_DownloadSelectedTool_Downloading__0_ { get { return ResourceManager.GetString("ToolStoreDlg_DownloadSelectedTool_Downloading__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Currently installed and fully updated (Version: {0}).. /// </summary> public static string ToolStoreDlg_FormatVersionText_Currently_installed_and_fully_updated__Version___0___ { get { return ResourceManager.GetString("ToolStoreDlg_FormatVersionText_Currently_installed_and_fully_updated__Version___0" + "___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Not currently installed. Version: {0} is available. /// </summary> public static string ToolStoreDlg_FormatVersionText_Not_currently_installed__Version___0__is_available { get { return ResourceManager.GetString("ToolStoreDlg_FormatVersionText_Not_currently_installed__Version___0__is_available" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Version {0} currently installed. Version {1} is available.. /// </summary> public static string ToolStoreDlg_FormatVersionText_Version__0__currently_installed__Version__1__is_available_ { get { return ResourceManager.GetString("ToolStoreDlg_FormatVersionText_Version__0__currently_installed__Version__1__is_av" + "ailable_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Install. /// </summary> public static string ToolStoreDlg_UpdateDisplayedTool_Install { get { return ResourceManager.GetString("ToolStoreDlg_UpdateDisplayedTool_Install", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Reinstall. /// </summary> public static string ToolStoreDlg_UpdateDisplayedTool_Reinstall { get { return ResourceManager.GetString("ToolStoreDlg_UpdateDisplayedTool_Reinstall", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Update. /// </summary> public static string ToolStoreDlg_UpdateDisplayedTool_Update { get { return ResourceManager.GetString("ToolStoreDlg_UpdateDisplayedTool_Update", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap ToolUpdateAvailable { get { object obj = ResourceManager.GetObject("ToolUpdateAvailable", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Please select at least one tool to update.. /// </summary> public static string ToolUpdatesDlg_btnUpdate_Click_Please_select_at_least_one_tool_to_update_ { get { return ResourceManager.GetString("ToolUpdatesDlg_btnUpdate_Click_Please_select_at_least_one_tool_to_update_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to download updates for the following packages. 
/// </summary> public static string ToolUpdatesDlg_DisplayDownloadSummary_Failed_to_download_updates_for_the_following_packages { get { return ResourceManager.GetString("ToolUpdatesDlg_DisplayDownloadSummary_Failed_to_download_updates_for_the_followin" + "g_packages", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to update the following tool. /// </summary> public static string ToolUpdatesDlg_DisplayInstallSummary_Failed_to_update_the_following_tool { get { return ResourceManager.GetString("ToolUpdatesDlg_DisplayInstallSummary_Failed_to_update_the_following_tool", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed to update the following tools. /// </summary> public static string ToolUpdatesDlg_DisplayInstallSummary_Failed_to_update_the_following_tools { get { return ResourceManager.GetString("ToolUpdatesDlg_DisplayInstallSummary_Failed_to_update_the_following_tools", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Successfully updated the following tool. /// </summary> public static string ToolUpdatesDlg_DisplayInstallSummary_Successfully_updated_the_following_tool { get { return ResourceManager.GetString("ToolUpdatesDlg_DisplayInstallSummary_Successfully_updated_the_following_tool", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Successfully updated the following tools. /// </summary> public static string ToolUpdatesDlg_DisplayInstallSummary_Successfully_updated_the_following_tools { get { return ResourceManager.GetString("ToolUpdatesDlg_DisplayInstallSummary_Successfully_updated_the_following_tools", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Downloading updates for {0}. /// </summary> public static string ToolUpdatesDlg_DownloadTools_Downloading_updates_for__0_ { get { return ResourceManager.GetString("ToolUpdatesDlg_DownloadTools_Downloading_updates_for__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Downloading Updates. /// </summary> public static string ToolUpdatesDlg_GetTools_Downloading_Updates { get { return ResourceManager.GetString("ToolUpdatesDlg_GetTools_Downloading_Updates", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Downloading Updates. /// </summary> public static string ToolUpdatesDlg_GetToolsToUpdate_Downloading_Updates { get { return ResourceManager.GetString("ToolUpdatesDlg_GetToolsToUpdate_Downloading_Updates", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Installing updates to {0}. /// </summary> public static string ToolUpdatesDlg_InstallUpdates_Installing_updates_to__0_ { get { return ResourceManager.GetString("ToolUpdatesDlg_InstallUpdates_Installing_updates_to__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to User cancelled installation. /// </summary> public static string ToolUpdatesDlg_InstallUpdates_User_cancelled_installation { get { return ResourceManager.GetString("ToolUpdatesDlg_InstallUpdates_User_cancelled_installation", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to delete the transition &apos;{0}&apos;?. 
/// </summary> public static string Transition_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_the_transition___0___ { get { return ResourceManager.GetString("Transition_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_the_transition__" + "_0___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Are you sure you want to delete these {0} transitions?. /// </summary> public static string Transition_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__transitions_ { get { return ResourceManager.GetString("Transition_GetDeleteConfirmation_Are_you_sure_you_want_to_delete_these__0__transi" + "tions_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to precursor. /// </summary> public static string Transition_ToString_precursor { get { return ResourceManager.GetString("Transition_ToString_precursor", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A transition of ion type {0} can&apos;t have a custom ion. /// </summary> public static string Transition_Validate_A_transition_of_ion_type__0__can_t_have_a_custom_ion { get { return ResourceManager.GetString("Transition_Validate_A_transition_of_ion_type__0__can_t_have_a_custom_ion", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A transition of ion type {0} must have a custom ion.. /// </summary> public static string Transition_Validate_A_transition_of_ion_type__0__must_have_a_custom_ion_ { get { return ResourceManager.GetString("Transition_Validate_A_transition_of_ion_type__0__must_have_a_custom_ion_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fragment decoy mass shift {0} must be between {1} and {2}.. /// </summary> public static string Transition_Validate_Fragment_decoy_mass_shift__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("Transition_Validate_Fragment_decoy_mass_shift__0__must_be_between__1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fragment ordinal {0} exceeds the maximum {1} for the peptide {2}.. /// </summary> public static string Transition_Validate_Fragment_ordinal__0__exceeds_the_maximum__1__for_the_peptide__2__ { get { return ResourceManager.GetString("Transition_Validate_Fragment_ordinal__0__exceeds_the_maximum__1__for_the_peptide_" + "_2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Fragment ordinal {0} may not be less than 1.. /// </summary> public static string Transition_Validate_Fragment_ordinal__0__may_not_be_less_than__1__ { get { return ResourceManager.GetString("Transition_Validate_Fragment_ordinal__0__may_not_be_less_than__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor and product ion polarity do not agree.. /// </summary> public static string Transition_Validate_Precursor_and_product_ion_polarity_do_not_agree_ { get { return ResourceManager.GetString("Transition_Validate_Precursor_and_product_ion_polarity_do_not_agree_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor charge {0} must be between {1} and {2}.. 
/// </summary> public static string Transition_Validate_Precursor_charge__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("Transition_Validate_Precursor_charge__0__must_be_between__1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor charge {0} must be non-zero and between {1} and {2}.. /// </summary> public static string Transition_Validate_Precursor_charge__0__must_be_non_zero_and_between__1__and__2__ { get { return ResourceManager.GetString("Transition_Validate_Precursor_charge__0__must_be_non_zero_and_between__1__and__2_" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor ordinal must be the length of the peptide.. /// </summary> public static string Transition_Validate_Precursor_ordinal_must_be_the_lenght_of_the_peptide { get { return ResourceManager.GetString("Transition_Validate_Precursor_ordinal_must_be_the_lenght_of_the_peptide", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product ion charge {0} must be between {1} and {2}.. /// </summary> public static string Transition_Validate_Product_ion_charge__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("Transition_Validate_Product_ion_charge__0__must_be_between__1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product ion charge {0} must be non-zero and between {1} and {2}.. /// </summary> public static string Transition_Validate_Product_ion_charge__0__must_be_non_zero_and_between__1__and__2__ { get { return ResourceManager.GetString("Transition_Validate_Product_ion_charge__0__must_be_non_zero_and_between__1__and__" + "2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Imported spectrum appears to be missing m/z or intensity values ({0} != {1}). /// </summary> public static string TransitionBinner_BinData_ { get { return ResourceManager.GetString("TransitionBinner_BinData_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Duplicate or out of order peak in transition {0}. /// </summary> public static string TransitionDocNode_ChangePeak_Duplicate_or_out_of_order_peak_in_transition__0_ { get { return ResourceManager.GetString("TransitionDocNode_ChangePeak_Duplicate_or_out_of_order_peak_in_transition__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1 ion. /// </summary> public static string TransitionFilter_FragmentEndFinders_1_ion { get { return ResourceManager.GetString("TransitionFilter_FragmentEndFinders_1_ion", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 2 ions. /// </summary> public static string TransitionFilter_FragmentEndFinders_2_ions { get { return ResourceManager.GetString("TransitionFilter_FragmentEndFinders_2_ions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 3 ions. /// </summary> public static string TransitionFilter_FragmentEndFinders_3_ions { get { return ResourceManager.GetString("TransitionFilter_FragmentEndFinders_3_ions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 4 ions. /// </summary> public static string TransitionFilter_FragmentEndFinders_4_ions { get { return ResourceManager.GetString("TransitionFilter_FragmentEndFinders_4_ions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 5 ions. 
/// </summary> public static string TransitionFilter_FragmentEndFinders_5_ions { get { return ResourceManager.GetString("TransitionFilter_FragmentEndFinders_5_ions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 6 ions. /// </summary> public static string TransitionFilter_FragmentEndFinders_6_ions { get { return ResourceManager.GetString("TransitionFilter_FragmentEndFinders_6_ions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to last ion. /// </summary> public static string TransitionFilter_FragmentEndFinders_last_ion { get { return ResourceManager.GetString("TransitionFilter_FragmentEndFinders_last_ion", resourceCulture); } } /// <summary> /// Looks up a localized string similar to last ion - 1. /// </summary> public static string TransitionFilter_FragmentEndFinders_last_ion_minus_1 { get { return ResourceManager.GetString("TransitionFilter_FragmentEndFinders_last_ion_minus_1", resourceCulture); } } /// <summary> /// Looks up a localized string similar to last ion - 2. /// </summary> public static string TransitionFilter_FragmentEndFinders_last_ion_minus_2 { get { return ResourceManager.GetString("TransitionFilter_FragmentEndFinders_last_ion_minus_2", resourceCulture); } } /// <summary> /// Looks up a localized string similar to last ion - 3. /// </summary> public static string TransitionFilter_FragmentEndFinders_last_ion_minus_3 { get { return ResourceManager.GetString("TransitionFilter_FragmentEndFinders_last_ion_minus_3", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unsupported first fragment name {0}.. /// </summary> public static string TransitionFilter_FragmentRangeFirstName_Unsupported_first_fragment_name__0__ { get { return ResourceManager.GetString("TransitionFilter_FragmentRangeFirstName_Unsupported_first_fragment_name__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unsupported last fragment name {0}.. /// </summary> public static string TransitionFilter_FragmentRangeLastName_Unsupported_last_fragment_name__0__ { get { return ResourceManager.GetString("TransitionFilter_FragmentRangeLastName_Unsupported_last_fragment_name__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to ion 1. /// </summary> public static string TransitionFilter_FragmentStartFinders_ion_1 { get { return ResourceManager.GetString("TransitionFilter_FragmentStartFinders_ion_1", resourceCulture); } } /// <summary> /// Looks up a localized string similar to ion 2. /// </summary> public static string TransitionFilter_FragmentStartFinders_ion_2 { get { return ResourceManager.GetString("TransitionFilter_FragmentStartFinders_ion_2", resourceCulture); } } /// <summary> /// Looks up a localized string similar to ion 3. /// </summary> public static string TransitionFilter_FragmentStartFinders_ion_3 { get { return ResourceManager.GetString("TransitionFilter_FragmentStartFinders_ion_3", resourceCulture); } } /// <summary> /// Looks up a localized string similar to ion 4. /// </summary> public static string TransitionFilter_FragmentStartFinders_ion_4 { get { return ResourceManager.GetString("TransitionFilter_FragmentStartFinders_ion_4", resourceCulture); } } /// <summary> /// Looks up a localized string similar to m/z &gt; precursor. 
/// </summary> public static string TransitionFilter_FragmentStartFinders_m_z_precursor { get { return ResourceManager.GetString("TransitionFilter_FragmentStartFinders_m_z_precursor", resourceCulture); } } /// <summary> /// Looks up a localized string similar to (m/z &gt; precursor) - 1. /// </summary> public static string TransitionFilter_FragmentStartFinders_m_z_precursor_minus_1 { get { return ResourceManager.GetString("TransitionFilter_FragmentStartFinders_m_z_precursor_minus_1", resourceCulture); } } /// <summary> /// Looks up a localized string similar to (m/z &gt; precursor) - 2. /// </summary> public static string TransitionFilter_FragmentStartFinders_m_z_precursor_minus_2 { get { return ResourceManager.GetString("TransitionFilter_FragmentStartFinders_m_z_precursor_minus_2", resourceCulture); } } /// <summary> /// Looks up a localized string similar to (m/z &gt; precursor) + 1. /// </summary> public static string TransitionFilter_FragmentStartFinders_m_z_precursor_plus_1 { get { return ResourceManager.GetString("TransitionFilter_FragmentStartFinders_m_z_precursor_plus_1", resourceCulture); } } /// <summary> /// Looks up a localized string similar to (m/z &gt; precursor) + 2. /// </summary> public static string TransitionFilter_FragmentStartFinders_m_z_precursor_plus_2 { get { return ResourceManager.GetString("TransitionFilter_FragmentStartFinders_m_z_precursor_plus_2", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unknown fragment name in Transition Settings &apos;{0}&apos;. /// </summary> public static string TransitionFilter_GetEndFragmentFinder_Unknown_fragment_name_in_Transition_Settings__0__ { get { return ResourceManager.GetString("TransitionFilter_GetEndFragmentFinder_Unknown_fragment_name_in_Transition_Setting" + "s__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The label {0} is not a valid end fragment filter.. /// </summary> public static string TransitionFilter_GetEndFragmentNameFromLabel_The_label__0__is_not_a_valid_end_fragment_filter_ { get { return ResourceManager.GetString("TransitionFilter_GetEndFragmentNameFromLabel_The_label__0__is_not_a_valid_end_fra" + "gment_filter_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The label {0} is not a valid start fragment filter.. /// </summary> public static string TransitionFilter_GetStartFragmentNameFromLabel_The_label__0__is_not_a_valid_start_fragment_filter_ { get { return ResourceManager.GetString("TransitionFilter_GetStartFragmentNameFromLabel_The_label__0__is_not_a_valid_start" + "_fragment_filter_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to At least one ion type is required.. /// </summary> public static string TransitionFilter_IonTypes_At_least_one_ion_type_is_required { get { return ResourceManager.GetString("TransitionFilter_IonTypes_At_least_one_ion_type_is_required", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor charges. /// </summary> public static string TransitionFilter_PrecursorCharges_Precursor_charges { get { return ResourceManager.GetString("TransitionFilter_PrecursorCharges_Precursor_charges", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product ion charges. 
/// </summary> public static string TransitionFilter_ProductCharges_Product_ion_charges { get { return ResourceManager.GetString("TransitionFilter_ProductCharges_Product_ion_charges", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Molecule precursor adducts. /// </summary> public static string TransitionFilter_SmallMoleculePrecursorAdducts_Small_molecule_precursor_adducts { get { return ResourceManager.GetString("TransitionFilter_SmallMoleculePrecursorAdducts_Small_molecule_precursor_adducts", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A precursor exclusion window must be between {0} and {1}.. /// </summary> public static string TransitionFilter_Validate_A_precursor_exclusion_window_must_be_between__0__and__1__ { get { return ResourceManager.GetString("TransitionFilter_Validate_A_precursor_exclusion_window_must_be_between__0__and__1" + "__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} cannot be empty.. /// </summary> public static string TransitionFilter_ValidateCharges__0__cannot_be_empty { get { return ResourceManager.GetString("TransitionFilter_ValidateCharges__0__cannot_be_empty", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid charge {1} found. {0} must be between {2} and {3}.. /// </summary> public static string TransitionFilter_ValidateCharges_Invalid_charge__1__found__0__must_be_between__2__and__3__ { get { return ResourceManager.GetString("TransitionFilter_ValidateCharges_Invalid_charge__1__found__0__must_be_between__2_" + "_and__3__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor charges specified charge {0} more than once.. /// </summary> public static string TransitionFilter_ValidateCharges_Precursor_charges_specified_charge__0__more_than_once { get { return ResourceManager.GetString("TransitionFilter_ValidateCharges_Precursor_charges_specified_charge__0__more_than" + "_once", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Tried to create an isolation scheme for non-DIA mode. /// </summary> public static string TransitionFullScan_CreateIsolationSchemeForFilter_Tried_to_create_an_isolation_scheme_for_non_DIA_mode { get { return ResourceManager.GetString("TransitionFullScan_CreateIsolationSchemeForFilter_Tried_to_create_an_isolation_sc" + "heme_for_non_DIA_mode", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Tried to create an isolation scheme without precursor filter. /// </summary> public static string TransitionFullScan_CreateIsolationSchemeForFilter_Tried_to_create_an_isolation_scheme_without_precursor_filter { get { return ResourceManager.GetString("TransitionFullScan_CreateIsolationSchemeForFilter_Tried_to_create_an_isolation_sc" + "heme_without_precursor_filter", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An isolation window width value is not allowed in Targeted mode.. /// </summary> public static string TransitionFullScan_DoValidate_An_isolation_window_width_value_is_not_allowed_in_Targeted_mode { get { return ResourceManager.GetString("TransitionFullScan_DoValidate_An_isolation_window_width_value_is_not_allowed_in_T" + "argeted_mode", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An isolation window width value is required in DIA mode.. 
/// </summary> public static string TransitionFullScan_DoValidate_An_isolation_window_width_value_is_required_in_DIA_mode { get { return ResourceManager.GetString("TransitionFullScan_DoValidate_An_isolation_window_width_value_is_required_in_DIA_" + "mode", resourceCulture); } } /// <summary> /// Looks up a localized string similar to For MS1 filtering with a QIT mass analyzer only 1 isotope peak is supported.. /// </summary> public static string TransitionFullScan_DoValidate_For_MS1_filtering_with_a_QIT_mass_analyzer_only_1_isotope_peak_is_supported { get { return ResourceManager.GetString("TransitionFullScan_DoValidate_For_MS1_filtering_with_a_QIT_mass_analyzer_only_1_i" + "sotope_peak_is_supported", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No other full-scan MS1 filter settings are allowed when no precursor isotopes are included.. /// </summary> public static string TransitionFullScan_DoValidate_No_other_full_scan_MS1_filter_settings_are_allowed_when_no_precursor_isotopes_are_included { get { return ResourceManager.GetString("TransitionFullScan_DoValidate_No_other_full_scan_MS1_filter_settings_are_allowed_" + "when_no_precursor_isotopes_are_included", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The precursor isotope count for MS1 filtering must be between {0} and {1} peaks.. /// </summary> public static string TransitionFullScan_DoValidate_The_precursor_isotope_count_for_MS1_filtering_must_be_between__0__and__1__peaks { get { return ResourceManager.GetString("TransitionFullScan_DoValidate_The_precursor_isotope_count_for_MS1_filtering_must_" + "be_between__0__and__1__peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The precursor isotope percent for MS1 filtering must be between {0}% and {1}% of the base peak.. /// </summary> public static string TransitionFullScan_DoValidate_The_precursor_isotope_percent_for_MS1_filtering_must_be_between__0___and__1___of_the_base_peak { get { return ResourceManager.GetString("TransitionFullScan_DoValidate_The_precursor_isotope_percent_for_MS1_filtering_mus" + "t_be_between__0___and__1___of_the_base_peak", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Mass accuracy must be between {0} and {1} for centroided data.. /// </summary> public static string TransitionFullScan_ValidateRes_Mass_accuracy_must_be_between__0__and__1__for_centroided_data_ { get { return ResourceManager.GetString("TransitionFullScan_ValidateRes_Mass_accuracy_must_be_between__0__and__1__for_cent" + "roided_data_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Resolution must be between {0} and {1} for QIT.. /// </summary> public static string TransitionFullScan_ValidateRes_Resolution_must_be_between__0__and__1__for_QIT_ { get { return ResourceManager.GetString("TransitionFullScan_ValidateRes_Resolution_must_be_between__0__and__1__for_QIT_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Resolving power must be between {0} and {1} for {2}.. /// </summary> public static string TransitionFullScan_ValidateRes_Resolving_power_must_be_between__0__and__1__for__2__ { get { return ResourceManager.GetString("TransitionFullScan_ValidateRes_Resolving_power_must_be_between__0__and__1__for__2" + "__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The m/z value at which the resolving power is calibrated is required for {0}.. 
/// </summary> public static string TransitionFullScan_ValidateRes_The_mz_value_at_which_the_resolving_power_is_calibrated_is_required_for__0__ { get { return ResourceManager.GetString("TransitionFullScan_ValidateRes_The_mz_value_at_which_the_resolving_power_is_calib" + "rated_is_required_for__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected resolving power m/z value for {0}. /// </summary> public static string TransitionFullScan_ValidateRes_Unexpected_resolving_power_mz_value_for__0__ { get { return ResourceManager.GetString("TransitionFullScan_ValidateRes_Unexpected_resolving_power_mz_value_for__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Results {0:0.##},{1:0.##} m/z. /// </summary> public static string TransitionFullScanCreateIsolationSchemeForFilterResults__0__0__1_0__Th { get { return ResourceManager.GetString("TransitionFullScanCreateIsolationSchemeForFilterResults__0__0__1_0__Th", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Results {0:0.##} m/z. /// </summary> public static string TransitionFullScanCreateIsolationSchemeForFilterResults__0__0__Th { get { return ResourceManager.GetString("TransitionFullScanCreateIsolationSchemeForFilterResults__0__0__Th", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap TransitionGroup { get { object obj = ResourceManager.GetObject("TransitionGroup", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Precursor charge {0} must be between {1} and {2}.. /// </summary> public static string TransitionGroup_Validate_Precursor_charge__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("TransitionGroup_Validate_Precursor_charge__0__must_be_between__1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor decoy mass shift {0} must be between {1} and {2}.. /// </summary> public static string TransitionGroup_Validate_Precursor_decoy_mass_shift__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("TransitionGroup_Validate_Precursor_decoy_mass_shift__0__must_be_between__1__and__" + "2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Grouping transitions from file {0} with file {1}. /// </summary> public static string TransitionGroupChromInfoCalculator_AddChromInfo_Grouping_transitions_from_file__0__with_file__1__ { get { return ResourceManager.GetString("TransitionGroupChromInfoCalculator_AddChromInfo_Grouping_transitions_from_file__0" + "__with_file__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempt to add integration information for missing file.. /// </summary> public static string TransitionGroupChromInfoListCalculator_AddChromInfoList_Attempt_to_add_integration_information_for_missing_file { get { return ResourceManager.GetString("TransitionGroupChromInfoListCalculator_AddChromInfoList_Attempt_to_add_integratio" + "n_information_for_missing_file", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. 
/// </summary> public static System.Drawing.Bitmap TransitionGroupDecoy { get { object obj = ResourceManager.GetObject("TransitionGroupDecoy", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Missing End Time In Change Peak. /// </summary> public static string TransitionGroupDocNode_ChangePeak_Missing_End_Time_In_Change_Peak { get { return ResourceManager.GetString("TransitionGroupDocNode_ChangePeak_Missing_End_Time_In_Change_Peak", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Missing Start Time in Change Peak. /// </summary> public static string TransitionGroupDocNode_ChangePeak_Missing_Start_Time_in_Change_Peak { get { return ResourceManager.GetString("TransitionGroupDocNode_ChangePeak_Missing_Start_Time_in_Change_Peak", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No peak found at {0:F01}. /// </summary> public static string TransitionGroupDocNode_ChangePeak_No_peak_found_at__0__ { get { return ResourceManager.GetString("TransitionGroupDocNode_ChangePeak_No_peak_found_at__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File Id {0} does not match any file in document.. /// </summary> public static string TransitionGroupDocNode_ChangePrecursorAnnotations_File_Id__0__does_not_match_any_file_in_document_ { get { return ResourceManager.GetString("TransitionGroupDocNode_ChangePrecursorAnnotations_File_Id__0__does_not_match_any_" + "file_in_document_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap TransitionGroupLib { get { object obj = ResourceManager.GetObject("TransitionGroupLib", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap TransitionGroupLibDecoy { get { object obj = ResourceManager.GetObject("TransitionGroupLibDecoy", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Invalid attempt to get choices for a node that has not been added to the tree yet.. /// </summary> public static string TransitionGroupTreeNode_GetChoices_Invalid_attempt_to_get_choices_for_a_node_that_has_not_been_added_to_the_tree_yet { get { return ResourceManager.GetString("TransitionGroupTreeNode_GetChoices_Invalid_attempt_to_get_choices_for_a_node_that" + "_has_not_been_added_to_the_tree_yet", resourceCulture); } } /// <summary> /// Looks up a localized string similar to total ratio {0}. /// </summary> public static string TransitionGroupTreeNode_GetResultsText_total_ratio__0__ { get { return ResourceManager.GetString("TransitionGroupTreeNode_GetResultsText_total_ratio__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Decoy Mass Shift. /// </summary> public static string TransitionGroupTreeNode_RenderTip_Decoy_Mass_Shift { get { return ResourceManager.GetString("TransitionGroupTreeNode_RenderTip_Decoy_Mass_Shift", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Modified. /// </summary> public static string TransitionGroupTreeNode_RenderTip_Modified { get { return ResourceManager.GetString("TransitionGroupTreeNode_RenderTip_Modified", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Molecule. 
/// </summary> public static string TransitionGroupTreeNode_RenderTip_Molecule { get { return ResourceManager.GetString("TransitionGroupTreeNode_RenderTip_Molecule", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor charge. /// </summary> public static string TransitionGroupTreeNode_RenderTip_Precursor_charge { get { return ResourceManager.GetString("TransitionGroupTreeNode_RenderTip_Precursor_charge", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor m+h. /// </summary> public static string TransitionGroupTreeNode_RenderTip_Precursor_mh { get { return ResourceManager.GetString("TransitionGroupTreeNode_RenderTip_Precursor_mh", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor m/z. /// </summary> public static string TransitionGroupTreeNode_RenderTip_Precursor_mz { get { return ResourceManager.GetString("TransitionGroupTreeNode_RenderTip_Precursor_mz", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Synchronize isotope label types. /// </summary> public static string TransitionGroupTreeNode_SynchSiblingsLabel_Synchronize_isotope_label_types { get { return ResourceManager.GetString("TransitionGroupTreeNode_SynchSiblingsLabel_Synchronize_isotope_label_types", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Precursor. /// </summary> public static string TransitionGroupTreeNode_Title { get { return ResourceManager.GetString("TransitionGroupTreeNode_Title", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid loss index {0} for modification {1}. /// </summary> public static string TransitionInfo_ReadTransitionLosses_Invalid_loss_index__0__for_modification__1__ { get { return ResourceManager.GetString("TransitionInfo_ReadTransitionLosses_Invalid_loss_index__0__for_modification__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No modification named {0} was found in this document.. /// </summary> public static string TransitionInfo_ReadTransitionLosses_No_modification_named__0__was_found_in_this_document { get { return ResourceManager.GetString("TransitionInfo_ReadTransitionLosses_No_modification_named__0__was_found_in_this_d" + "ocument", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The reporter ion {0} was not found in the transition filter settings.. /// </summary> public static string TransitionInfo_ReadXmlAttributes_The_reporter_ion__0__was_not_found_in_the_transition_filter_settings_ { get { return ResourceManager.GetString("TransitionInfo_ReadXmlAttributes_The_reporter_ion__0__was_not_found_in_the_transi" + "tion_filter_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Instrument maximum m/z value {0} is less than {1} from minimum {2}.. /// </summary> public static string TransitionInstrument_DoValidate_Instrument_maximum_m_z_value__0__is_less_than__1__from_minimum__2__ { get { return ResourceManager.GetString("TransitionInstrument_DoValidate_Instrument_maximum_m_z_value__0__is_less_than__1_" + "_from_minimum__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Instrument maximum m/z exceeds allowable maximum {0}.. 
/// </summary> public static string TransitionInstrument_DoValidate_Instrument_maximum_mz_exceeds_allowable_maximum__0__ { get { return ResourceManager.GetString("TransitionInstrument_DoValidate_Instrument_maximum_mz_exceeds_allowable_maximum__" + "0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Instrument minimum m/z value {0} must be between {1} and {2}.. /// </summary> public static string TransitionInstrument_DoValidate_Instrument_minimum_mz_value__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("TransitionInstrument_DoValidate_Instrument_minimum_mz_value__0__must_be_between__" + "1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No other full-scan MS/MS filter settings are allowed when precursor filter is none.. /// </summary> public static string TransitionInstrument_DoValidate_No_other_full_scan_MS_MS_filter_settings_are_allowed_when_precursor_filter_is_none { get { return ResourceManager.GetString("TransitionInstrument_DoValidate_No_other_full_scan_MS_MS_filter_settings_are_allo" + "wed_when_precursor_filter_is_none", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The allowable retention time range {0} to {1} must be at least {2} minutes apart.. /// </summary> public static string TransitionInstrument_DoValidate_The_allowable_retention_time_range__0__to__1__must_be_at_least__2__minutes_apart_ { get { return ResourceManager.GetString("TransitionInstrument_DoValidate_The_allowable_retention_time_range__0__to__1__mus" + "t_be_at_least__2__minutes_apart_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The maximum number of inclusions {0} must be between {1} and {2}.. /// </summary> public static string TransitionInstrument_DoValidate_The_maximum_number_of_inclusions__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("TransitionInstrument_DoValidate_The_maximum_number_of_inclusions__0__must_be_betw" + "een__1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The maximum number of transitions {0} must be between {1} and {2}.. /// </summary> public static string TransitionInstrument_DoValidate_The_maximum_number_of_transitions__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("TransitionInstrument_DoValidate_The_maximum_number_of_transitions__0__must_be_bet" + "ween__1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The maximum retention time {0} must be between {1} and {2}.. /// </summary> public static string TransitionInstrument_DoValidate_The_maximum_retention_time__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("TransitionInstrument_DoValidate_The_maximum_retention_time__0__must_be_between__1" + "__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The minimum retention time {0} must be between {1} and {2}.. /// </summary> public static string TransitionInstrument_DoValidate_The_minimum_retention_time__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("TransitionInstrument_DoValidate_The_minimum_retention_time__0__must_be_between__1" + "__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The m/z match tolerance {0} must be between {1} and {2}.. 
/// </summary> public static string TransitionInstrument_DoValidate_The_mz_match_tolerance__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("TransitionInstrument_DoValidate_The_mz_match_tolerance__0__must_be_between__1__an" + "d__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The precursor m/z filter must be between {0} and {1}. /// </summary> public static string TransitionInstrument_DoValidate_The_precursor_mz_filter_must_be_between__0__and__1__ { get { return ResourceManager.GetString("TransitionInstrument_DoValidate_The_precursor_mz_filter_must_be_between__0__and__" + "1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library ion count value {0} must be between {1} and {2}.. /// </summary> public static string TransitionLibraries_DoValidate_Library_ion_count_value__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("TransitionLibraries_DoValidate_Library_ion_count_value__0__must_be_between__1__an" + "d__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library ion count value {0} must not be less than min ion count value {1}.. /// </summary> public static string TransitionLibraries_DoValidate_Library_ion_count_value__0__must_not_be_less_than_min_ion_count_value__1__ { get { return ResourceManager.GetString("TransitionLibraries_DoValidate_Library_ion_count_value__0__must_not_be_less_than_" + "min_ion_count_value__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library ion match tolerance value {0} must be between {1} and {2}.. /// </summary> public static string TransitionLibraries_DoValidate_Library_ion_match_tolerance_value__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("TransitionLibraries_DoValidate_Library_ion_match_tolerance_value__0__must_be_betw" + "een__1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library min ion count value {0} must be between {1} and {2}.. /// </summary> public static string TransitionLibraries_DoValidate_Library_min_ion_count_value__0__must_be_between__1__and__2__ { get { return ResourceManager.GetString("TransitionLibraries_DoValidate_Library_min_ion_count_value__0__must_be_between__1" + "__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Expected loss {0} not found in the modification {1}. /// </summary> public static string TransitionLoss_LossIndex_Expected_loss__0__not_found_in_the_modification__1_ { get { return ResourceManager.GetString("TransitionLoss_LossIndex_Expected_loss__0__not_found_in_the_modification__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transition prediction requires a collision energy regression function.. /// </summary> public static string TransitionPrediction_DoValidate_Transition_prediction_requires_a_collision_energy_regression_function { get { return ResourceManager.GetString("TransitionPrediction_DoValidate_Transition_prediction_requires_a_collision_energy" + "_regression_function", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Compensation voltage parameters must be selected in the Prediction tab of the Transition Settings in order to import optimization data for compensation voltage. 
/// </summary> public static string TransitionPrediction_GetOptimizeFunction_Compensation_voltage_parameters_must_be_selected_in_the_Prediction_tab_of_the_Transition_Settings_in_order_to_import_optimization_data_for_compensation_voltage { get { return ResourceManager.GetString("TransitionPrediction_GetOptimizeFunction_Compensation_voltage_parameters_must_be_" + "selected_in_the_Prediction_tab_of_the_Transition_Settings_in_order_to_import_opt" + "imization_data_for_compensation_voltage", resourceCulture); } } /// <summary> /// Looks up a localized string similar to High resolution MS1 filtering requires use of monoisotopic precursor masses.. /// </summary> public static string TransitionSettings_DoValidate_High_resolution_MS1_filtering_requires_use_of_monoisotopic_precursor_masses { get { return ResourceManager.GetString("TransitionSettings_DoValidate_High_resolution_MS1_filtering_requires_use_of_monoi" + "sotopic_precursor_masses", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The instrument&apos;s firmware inclusion limit must be specified before doing a multiplexed DIA scan.. /// </summary> public static string TransitionSettings_DoValidate_The_instrument_s_firmware_inclusion_limit_must_be_specified_before_doing_a_multiplexed_DIA_scan { get { return ResourceManager.GetString("TransitionSettings_DoValidate_The_instrument_s_firmware_inclusion_limit_must_be_s" + "pecified_before_doing_a_multiplexed_DIA_scan", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Max m/z {0} must not be less than min m/z {1}.. /// </summary> public static string TransitionSettingsControl_GetTransitionSettings_Max_m_z__0__must_not_be_less_than_min_m_z__1__ { get { return ResourceManager.GetString("TransitionSettingsControl_GetTransitionSettings_Max_m_z__0__must_not_be_less_than" + "_min_m_z__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} must contain a comma separated list of integers describing charge states between {1} and {2}.. /// </summary> public static string TransitionSettingsControl_ValidateAdductListTextBox__0__must_contain_a_comma_separated_list_of_integers_describing_charge_states_between__1__and__2__ { get { return ResourceManager.GetString("TransitionSettingsControl_ValidateAdductListTextBox__0__must_contain_a_comma_sepa" + "rated_list_of_integers_describing_charge_states_between__1__and__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Min % of base pea&amp;k:. /// </summary> public static string TransitionSettingsUI_comboPrecursorIsotopes_SelectedIndexChanged_Min_percent_of_base_peak { get { return ResourceManager.GetString("TransitionSettingsUI_comboPrecursorIsotopes_SelectedIndexChanged_Min_percent_of_b" + "ase_peak", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Pea&amp;ks:. /// </summary> public static string TransitionSettingsUI_comboPrecursorIsotopes_SelectedIndexChanged_Peaks { get { return ResourceManager.GetString("TransitionSettingsUI_comboPrecursorIsotopes_SelectedIndexChanged_Peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An isolation scheme is required to match multiple precursors.. 
/// </summary> public static string TransitionSettingsUI_OkDialog_An_isolation_scheme_is_required_to_match_multiple_precursors { get { return ResourceManager.GetString("TransitionSettingsUI_OkDialog_An_isolation_scheme_is_required_to_match_multiple_p" + "recursors", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Before performing a multiplexed DIA scan, the instrument&apos;s firmware inclusion limit must be specified.. /// </summary> public static string TransitionSettingsUI_OkDialog_Before_performing_a_multiplexed_DIA_scan_the_instrument_s_firmware_inclusion_limit_must_be_specified { get { return ResourceManager.GetString("TransitionSettingsUI_OkDialog_Before_performing_a_multiplexed_DIA_scan_the_instru" + "ment_s_firmware_inclusion_limit_must_be_specified", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot use DIA window for precursor exclusion when isolation scheme does not contain prespecified windows. Please select an isolation scheme with prespecified windows.. /// </summary> public static string TransitionSettingsUI_OkDialog_Cannot_use_DIA_window_for_precursor_exclusion_when_isolation_scheme_does_not_contain_prespecified_windows___Please_select_an_isolation_scheme_with_prespecified_windows_ { get { return ResourceManager.GetString("TransitionSettingsUI_OkDialog_Cannot_use_DIA_window_for_precursor_exclusion_when_" + "isolation_scheme_does_not_contain_prespecified_windows___Please_select_an_isolat" + "ion_scheme_with_prespecified_windows_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot use DIA window for precursor exclusion when &apos;All Ions&apos; is selected as the isolation scheme. To use the DIA window for precursor exclusion, change the isolation scheme in the Full Scan settings.. /// </summary> public static string TransitionSettingsUI_OkDialog_Cannot_use_DIA_window_for_precusor_exclusion_when__All_Ions__is_selected_as_the_isolation_scheme___To_use_the_DIA_window_for_precusor_exclusion__change_the_isolation_scheme_in_the_Full_Scan_settings_ { get { return ResourceManager.GetString("TransitionSettingsUI_OkDialog_Cannot_use_DIA_window_for_precusor_exclusion_when__" + "All_Ions__is_selected_as_the_isolation_scheme___To_use_the_DIA_window_for_precus" + "or_exclusion__change_the_isolation_scheme_in_the_Full_Scan_settings_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Changing transition settings. /// </summary> public static string TransitionSettingsUI_OkDialog_Changing_transition_settings { get { return ResourceManager.GetString("TransitionSettingsUI_OkDialog_Changing_transition_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to For MS1 filtering with a QIT mass analyzer only 1 isotope peak is supported.. /// </summary> public static string TransitionSettingsUI_OkDialog_For_MS1_filtering_with_a_QIT_mass_analyzer_only_1_isotope_peak_is_supported { get { return ResourceManager.GetString("TransitionSettingsUI_OkDialog_For_MS1_filtering_with_a_QIT_mass_analyzer_only_1_i" + "sotope_peak_is_supported", resourceCulture); } } /// <summary> /// Looks up a localized string similar to High resolution MS1 filtering requires use of monoisotopic precursor masses..
/// </summary> public static string TransitionSettingsUI_OkDialog_High_resolution_MS1_filtering_requires_use_of_monoisotopic_precursor_masses { get { return ResourceManager.GetString("TransitionSettingsUI_OkDialog_High_resolution_MS1_filtering_requires_use_of_monoi" + "sotopic_precursor_masses", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ion types must contain a comma separated list of ion types a, b, c, x, y, z and p (for precursor).. /// </summary> public static string TransitionSettingsUI_OkDialog_Ion_types_must_contain_a_comma_separated_list_of_ion_types_a_b_c_x_y_z_and_p_for_precursor { get { return ResourceManager.GetString("TransitionSettingsUI_OkDialog_Ion_types_must_contain_a_comma_separated_list_of_io" + "n_types_a_b_c_x_y_z_and_p_for_precursor", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Isotope enrichment settings are required for MS1 filtering on high resolution mass spectrometers.. /// </summary> public static string TransitionSettingsUI_OkDialog_Isotope_enrichment_settings_are_required_for_MS1_filtering_on_high_resolution_mass_spectrometers { get { return ResourceManager.GetString("TransitionSettingsUI_OkDialog_Isotope_enrichment_settings_are_required_for_MS1_fi" + "ltering_on_high_resolution_mass_spectrometers", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Molecule ion types must contain a comma separated list of ion types. Valid types are &quot;f&quot; (for fragment) and/or &quot;p&quot; (for precursor). /// </summary> public static string TransitionSettingsUI_OkDialog_Small_molecule_ion_types_must_contain_a_comma_separated_list_of_ion_types__Valid_types_are__f___for_fragment__and_or__p___for_precursor_ { get { return ResourceManager.GetString("TransitionSettingsUI_OkDialog_Small_molecule_ion_types_must_contain_a_comma_separ" + "ated_list_of_ion_types__Valid_types_are__f___for_fragment__and_or__p___for_precu" + "rsor_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The allowable retention time range {0} to {1} must be at least {2} minutes apart.. /// </summary> public static string TransitionSettingsUI_OkDialog_The_allowable_retention_time_range__0__to__1__must_be_at_least__2__minutes_apart { get { return ResourceManager.GetString("TransitionSettingsUI_OkDialog_The_allowable_retention_time_range__0__to__1__must_" + "be_at_least__2__minutes_apart", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This is not a valid number of minutes.. /// </summary> public static string TransitionSettingsUI_OkDialog_This_is_not_a_valid_number_of_minutes { get { return ResourceManager.GetString("TransitionSettingsUI_OkDialog_This_is_not_a_valid_number_of_minutes", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adducts---. /// </summary> public static string TransitionSettingsUI_PopulateAdductMenu_Adducts_minusminusminus { get { return ResourceManager.GetString("TransitionSettingsUI_PopulateAdductMenu_Adducts_minusminusminus", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adducts+++. /// </summary> public static string TransitionSettingsUI_PopulateAdductMenu_Adducts_plusplusplus { get { return ResourceManager.GetString("TransitionSettingsUI_PopulateAdductMenu_Adducts_plusplusplus", resourceCulture); } } /// <summary> /// Looks up a localized string similar to More. 
/// </summary> public static string TransitionSettingsUI_PopulateAdductMenu_More { get { return ResourceManager.GetString("TransitionSettingsUI_PopulateAdductMenu_More", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Res&amp;olution:. /// </summary> public static string TransitionSettingsUI_SetAnalyzerType_Resolution { get { return ResourceManager.GetString("TransitionSettingsUI_SetAnalyzerType_Resolution", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Res&amp;olving power:. /// </summary> public static string TransitionSettingsUI_SetAnalyzerType_Resolving_power { get { return ResourceManager.GetString("TransitionSettingsUI_SetAnalyzerType_Resolving_power", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} [{1}]. /// </summary> public static string TransitionTreeNode_GetLabel__0__1__ { get { return ResourceManager.GetString("TransitionTreeNode_GetLabel__0__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to irank {0}. /// </summary> public static string TransitionTreeNode_GetLabel_irank__0__ { get { return ResourceManager.GetString("TransitionTreeNode_GetLabel_irank__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to rank {0}. /// </summary> public static string TransitionTreeNode_GetLabel_rank__0__ { get { return ResourceManager.GetString("TransitionTreeNode_GetLabel_rank__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to [{0}]. /// </summary> public static string TransitionTreeNode_GetResultsText__0__ { get { return ResourceManager.GetString("TransitionTreeNode_GetResultsText__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} (ratio {1}). /// </summary> public static string TransitionTreeNode_GetResultsText__0__ratio__1__ { get { return ResourceManager.GetString("TransitionTreeNode_GetResultsText__0__ratio__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Charge. /// </summary> public static string TransitionTreeNode_RenderTip_Charge { get { return ResourceManager.GetString("TransitionTreeNode_RenderTip_Charge", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Decoy Mass Shift. /// </summary> public static string TransitionTreeNode_RenderTip_Decoy_Mass_Shift { get { return ResourceManager.GetString("TransitionTreeNode_RenderTip_Decoy_Mass_Shift", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Formula. /// </summary> public static string TransitionTreeNode_RenderTip_Formula { get { return ResourceManager.GetString("TransitionTreeNode_RenderTip_Formula", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Ion. /// </summary> public static string TransitionTreeNode_RenderTip_Ion { get { return ResourceManager.GetString("TransitionTreeNode_RenderTip_Ion", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library intensity. /// </summary> public static string TransitionTreeNode_RenderTip_Library_intensity { get { return ResourceManager.GetString("TransitionTreeNode_RenderTip_Library_intensity", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library rank. 
/// </summary> public static string TransitionTreeNode_RenderTip_Library_rank { get { return ResourceManager.GetString("TransitionTreeNode_RenderTip_Library_rank", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loss. /// </summary> public static string TransitionTreeNode_RenderTip_Loss { get { return ResourceManager.GetString("TransitionTreeNode_RenderTip_Loss", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Losses. /// </summary> public static string TransitionTreeNode_RenderTip_Losses { get { return ResourceManager.GetString("TransitionTreeNode_RenderTip_Losses", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Product m/z. /// </summary> public static string TransitionTreeNode_RenderTip_Product_m_z { get { return ResourceManager.GetString("TransitionTreeNode_RenderTip_Product_m_z", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transition. /// </summary> public static string TransitionTreeNode_Title { get { return ResourceManager.GetString("TransitionTreeNode_Title", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Transitions. /// </summary> public static string TransitionTreeNode_Titles { get { return ResourceManager.GetString("TransitionTreeNode_Titles", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Truncated peaks. /// </summary> public static string TruncatedPeakFinder_DisplayName_Truncated_peaks { get { return ResourceManager.GetString("TruncatedPeakFinder_DisplayName_Truncated_peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Truncated peak. /// </summary> public static string TruncatedPeakFinder_MatchTransition_Truncated_peak { get { return ResourceManager.GetString("TruncatedPeakFinder_MatchTransition_Truncated_peak", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} truncated peaks. /// </summary> public static string TruncatedPeakFinder_MatchTransitionGroup__0__truncated_peaks { get { return ResourceManager.GetString("TruncatedPeakFinder_MatchTransitionGroup__0__truncated_peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to 1 truncated peak. /// </summary> public static string TruncatedPeakFinder_MatchTransitionGroup__1_truncated_peak { get { return ResourceManager.GetString("TruncatedPeakFinder_MatchTransitionGroup__1_truncated_peak", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Static mod masses have already been added for this heavy type.. /// </summary> public static string TypedExplicitModifications_AddModMasses_Static_mod_masses_have_already_been_added_for_this_heavy_type { get { return ResourceManager.GetString("TypedExplicitModifications_AddModMasses_Static_mod_masses_have_already_been_added" + "_for_this_heavy_type", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Static mod masses may not be added to light type.. /// </summary> public static string TypedExplicitModifications_AddModMasses_Static_mod_masses_may_not_be_added_to_light_type { get { return ResourceManager.GetString("TypedExplicitModifications_AddModMasses_Static_mod_masses_may_not_be_added_to_lig" + "ht_type", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. 
/// </summary> public static System.Drawing.Bitmap UIModeMixed { get { object obj = ResourceManager.GetObject("UIModeMixed", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap UIModeProteomic { get { object obj = ResourceManager.GetObject("UIModeProteomic", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap UIModeSmallMolecules { get { object obj = ResourceManager.GetObject("UIModeSmallMolecules", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Chromatogram information unavailable. /// </summary> public static string UnavailableChromGraphItem_UnavailableChromGraphItem_Chromatogram_information_unavailable { get { return ResourceManager.GetString("UnavailableChromGraphItem_UnavailableChromGraphItem_Chromatogram_information_unav" + "ailable", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Spectrum information unavailable. /// </summary> public static string UnavailableMSGraphItem_UnavailableMSGraphItem_Spectrum_information_unavailable { get { return ResourceManager.GetString("UnavailableMSGraphItem_UnavailableMSGraphItem_Spectrum_information_unavailable", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Undo transaction may not be started in undo/redo.. /// </summary> public static string UndoManager_BeginTransaction_Undo_transaction_may_not_be_started_in_undo_redo { get { return ResourceManager.GetString("UndoManager_BeginTransaction_Undo_transaction_may_not_be_started_in_undo_redo", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Commit called with no pending undo record.. /// </summary> public static string UndoManager_Commit_Commit_called_with_no_pending_undo_record { get { return ResourceManager.GetString("UndoManager_Commit_Commit_called_with_no_pending_undo_record", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempt to index {0} beyond length {1}.. /// </summary> public static string UndoManager_Restore_Attempt_to_index__0__beyond_length__1__ { get { return ResourceManager.GetString("UndoManager_Restore_Attempt_to_index__0__beyond_length__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempting undo/redo inside undo transaction.. /// </summary> public static string UndoManager_Restore_Attempting_undo_redo_inside_undo_transaction { get { return ResourceManager.GetString("UndoManager_Restore_Attempting_undo_redo_inside_undo_transaction", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Redo {0} Actions. /// </summary> public static string UndoRedoList_GetLabelText_Redo__0__Actions { get { return ResourceManager.GetString("UndoRedoList_GetLabelText_Redo__0__Actions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Redo 1 Action. /// </summary> public static string UndoRedoList_GetLabelText_Redo_1_Action { get { return ResourceManager.GetString("UndoRedoList_GetLabelText_Redo_1_Action", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Undo {0} Actions. 
/// </summary> public static string UndoRedoList_GetLabelText_Undo__0__Actions { get { return ResourceManager.GetString("UndoRedoList_GetLabelText_Undo__0__Actions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Undo 1 Action. /// </summary> public static string UndoRedoList_GetLabelText_Undo_1_Action { get { return ResourceManager.GetString("UndoRedoList_GetLabelText_Undo_1_Action", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unifi. /// </summary> public static string Unifi_Label_Unifi { get { return ResourceManager.GetString("Unifi_Label_Unifi", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Cannot find account for username {0} and server {1}.. /// </summary> public static string UnifiUrl_OpenMsDataFile_Cannot_find_account_for_username__0__and_server__1__ { get { return ResourceManager.GetString("UnifiUrl_OpenMsDataFile_Cannot_find_account_for_username__0__and_server__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unintegrated transitions. /// </summary> public static string UnintegratedTransitionFinder_DisplayName_Unintegrated_transitions { get { return ResourceManager.GetString("UnintegratedTransitionFinder_DisplayName_Unintegrated_transitions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unintegrated transition. /// </summary> public static string UnintegratedTransitionFinder_Match_Unintegrated_transition { get { return ResourceManager.GetString("UnintegratedTransitionFinder_Match_Unintegrated_transition", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Adding rows to grid.. /// </summary> public static string UniquePeptidesDlg_AddProteinRowsToGrid_Adding_rows_to_grid_ { get { return ResourceManager.GetString("UniquePeptidesDlg_AddProteinRowsToGrid_Adding_rows_to_grid_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} ///(gene: {1}). /// </summary> public static string UniquePeptidesDlg_LaunchPeptideProteinsQuery_ { get { return ResourceManager.GetString("UniquePeptidesDlg_LaunchPeptideProteinsQuery_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed querying background proteome {0}.. /// </summary> public static string UniquePeptidesDlg_LaunchPeptideProteinsQuery_Failed_querying_background_proteome__0__ { get { return ResourceManager.GetString("UniquePeptidesDlg_LaunchPeptideProteinsQuery_Failed_querying_background_proteome_" + "_0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Looking for proteins with matching peptide sequences. /// </summary> public static string UniquePeptidesDlg_LaunchPeptideProteinsQuery_Looking_for_proteins_with_matching_peptide_sequences { get { return ResourceManager.GetString("UniquePeptidesDlg_LaunchPeptideProteinsQuery_Looking_for_proteins_with_matching_p" + "eptide_sequences", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Querying Background Proteome Database. /// </summary> public static string UniquePeptidesDlg_LaunchPeptideProteinsQuery_Querying_Background_Proteome_Database { get { return ResourceManager.GetString("UniquePeptidesDlg_LaunchPeptideProteinsQuery_Querying_Background_Proteome_Databas" + "e", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Exclude peptides. 
/// </summary> public static string UniquePeptidesDlg_OkDialog_Exclude_peptides { get { return ResourceManager.GetString("UniquePeptidesDlg_OkDialog_Exclude_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The background proteome {0} has not yet finished being digested with {1}.. /// </summary> public static string UniquePeptidesDlg_OnShown_The_background_proteome__0__has_not_yet_finished_being_digested_with__1__ { get { return ResourceManager.GetString("UniquePeptidesDlg_OnShown_The_background_proteome__0__has_not_yet_finished_being_" + "digested_with__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Some background proteome proteins did not have gene information, this selection may be suspect.. /// </summary> public static string UniquePeptidesDlg_SelectPeptidesWithNumberOfMatchesAtOrBelowThreshold_Some_background_proteome_proteins_did_not_have_gene_information__this_selection_may_be_suspect_ { get { return ResourceManager.GetString("UniquePeptidesDlg_SelectPeptidesWithNumberOfMatchesAtOrBelowThreshold_Some_backgr" + "ound_proteome_proteins_did_not_have_gene_information__this_selection_may_be_susp" + "ect_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Some background proteome proteins did not have species information, this selection may be suspect.. /// </summary> public static string UniquePeptidesDlg_SelectPeptidesWithNumberOfMatchesAtOrBelowThreshold_Some_background_proteome_proteins_did_not_have_species_information__this_selection_may_be_suspect_ { get { return ResourceManager.GetString("UniquePeptidesDlg_SelectPeptidesWithNumberOfMatchesAtOrBelowThreshold_Some_backgr" + "ound_proteome_proteins_did_not_have_species_information__this_selection_may_be_s" + "uspect_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to These proteins include:. /// </summary> public static string UniquePeptidesDlg_SelectPeptidesWithNumberOfMatchesAtOrBelowThreshold_These_proteins_include_ { get { return ResourceManager.GetString("UniquePeptidesDlg_SelectPeptidesWithNumberOfMatchesAtOrBelowThreshold_These_prote" + "ins_include_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The tool &quot;{0}&quot; requires report type titled &quot;{1}&quot; and it is not provided. Import canceled.. /// </summary> public static string UnpackZipToolHelper_UnpackZipTool_The_tool___0___requires_report_type_titled___1___and_it_is_not_provided__Import_canceled_ { get { return ResourceManager.GetString("UnpackZipToolHelper_UnpackZipTool_The_tool___0___requires_report_type_titled___1_" + "__and_it_is_not_provided__Import_canceled_", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap up_pro32 { get { object obj = ResourceManager.GetObject("up_pro32", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Maybe &amp;Later. /// </summary> public static string UpgradeDlg_cbAtStartup_CheckedChanged_Maybe__Later { get { return ResourceManager.GetString("UpgradeDlg_cbAtStartup_CheckedChanged_Maybe__Later", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No update was found.. 
/// </summary> public static string UpgradeDlg_UpgradeDlg_No_update_was_found_ { get { return ResourceManager.GetString("UpgradeDlg_UpgradeDlg_No_update_was_found_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Upgrading to {0} (downloading {1} of {2}). /// </summary> public static string UpgradeManager_GetProgressMessage_Upgrading_to__0___downloading__1__of__2__ { get { return ResourceManager.GetString("UpgradeManager_GetProgressMessage_Upgrading_to__0___downloading__1__of__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed attempting to check for an upgrade.. /// </summary> public static string UpgradeManager_updateCheck_Complete_Failed_attempting_to_check_for_an_upgrade_ { get { return ResourceManager.GetString("UpgradeManager_updateCheck_Complete_Failed_attempting_to_check_for_an_upgrade_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed attempting to upgrade.. /// </summary> public static string UpgradeManager_updateCheck_Complete_Failed_attempting_to_upgrade_ { get { return ResourceManager.GetString("UpgradeManager_updateCheck_Complete_Failed_attempting_to_upgrade_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Upgrading {0}. /// </summary> public static string UpgradeManager_updateCheck_Complete_Upgrading__0_ { get { return ResourceManager.GetString("UpgradeManager_updateCheck_Complete_Upgrading__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure uncompressing data.. /// </summary> public static string UtilDB_Uncompress_Failure_uncompressing_data { get { return ResourceManager.GetString("UtilDB_Uncompress_Failure_uncompressing_data", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No ion mobility information found. /// </summary> public static string ValidatingIonMobilityPeptide_Validate_No_ion_mobility_information_found { get { return ResourceManager.GetString("ValidatingIonMobilityPeptide_Validate_No_ion_mobility_information_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No ion mobility units found. /// </summary> public static string ValidatingIonMobilityPeptide_Validate_No_ion_mobility_units_found { get { return ResourceManager.GetString("ValidatingIonMobilityPeptide_Validate_No_ion_mobility_units_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A valid adduct description (e.g. &quot;[M+H]&quot;) must be provided.. /// </summary> public static string ValidatingIonMobilityPeptide_ValidateAdduct_A_valid_adduct_description__e_g____M_H____must_be_provided_ { get { return ResourceManager.GetString("ValidatingIonMobilityPeptide_ValidateAdduct_A_valid_adduct_description__e_g____M_" + "H____must_be_provided_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Measured collisional cross section values must be valid decimal numbers greater than zero.. 
/// </summary> public static string ValidatingIonMobilityPeptide_ValidateCollisionalCrossSection_Measured_collisional_cross_section_values_must_be_valid_decimal_numbers_greater_than_zero_ { get { return ResourceManager.GetString("ValidatingIonMobilityPeptide_ValidateCollisionalCrossSection_Measured_collisional" + "_cross_section_values_must_be_valid_decimal_numbers_greater_than_zero_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to High energy ion mobility offsets should be empty, or express an offset value for ion mobility in high collision energy scans which may add velocity to ions.. /// </summary> public static string ValidatingIonMobilityPeptide_ValidateHighEnergyIonMobilityOffset_High_energy_ion_mobility_offsets_should_be_empty__or_express_an_offset_value_for_ion_mobility_in_high_collision_energy_scans_which_may_add_velocity_to_ions_ { get { return ResourceManager.GetString("ValidatingIonMobilityPeptide_ValidateHighEnergyIonMobilityOffset_High_energy_ion_" + "mobility_offsets_should_be_empty__or_express_an_offset_value_for_ion_mobility_in" + "_high_collision_energy_scans_which_may_add_velocity_to_ions_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A modified peptide sequence is required for each entry.. /// </summary> public static string ValidatingIonMobilityPeptide_ValidateSequence_A_modified_peptide_sequence_is_required_for_each_entry_ { get { return ResourceManager.GetString("ValidatingIonMobilityPeptide_ValidateSequence_A_modified_peptide_sequence_is_requ" + "ired_for_each_entry_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The sequence {0} is not a valid modified peptide sequence.. /// </summary> public static string ValidatingIonMobilityPeptide_ValidateSequence_The_sequence__0__is_not_a_valid_modified_peptide_sequence_ { get { return ResourceManager.GetString("ValidatingIonMobilityPeptide_ValidateSequence_The_sequence__0__is_not_a_valid_mod" + "ified_peptide_sequence_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value &apos;{0}&apos; is not valid for the argument {1} which requires a comma-separated list of integers.. /// </summary> public static string ValueInvalidChargeListException_ValueInvalidChargeListException_The_value___0___is_not_valid_for_the_argument__1__which_requires_an_comma_separated_list_of_integers_ { get { return ResourceManager.GetString("ValueInvalidChargeListException_ValueInvalidChargeListException_The_value___0___i" + "s_not_valid_for_the_argument__1__which_requires_an_comma_separated_list_of_integ" + "ers_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value &apos;{0}&apos; is not valid for the argument {1} which requires a date-time value.. /// </summary> public static string ValueInvalidDateException_ValueInvalidDateException_The_value___0___is_not_valid_for_the_argument__1__which_requires_a_date_time_value_ { get { return ResourceManager.GetString("ValueInvalidDateException_ValueInvalidDateException_The_value___0___is_not_valid_" + "for_the_argument__1__which_requires_a_date_time_value_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value &apos;{0}&apos; is not valid for the argument {1} which requires a decimal number..
/// </summary> public static string ValueInvalidDoubleException_ValueInvalidDoubleException_The_value___0___is_not_valid_for_the_argument__1__which_requires_a_decimal_number_ { get { return ResourceManager.GetString("ValueInvalidDoubleException_ValueInvalidDoubleException_The_value___0___is_not_va" + "lid_for_the_argument__1__which_requires_a_decimal_number_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value &apos;{0}&apos; is not valid for the argument {1}. Use one of {2}. /// </summary> public static string ValueInvalidException_ValueInvalidException_The_value___0___is_not_valid_for_the_argument__1___Use_one_of__2_ { get { return ResourceManager.GetString("ValueInvalidException_ValueInvalidException_The_value___0___is_not_valid_for_the_" + "argument__1___Use_one_of__2_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value &apos;{0}&apos; is not valid for the argument {1} which requires an integer.. /// </summary> public static string ValueInvalidIntException_ValueInvalidIntException_The_value___0___is_not_valid_for_the_argument__1__which_requires_an_integer_ { get { return ResourceManager.GetString("ValueInvalidIntException_ValueInvalidIntException_The_value___0___is_not_valid_fo" + "r_the_argument__1__which_requires_an_integer_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value &apos;{0}&apos; is not valid for the argument {1} which requires a comma-separated list of fragment ion types (a, b, c, x, y, z, p).. /// </summary> public static string ValueInvalidIonTypeListException_ValueInvalidIonTypeListException_The_value___0___is_not_valid_for_the_argument__1__which_requires_an_comma_separated_list_of_fragment_ion_types__a__b__c__x__y__z__p__ { get { return ResourceManager.GetString("ValueInvalidIonTypeListException_ValueInvalidIonTypeListException_The_value___0__" + "_is_not_valid_for_the_argument__1__which_requires_an_comma_separated_list_of_fra" + "gment_ion_types__a__b__c__x__y__z__p__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value {0} is not valid for the argument {1} which requires a list of decimal numbers.. /// </summary> public static string ValueInvalidNumberListException_ValueInvalidNumberListException_The_value__0__is_not_valid_for_the_argument__1__which_requires_a_list_of_decimal_numbers_ { get { return ResourceManager.GetString("ValueInvalidNumberListException_ValueInvalidNumberListException_The_value__0__is_" + "not_valid_for_the_argument__1__which_requires_a_list_of_decimal_numbers_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value &apos;{0}&apos; is not valid for the argument {1}; failed attempting to convert it to a full file path.. /// </summary> public static string ValueInvalidPathException_ValueInvalidPathException_The_value___0___is_not_valid_for_the_argument__1__failed_attempting_to_convert_it_to_a_full_file_path_ { get { return ResourceManager.GetString("ValueInvalidPathException_ValueInvalidPathException_The_value___0___is_not_valid_" + "for_the_argument__1__failed_attempting_to_convert_it_to_a_full_file_path_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The argument {0} requires a value and must be specified in the format --name=value
/// </summary> public static string ValueMissingException_ValueMissingException_ { get { return ResourceManager.GetString("ValueMissingException_ValueMissingException_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value &apos;{0}&apos; for the argument {1} must be between {2} and {3}.. /// </summary> public static string ValueOutOfRangeDoubleException_ValueOutOfRangeException_The_value___0___for_the_argument__1__must_be_between__2__and__3__ { get { return ResourceManager.GetString("ValueOutOfRangeDoubleException_ValueOutOfRangeException_The_value___0___for_the_a" + "rgument__1__must_be_between__2__and__3__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The argument {0} should not have a value specified. /// </summary> public static string ValueUnexpectedException_ValueUnexpectedException_The_argument__0__should_not_have_a_value_specified { get { return ResourceManager.GetString("ValueUnexpectedException_ValueUnexpectedException_The_argument__0__should_not_hav" + "e_a_value_specified", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Calculating hash of input file. /// </summary> public static string VendorIssueHelper_ConvertBrukerToMzml_Calculating_hash_of_input_file { get { return ResourceManager.GetString("VendorIssueHelper_ConvertBrukerToMzml_Calculating_hash_of_input_file", resourceCulture); } } /// <summary> /// Looks up a localized string similar to CompassXport software must be installed to import Bruker raw data files.. /// </summary> public static string VendorIssueHelper_ConvertBrukerToMzml_CompassXport_software_must_be_installed_to_import_Bruker_raw_data_files_ { get { return ResourceManager.GetString("VendorIssueHelper_ConvertBrukerToMzml_CompassXport_software_must_be_installed_to_" + "import_Bruker_raw_data_files_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure attempting to convert {0} to mzML using CompassXport.. /// </summary> public static string VendorIssueHelper_ConvertBrukerToMzml_Failure_attempting_to_convert__0__to_mzML_using_CompassXport_ { get { return ResourceManager.GetString("VendorIssueHelper_ConvertBrukerToMzml_Failure_attempting_to_convert__0__to_mzML_u" + "sing_CompassXport_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure attempting to convert sample {0} in {1} to mzXML to work around a performance issue in the AB Sciex WiffFileDataReader library.. /// </summary> public static string VendorIssueHelper_ConvertLocalWiffToMzxml_Failure_attempting_to_convert_sample__0__in__1__to_mzXML_to_work_around_a_performance_issue_in_the_AB_Sciex_WiffFileDataReader_library { get { return ResourceManager.GetString("VendorIssueHelper_ConvertLocalWiffToMzxml_Failure_attempting_to_convert_sample__0" + "__in__1__to_mzXML_to_work_around_a_performance_issue_in_the_AB_Sciex_WiffFileDat" + "aReader_library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Converting {0} to xml. /// </summary> public static string VendorIssueHelper_ConvertPilotFiles_Converting__0__to_xml { get { return ResourceManager.GetString("VendorIssueHelper_ConvertPilotFiles_Converting__0__to_xml", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure attempting to convert file {0} to .group.xml.. 
/// </summary> public static string VendorIssueHelper_ConvertPilotFiles_Failure_attempting_to_convert_file__0__to__group_xml_ { get { return ResourceManager.GetString("VendorIssueHelper_ConvertPilotFiles_Failure_attempting_to_convert_file__0__to__gr" + "oup_xml_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to ProteinPilot software (trial or full version) must be installed to convert &apos;.group&apos; files to compatible &apos;.group.xml&apos; files.. /// </summary> public static string VendorIssueHelper_ConvertPilotFiles_ProteinPilot_software__trial_or_full_version__must_be_installed_to_convert___group__files_to_compatible___group_xml__files_ { get { return ResourceManager.GetString("VendorIssueHelper_ConvertPilotFiles_ProteinPilot_software__trial_or_full_version_" + "_must_be_installed_to_convert___group__files_to_compatible___group_xml__files_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unable to find {0} or {1} in directory {2}. Please reinstall ProteinPilot software to be able to handle .group files.. /// </summary> public static string VendorIssueHelper_ConvertPilotFiles_Unable_to_find__0__or__1__in_directory__2____Please_reinstall_ProteinPilot_software_to_be_able_to_handle__group_files_ { get { return ResourceManager.GetString("VendorIssueHelper_ConvertPilotFiles_Unable_to_find__0__or__1__in_directory__2____" + "Please_reinstall_ProteinPilot_software_to_be_able_to_handle__group_files_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please install Analyst, or run this import on a computer with Analyst installed. /// </summary> public static string VendorIssueHelper_ConvertWiffToMzxml_Please_install_Analyst__or_run_this_import_on_a_computure_with_Analyst_installed { get { return ResourceManager.GetString("VendorIssueHelper_ConvertWiffToMzxml_Please_install_Analyst__or_run_this_import_o" + "n_a_computure_with_Analyst_installed", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file {0} cannot be imported by the AB SCIEX WiffFileDataReader library in a reasonable time frame ({1:F02} min).. /// </summary> public static string VendorIssueHelper_ConvertWiffToMzxml_The_file__0__cannot_be_imported_by_the_AB_SCIEX_WiffFileDataReader_library_in_a_reasonable_time_frame_1_F02_min { get { return ResourceManager.GetString("VendorIssueHelper_ConvertWiffToMzxml_The_file__0__cannot_be_imported_by_the_AB_SC" + "IEX_WiffFileDataReader_library_in_a_reasonable_time_frame_1_F02_min", resourceCulture); } } /// <summary> /// Looks up a localized string similar to To work around this issue requires Analyst to be installed on the computer running {0}.. /// </summary> public static string VendorIssueHelper_ConvertWiffToMzxml_To_work_around_this_issue_requires_Analyst_to_be_installed_on_the_computer_running__0__ { get { return ResourceManager.GetString("VendorIssueHelper_ConvertWiffToMzxml_To_work_around_this_issue_requires_Analyst_t" + "o_be_installed_on_the_computer_running__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Convert to mzXML work-around for {0}. /// </summary> public static string VendorIssueHelper_CreateTempFileSubstitute_Convert_to_mzXML_work_around_for__0__ { get { return ResourceManager.GetString("VendorIssueHelper_CreateTempFileSubstitute_Convert_to_mzXML_work_around_for__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Local copy work-around for {0}.
/// </summary> public static string VendorIssueHelper_CreateTempFileSubstitute_Local_copy_work_around_for__0__ { get { return ResourceManager.GetString("VendorIssueHelper_CreateTempFileSubstitute_Local_copy_work_around_for__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Click &apos;Embedded&apos; to use embedded spectra. ///Click &apos;Retry&apos; to try building again with original spectrum files placed next to the input files (with a supported file extension: {0}).. /// </summary> public static string VendorIssueHelper_ShowLibraryMissingExternalSpectraError_ButtonDescriptionsSupportsExtensions__0__ { get { return ResourceManager.GetString("VendorIssueHelper_ShowLibraryMissingExternalSpectraError_ButtonDescriptionsSuppor" + "tsExtensions__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Could not find an external spectrum file matching &apos;{0}&apos; in the same directory as the MaxQuant input file &apos;{1}&apos;, or in its parent or grandparent directory. /// ///If you do not have the original file, you may build the library with embedded spectra from the input file. However, fragment ions in MaxQuant embedded spectra are charge state deconvoluted, and will contain only singly charged fragment ions which may not be representative of intensities measured by a mass spectrometer. /// ///. /// </summary> public static string VendorIssueHelper_ShowLibraryMissingExternalSpectraError_Could_not_find_an_external_spectrum_file_matching__0__in_the_same_directory_as_the_MaxQuant_input_file__1__ { get { return ResourceManager.GetString("VendorIssueHelper_ShowLibraryMissingExternalSpectraError_Could_not_find_an_extern" + "al_spectrum_file_matching__0__in_the_same_directory_as_the_MaxQuant_input_file__" + "1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0}{1} library {2} will be ignored.. /// </summary> public static string ViewLibraryDlg_AddAllPeptides__0__1__library__2__will_be_ignored { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides__0__1__library__2__will_be_ignored", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} and {1} library {2} will be ignored.. /// </summary> public static string ViewLibraryDlg_AddAllPeptides__0__and__1__library__2__will_be_ignored { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides__0__and__1__library__2__will_be_ignored", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} existing. /// </summary> public static string ViewLibraryDlg_AddAllPeptides__0__existing { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides__0__existing", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} proteins,. /// </summary> public static string ViewLibraryDlg_AddAllPeptides__0__proteins { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides__0__proteins", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0} unmatched. /// </summary> public static string ViewLibraryDlg_AddAllPeptides__0__unmatched { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides__0__unmatched", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add All. 
/// </summary> public static string ViewLibraryDlg_AddAllPeptides_Add_All { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_Add_All", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add all peptides from {0} library. /// </summary> public static string ViewLibraryDlg_AddAllPeptides_Add_all_peptides_from__0__library { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_Add_all_peptides_from__0__library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to All library peptides already exist in the current document.. /// </summary> public static string ViewLibraryDlg_AddAllPeptides_All_library_peptides_already_exist_in_the_current_document { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_All_library_peptides_already_exist_in_the_current_d" + "ocument", resourceCulture); } } /// <summary> /// Looks up a localized string similar to entries. /// </summary> public static string ViewLibraryDlg_AddAllPeptides_entries { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_entries", resourceCulture); } } /// <summary> /// Looks up a localized string similar to entry. /// </summary> public static string ViewLibraryDlg_AddAllPeptides_entry { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_entry", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Matching Molecules. /// </summary> public static string ViewLibraryDlg_AddAllPeptides_Matching_Molecules { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_Matching_Molecules", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Matching molecules to the current document settings. /// </summary> public static string ViewLibraryDlg_AddAllPeptides_Matching_molecules_to_the_current_document_settings { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_Matching_molecules_to_the_current_document_settings" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Matching Peptides. /// </summary> public static string ViewLibraryDlg_AddAllPeptides_Matching_Peptides { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_Matching_Peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Matching peptides to the current document settings. /// </summary> public static string ViewLibraryDlg_AddAllPeptides_Matching_peptides_to_the_current_document_settings { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_Matching_peptides_to_the_current_document_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No peptides match the current document settings.. /// </summary> public static string ViewLibraryDlg_AddAllPeptides_No_peptides_match_the_current_document_settings { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_No_peptides_match_the_current_document_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Please retry this operation.. /// </summary> public static string ViewLibraryDlg_AddAllPeptides_Please_retry_this_operation { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_Please_retry_this_operation", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The document changed during processing.. 
/// </summary> public static string ViewLibraryDlg_AddAllPeptides_The_document_changed_during_processing { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_The_document_changed_during_processing", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This operation will add {0} {1} molecules, {2} precursors and {3} transitions to the document.. /// </summary> public static string ViewLibraryDlg_AddAllPeptides_This_operation_will_add__0__1__molecules__2__precursors_and__3__transitions_to_the_document { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_This_operation_will_add__0__1__molecules__2__precur" + "sors_and__3__transitions_to_the_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This operation will add {0} {1} peptides, {2} precursors and {3} transitions to the document.. /// </summary> public static string ViewLibraryDlg_AddAllPeptides_This_operation_will_add__0__1__peptides__2__precursors_and__3__transitions_to_the_document { get { return ResourceManager.GetString("ViewLibraryDlg_AddAllPeptides_This_operation_will_add__0__1__peptides__2__precurs" + "ors_and__3__transitions_to_the_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add library peptide {0}. /// </summary> public static string ViewLibraryDlg_AddPeptide_Add_library_peptide__0__ { get { return ResourceManager.GetString("ViewLibraryDlg_AddPeptide_Add_library_peptide__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Modifications for this peptide do not match current document settings.. /// </summary> public static string ViewLibraryDlg_AddPeptide_Modifications_for_this_peptide_do_not_match_current_document_settings { get { return ResourceManager.GetString("ViewLibraryDlg_AddPeptide_Modifications_for_this_peptide_do_not_match_current_doc" + "ument_settings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The peptide {0} already exists with charge {1} in the current document.. /// </summary> public static string ViewLibraryDlg_AddPeptide_The_peptide__0__already_exists_with_charge__1__in_the_current_document { get { return ResourceManager.GetString("ViewLibraryDlg_AddPeptide_The_peptide__0__already_exists_with_charge__1__in_the_c" + "urrent_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The precursor m/z {0:F04} is not measured by the current DIA isolation scheme.. /// </summary> public static string ViewLibraryDlg_AddPeptide_The_precursor_m_z__0_F04__is_not_measured_by_the_current_DIA_isolation_scheme_ { get { return ResourceManager.GetString("ViewLibraryDlg_AddPeptide_The_precursor_m_z__0_F04__is_not_measured_by_the_curren" + "t_DIA_isolation_scheme_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The precursor m/z {0:F04} is outside the instrument range {1} to {2}.. /// </summary> public static string ViewLibraryDlg_AddPeptide_The_precursor_m_z__0_F04__is_outside_the_instrument_range__1__to__2__ { get { return ResourceManager.GetString("ViewLibraryDlg_AddPeptide_The_precursor_m_z__0_F04__is_outside_the_instrument_ran" + "ge__1__to__2__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add Library. 
/// </summary> public static string ViewLibraryDlg_CheckLibraryInSettings_Add_Library { get { return ResourceManager.GetString("ViewLibraryDlg_CheckLibraryInSettings_Add_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The library {0} is not currently added to your document.. /// </summary> public static string ViewLibraryDlg_CheckLibraryInSettings_The_library__0__is_not_currently_added_to_your_document { get { return ResourceManager.GetString("ViewLibraryDlg_CheckLibraryInSettings_The_library__0__is_not_currently_added_to_y" + "our_document", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Would you like to add it?. /// </summary> public static string ViewLibraryDlg_CheckLibraryInSettings_Would_you_like_to_add_it { get { return ResourceManager.GetString("ViewLibraryDlg_CheckLibraryInSettings_Would_you_like_to_add_it", resourceCulture); } } /// <summary> /// Looks up a localized string similar to A background proteome is required to associate proteins.. /// </summary> public static string ViewLibraryDlg_EnsureBackgroundProteome_A_background_proteome_is_required_to_associate_proteins { get { return ResourceManager.GetString("ViewLibraryDlg_EnsureBackgroundProteome_A_background_proteome_is_required_to_asso" + "ciate_proteins", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The background proteome must be digested in order to associate proteins.. /// </summary> public static string ViewLibraryDlg_EnsureBackgroundProteome_The_background_proteome_must_be_digested_in_order_to_associate_proteins { get { return ResourceManager.GetString("ViewLibraryDlg_EnsureBackgroundProteome_The_background_proteome_must_be_digested_" + "in_order_to_associate_proteins", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred while trying to process the file {0}.. /// </summary> public static string ViewLibraryDlg_EnsureDigested_An_error_occurred_while_trying_to_process_the_file__0__ { get { return ResourceManager.GetString("ViewLibraryDlg_EnsureDigested_An_error_occurred_while_trying_to_process_the_file_" + "_0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Copying database. /// </summary> public static string ViewLibraryDlg_EnsureDigested_Copying_database { get { return ResourceManager.GetString("ViewLibraryDlg_EnsureDigested_Copying_database", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The background proteome &apos;{0}&apos; is in an older format. In order to be able to efficiently find peptide sequences, the background proteome should be upgraded to the latest version. Do you want to upgrade the background proteome now?. /// </summary> public static string ViewLibraryDlg_EnsureDigested_The_background_proteome___0___is_in_an_older_format___In_order_to_be_able_to_efficiently_find_peptide_sequences__the_background_proteome_should_be_upgraded_to_the_latest_version___Do_you_want_to_upgrade_the_background_proteome_now_ { get { return ResourceManager.GetString(@"ViewLibraryDlg_EnsureDigested_The_background_proteome___0___is_in_an_older_format___In_order_to_be_able_to_efficiently_find_peptide_sequences__the_background_proteome_should_be_upgraded_to_the_latest_version___Do_you_want_to_upgrade_the_background_proteome_now_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to An error occurred attempting to import the {0} library.. 
/// </summary> public static string ViewLibraryDlg_LoadLibrary_An_error_occurred_attempting_to_import_the__0__library { get { return ResourceManager.GetString("ViewLibraryDlg_LoadLibrary_An_error_occurred_attempting_to_import_the__0__library" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading Library. /// </summary> public static string ViewLibraryDlg_LoadLibrary_Loading_Library { get { return ResourceManager.GetString("ViewLibraryDlg_LoadLibrary_Loading_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0}Would you like to use the Unimod definitions for {1} modifications? The document will not change until peptides with these modifications are added.. /// </summary> public static string ViewLibraryDlg_MatchModifications__0__Would_you_like_to_use_the_Unimod_definitions_for__1__modifications_The_document_will_not_change_until_peptides_with_these_modifications_are_added { get { return ResourceManager.GetString("ViewLibraryDlg_MatchModifications__0__Would_you_like_to_use_the_Unimod_definition" + "s_for__1__modifications_The_document_will_not_change_until_peptides_with_these_m" + "odifications_are_added", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add modifications. /// </summary> public static string ViewLibraryDlg_MatchModifications_Add_modifications { get { return ResourceManager.GetString("ViewLibraryDlg_MatchModifications_Add_modifications", resourceCulture); } } /// <summary> /// Looks up a localized string similar to No. /// </summary> public static string ViewLibraryDlg_MatchModifications_No { get { return ResourceManager.GetString("ViewLibraryDlg_MatchModifications_No", resourceCulture); } } /// <summary> /// Looks up a localized string similar to the matching. /// </summary> public static string ViewLibraryDlg_MatchModifications_the_matching { get { return ResourceManager.GetString("ViewLibraryDlg_MatchModifications_the_matching", resourceCulture); } } /// <summary> /// Looks up a localized string similar to these. /// </summary> public static string ViewLibraryDlg_MatchModifications_these { get { return ResourceManager.GetString("ViewLibraryDlg_MatchModifications_these", resourceCulture); } } /// <summary> /// Looks up a localized string similar to This library appears to contain the following modifications.. /// </summary> public static string ViewLibraryDlg_MatchModifications_This_library_appears_to_contain_the_following_modifications { get { return ResourceManager.GetString("ViewLibraryDlg_MatchModifications_This_library_appears_to_contain_the_following_m" + "odifications", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Yes. /// </summary> public static string ViewLibraryDlg_MatchModifications_Yes { get { return ResourceManager.GetString("ViewLibraryDlg_MatchModifications_Yes", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Updating list of peptides. /// </summary> public static string ViewLibraryDlg_UpdateListPeptide_Updating_list_of_peptides { get { return ResourceManager.GetString("ViewLibraryDlg_UpdateListPeptide_Updating_list_of_peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Molecules {0} through {1} of {2} total.. 
/// </summary> public static string ViewLibraryDlg_UpdateStatusArea_Molecules__0__through__1__of__2__total_ { get { return ResourceManager.GetString("ViewLibraryDlg_UpdateStatusArea_Molecules__0__through__1__of__2__total_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Page {0} of {1}. /// </summary> public static string ViewLibraryDlg_UpdateStatusArea_Page__0__of__1__ { get { return ResourceManager.GetString("ViewLibraryDlg_UpdateStatusArea_Page__0__of__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Peptides {0} through {1} of {2} total.. /// </summary> public static string ViewLibraryDlg_UpdateStatusArea_Peptides__0__through__1__of__2__total { get { return ResourceManager.GetString("ViewLibraryDlg_UpdateStatusArea_Peptides__0__through__1__of__2__total", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure loading spectrum. Library may be corrupted.. /// </summary> public static string ViewLibraryDlg_UpdateUI_Failure_loading_spectrum_Library_may_be_corrupted { get { return ResourceManager.GetString("ViewLibraryDlg_UpdateUI_Failure_loading_spectrum_Library_may_be_corrupted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File. /// </summary> public static string ViewLibraryDlg_UpdateUI_File { get { return ResourceManager.GetString("ViewLibraryDlg_UpdateUI_File", resourceCulture); } } /// <summary> /// Looks up a localized string similar to RT. /// </summary> public static string ViewLibraryDlg_UpdateUI_RT { get { return ResourceManager.GetString("ViewLibraryDlg_UpdateUI_RT", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unauthorized access attempting to read from library.. /// </summary> public static string ViewLibraryDlg_UpdateUI_Unauthorized_access_attempting_to_read_from_library_ { get { return ResourceManager.GetString("ViewLibraryDlg_UpdateUI_Unauthorized_access_attempting_to_read_from_library_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The library {0} is no longer available in the Skyline settings. Reload the library explorer?. /// </summary> public static string ViewLibraryDlg_ViewLibraryDlg_Activated_The_library__0__is_no_longer_available_in_the_Skyline_settings__Reload_the_library_explorer_ { get { return ResourceManager.GetString("ViewLibraryDlg_ViewLibraryDlg_Activated_The_library__0__is_no_longer_available_in" + "_the_Skyline_settings__Reload_the_library_explorer_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to There are no libraries in the current settings.. /// </summary> public static string ViewLibraryDlg_ViewLibraryDlg_Activated_There_are_no_libraries_in_the_current_settings { get { return ResourceManager.GetString("ViewLibraryDlg_ViewLibraryDlg_Activated_There_are_no_libraries_in_the_current_set" + "tings", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library Molecules. /// </summary> public static string ViewLibraryPepMatching_AddPeptidesToLibraryGroup_Library_Molecules { get { return ResourceManager.GetString("ViewLibraryPepMatching_AddPeptidesToLibraryGroup_Library_Molecules", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Library Peptides. 
/// </summary> public static string ViewLibraryPepMatching_AddPeptidesToLibraryGroup_Library_Peptides { get { return ResourceManager.GetString("ViewLibraryPepMatching_AddPeptidesToLibraryGroup_Library_Peptides", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Skipping {0} already present. /// </summary> public static string ViewLibraryPepMatching_AddProteomePeptides_Skipping__0__already_present { get { return ResourceManager.GetString("ViewLibraryPepMatching_AddProteomePeptides_Skipping__0__already_present", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0}{1} m/z. /// </summary> public static string ViewLibSpectrumGraphItem_Title__0__1_ { get { return ResourceManager.GetString("ViewLibSpectrumGraphItem_Title__0__1_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to {0}{1}, Charge {2}. /// </summary> public static string ViewLibSpectrumGraphItem_Title__0__1__Charge__2__ { get { return ResourceManager.GetString("ViewLibSpectrumGraphItem_Title__0__1__Charge__2__", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap Wand { get { object obj = ResourceManager.GetObject("Wand", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap WandProhibit { get { object obj = ResourceManager.GetObject("WandProhibit", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap warning { get { object obj = ResourceManager.GetObject("warning", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Failed to find a valid MassLynx installation.. /// </summary> public static string WatersMethodExporter_EnsureLibraries_Failed_to_find_a_valid_MassLynx_installation { get { return ResourceManager.GetString("WatersMethodExporter_EnsureLibraries_Failed_to_find_a_valid_MassLynx_installation" + "", resourceCulture); } } /// <summary> /// Looks up a localized string similar to MassLynx may not be installed correctly. The library {0} could not be found.. /// </summary> public static string WatersMethodExporter_EnsureLibraries_MassLynx_may_not_be_installed_correctly_The_library__0__could_not_be_found { get { return ResourceManager.GetString("WatersMethodExporter_EnsureLibraries_MassLynx_may_not_be_installed_correctly_The_" + "library__0__could_not_be_found", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Waters method creation software may not be installed correctly.. /// </summary> public static string WatersMethodExporter_EnsureLibraries_Waters_method_creation_software_may_not_be_installed_correctly { get { return ResourceManager.GetString("WatersMethodExporter_EnsureLibraries_Waters_method_creation_software_may_not_be_i" + "nstalled_correctly", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure saving temporary post data to disk.. 
/// </summary> public static string WebHelpers_PostToLink_Failure_saving_temporary_post_data_to_disk_ { get { return ResourceManager.GetString("WebHelpers_PostToLink_Failure_saving_temporary_post_data_to_disk_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to File was not uploaded to the server. Please try again, or if the problem persists, please contact your Panorama server administrator.. /// </summary> public static string WebPanoramaPublishClient_ConfirmFileOnServer_File_was_not_uploaded_to_the_server__Please_try_again__or_if_the_problem_persists__please_contact_your_Panorama_server_administrator_ { get { return ResourceManager.GetString("WebPanoramaPublishClient_ConfirmFileOnServer_File_was_not_uploaded_to_the_server_" + "_Please_try_again__or_if_the_problem_persists__please_contact_your_Panorama_serv" + "er_administrator_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error deleting temporary zip file: . /// </summary> public static string WebPanoramaPublishClient_DeleteTempZipFile_Error_deleting_temporary_zip_file__ { get { return ResourceManager.GetString("WebPanoramaPublishClient_DeleteTempZipFile_Error_deleting_temporary_zip_file__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure retrieving folder information from {0}. /// </summary> public static string WebPanoramaPublishClient_GetInfoForFolders_Failure_retrieving_folder_information_from__0_ { get { return ResourceManager.GetString("WebPanoramaPublishClient_GetInfoForFolders_Failure_retrieving_folder_information_" + "from__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error importing Skyline file on Panorama server {0}. /// </summary> public static string WebPanoramaPublishClient_ImportDataOnServer_Error_importing_Skyline_file_on_Panorama_server__0_ { get { return ResourceManager.GetString("WebPanoramaPublishClient_ImportDataOnServer_Error_importing_Skyline_file_on_Panor" + "ama_server__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Error renaming temporary zip file: . /// </summary> public static string WebPanoramaPublishClient_RenameTempZipFile_Error_renaming_temporary_zip_file__ { get { return ResourceManager.GetString("WebPanoramaPublishClient_RenameTempZipFile_Error_renaming_temporary_zip_file__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Deleting temporary file on server. /// </summary> public static string WebPanoramaPublishClient_SendZipFile_Deleting_temporary_file_on_server { get { return ResourceManager.GetString("WebPanoramaPublishClient_SendZipFile_Deleting_temporary_file_on_server", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Renaming temporary file on server. /// </summary> public static string WebPanoramaPublishClient_SendZipFile_Renaming_temporary_file_on_server { get { return ResourceManager.GetString("WebPanoramaPublishClient_SendZipFile_Renaming_temporary_file_on_server", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Status on server is: {0}. /// </summary> public static string WebPanoramaPublishClient_SendZipFile_Status_on_server_is___0_ { get { return ResourceManager.GetString("WebPanoramaPublishClient_SendZipFile_Status_on_server_is___0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Waiting for data import completion.... 
/// </summary> public static string WebPanoramaPublishClient_SendZipFile_Waiting_for_data_import_completion___ { get { return ResourceManager.GetString("WebPanoramaPublishClient_SendZipFile_Waiting_for_data_import_completion___", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Importing data. {0}% complete.. /// </summary> public static string WebPanoramaPublishClient_updateProgressAndWait_Importing_data___0___complete_ { get { return ResourceManager.GetString("WebPanoramaPublishClient_updateProgressAndWait_Importing_data___0___complete_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Uploaded {0:fs} of {1:fs}. /// </summary> public static string WebPanoramaPublishClient_webClient_UploadProgressChanged_Uploaded__0_fs__of__1_fs_ { get { return ResourceManager.GetString("WebPanoramaPublishClient_webClient_UploadProgressChanged_Uploaded__0_fs__of__1_fs" + "_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Asymmetric. /// </summary> public static string WindowMargin_ASYMMETRIC_Asymmetric { get { return ResourceManager.GetString("WindowMargin_ASYMMETRIC_Asymmetric", resourceCulture); } } /// <summary> /// Looks up a localized string similar to None. /// </summary> public static string WindowMargin_NONE_None { get { return ResourceManager.GetString("WindowMargin_NONE_None", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Symmetric. /// </summary> public static string WindowMargin_SYMMETRIC_Symmetric { get { return ResourceManager.GetString("WindowMargin_SYMMETRIC_Symmetric", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Extraction. /// </summary> public static string WindowType_EXTRACTION_Extraction { get { return ResourceManager.GetString("WindowType_EXTRACTION_Extraction", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Measurement. /// </summary> public static string WindowType_MEASUREMENT_Measurement { get { return ResourceManager.GetString("WindowType_MEASUREMENT_Measurement", resourceCulture); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap WizardBlankDocument { get { object obj = ResourceManager.GetObject("WizardBlankDocument", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap WizardFasta { get { object obj = ResourceManager.GetObject("WizardFasta", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap WizardImportPeptide { get { object obj = ResourceManager.GetObject("WizardImportPeptide", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap WizardImportProteins { get { object obj = ResourceManager.GetObject("WizardImportProteins", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. 
/// </summary> public static System.Drawing.Bitmap WizardImportTransition { get { object obj = ResourceManager.GetObject("WizardImportTransition", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap WizardMoleculeIcon { get { object obj = ResourceManager.GetObject("WizardMoleculeIcon", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap WizardPeptideIcon { get { object obj = ResourceManager.GetObject("WizardPeptideIcon", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap WizardPeptideSearchDDA { get { object obj = ResourceManager.GetObject("WizardPeptideSearchDDA", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap WizardPeptideSearchDIA { get { object obj = ResourceManager.GetObject("WizardPeptideSearchDIA", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap WizardPeptideSearchPRM { get { object obj = ResourceManager.GetObject("WizardPeptideSearchPRM", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized resource of type System.Drawing.Bitmap. /// </summary> public static System.Drawing.Bitmap WizardTransitionIcon { get { object obj = ResourceManager.GetObject("WizardTransitionIcon", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } /// <summary> /// Looks up a localized string similar to Data truncation in library header. File may be corrupted.. /// </summary> public static string XHunterLibrary_CreateCache_Data_truncation_in_library_header_File_may_be_corrupted { get { return ResourceManager.GetString("XHunterLibrary_CreateCache_Data_truncation_in_library_header_File_may_be_corrupte" + "d", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Building binary cache for {0} library. /// </summary> public static string XHunterLibrary_Load_Building_binary_cache_for__0__library { get { return ResourceManager.GetString("XHunterLibrary_Load_Building_binary_cache_for__0__library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed loading library &apos;{0}&apos;.. /// </summary> public static string XHunterLibrary_Load_Failed_loading_library__0__ { get { return ResourceManager.GetString("XHunterLibrary_Load_Failed_loading_library__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Invalid precursor charge found. File may be corrupted.. /// </summary> public static string XHunterLibrary_Load_Invalid_precursor_charge_found_File_may_be_corrupted { get { return ResourceManager.GetString("XHunterLibrary_Load_Invalid_precursor_charge_found_File_may_be_corrupted", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Loading {0} library. 
/// </summary> public static string XHunterLibrary_Load_Loading__0__library { get { return ResourceManager.GetString("XHunterLibrary_Load_Loading__0__library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failure trying to read peaks. /// </summary> public static string XHunterLibrary_ReadSpectrum_Failure_trying_to_read_peaks { get { return ResourceManager.GetString("XHunterLibrary_ReadSpectrum_Failure_trying_to_read_peaks", resourceCulture); } } /// <summary> /// Looks up a localized string similar to GPM Spectral Library. /// </summary> public static string XHunterLibrary_SpecFilter_GPM_Spectral_Library { get { return ResourceManager.GetString("XHunterLibrary_SpecFilter_GPM_Spectral_Library", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Expect. /// </summary> public static string XHunterLibSpec_PEP_RANK_EXPECT_Expect { get { return ResourceManager.GetString("XHunterLibSpec_PEP_RANK_EXPECT_Expect", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Processed intensity. /// </summary> public static string XHunterLibSpec_PEP_RANK_PROCESSED_INTENSITY_Processed_intensity { get { return ResourceManager.GetString("XHunterLibSpec_PEP_RANK_PROCESSED_INTENSITY_Processed_intensity", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Deserialize. /// </summary> public static string XmlElementHelper_Deserialize_Deserialize { get { return ResourceManager.GetString("XmlElementHelper_Deserialize_Deserialize", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The class {0} has no {1}.. /// </summary> public static string XmlElementHelper_XmlElementHelper_The_class__0__has_no__1__ { get { return ResourceManager.GetString("XmlElementHelper_XmlElementHelper_The_class__0__has_no__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Unexpected failure writing transitions.. /// </summary> public static string XmlMassListExporter_Export_Unexpected_failure_writing_transitions { get { return ResourceManager.GetString("XmlMassListExporter_Export_Unexpected_failure_writing_transitions", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Name property may not be missing or empty.. /// </summary> public static string XmlNamedElement_Validate_Name_property_may_not_be_missing_or_empty { get { return ResourceManager.GetString("XmlNamedElement_Validate_Name_property_may_not_be_missing_or_empty", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempt to write scheduling parameters failed.. /// </summary> public static string XmlThermoMassListExporter_WriteTransition_Attempt_to_write_scheduling_parameters_failed { get { return ResourceManager.GetString("XmlThermoMassListExporter_WriteTransition_Attempt_to_write_scheduling_parameters_" + "failed", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The value &apos;{0}&apos; is not valid for the attribute {1}.. /// </summary> public static string XmlUtil_GetAttribute_The_value__0__is_not_valid_for_the_attribute__1__ { get { return ResourceManager.GetString("XmlUtil_GetAttribute_The_value__0__is_not_valid_for_the_attribute__1__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to It may have been truncated during file transfer.. 
/// </summary> public static string XmlUtil_GetInvalidDataMessage_It_may_have_been_truncated_during_file_transfer { get { return ResourceManager.GetString("XmlUtil_GetInvalidDataMessage_It_may_have_been_truncated_during_file_transfer", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file contains an error on line {0} at column {1}.. /// </summary> public static string XmlUtil_GetInvalidDataMessage_The_file_contains_an_error_on_line__0__at_column__1__ { get { return ResourceManager.GetString("XmlUtil_GetInvalidDataMessage_The_file_contains_an_error_on_line__0__at_column__1" + "__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file does not appear to be valid XML.. /// </summary> public static string XmlUtil_GetInvalidDataMessage_The_file_does_not_appear_to_be_valid_XML { get { return ResourceManager.GetString("XmlUtil_GetInvalidDataMessage_The_file_does_not_appear_to_be_valid_XML", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The file is empty.. /// </summary> public static string XmlUtil_GetInvalidDataMessage_The_file_is_empty { get { return ResourceManager.GetString("XmlUtil_GetInvalidDataMessage_The_file_is_empty", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempt to serialize list containing invalid type {0}.. /// </summary> public static string XmlUtil_WriteElements_Attempt_to_serialize_list_containing_invalid_type__0__ { get { return ResourceManager.GetString("XmlUtil_WriteElements_Attempt_to_serialize_list_containing_invalid_type__0__", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Attempt to serialize list missing an element.. /// </summary> public static string XmlUtil_WriteElements_Attempt_to_serialize_list_missing_an_element { get { return ResourceManager.GetString("XmlUtil_WriteElements_Attempt_to_serialize_list_missing_an_element", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Failed attempting to add the file {0}. /// </summary> public static string ZipFileShare_AddFile_Failed_attempting_to_add_the_file__0_ { get { return ResourceManager.GetString("ZipFileShare_AddFile_Failed_attempting_to_add_the_file__0_", resourceCulture); } } /// <summary> /// Looks up a localized string similar to The name &apos;{0}&apos; is already in use from the path {1}. /// </summary> public static string ZipFileShare_AddFile_The_name___0___is_already_in_use_from_the_path__1_ { get { return ResourceManager.GetString("ZipFileShare_AddFile_The_name___0___is_already_in_use_from_the_path__1_", resourceCulture); } } } }
1
14,016
Seems like a lot of unintended changes. Are your line break settings causing these?
ProteoWizard-pwiz
.cs
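The review message in the preceding record points at line-ending churn rather than substantive edits. As a minimal sketch of how to catch that before it turns into a whole-file diff, assuming a plain Python environment (the helper names and the `.cs` default are illustrative, not taken from the record):

import os


def line_ending_counts(path):
    """Count CRLF and bare-LF terminators in a file, reading it as bytes."""
    with open(path, 'rb') as f:
        data = f.read()
    crlf = data.count(b'\r\n')
    lf = data.count(b'\n') - crlf  # LF not preceded by CR
    return crlf, lf


def report_mixed_endings(root, ext='.cs'):
    """Print files under `root` that mix CRLF and LF line endings."""
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            if name.endswith(ext):
                path = os.path.join(dirpath, name)
                crlf, lf = line_ending_counts(path)
                if crlf and lf:
                    print('%s: %d CRLF, %d LF' % (path, crlf, lf))


if __name__ == '__main__':
    report_mixed_endings('.')

Files flagged here are the ones an editor with mismatched line-break settings will silently rewrite end to end, producing exactly the kind of unintended changes the reviewer describes.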
@@ -231,9 +231,10 @@ def generate_r_docs(app): r-jsonlite=1.5=r351h96ca727_0 \ r-matrix=1.2_14=r351h96ca727_0 \ r-testthat=2.0.0=r351h29659fb_0 \ - cmake=3.14.0=h52cb24c_0 + cmake=3.14.0=h52cb24c_0 \ + ca-certificates=2019.11.27=0 /home/docs/.conda/bin/conda install -q -y -n r_env -c conda-forge \ - r-pkgdown=1.3.0=r351h6115d3f_1000 \ + r-pkgdown=1.3.0=r35h6115d3f_1001 \ r-roxygen2=6.1.1=r35h0357c0b_1001 source /home/docs/.conda/bin/activate r_env export TAR=/bin/tar
1
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# LightGBM documentation build configuration file, created by
# sphinx-quickstart on Thu May 4 14:30:58 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute.
"""Sphinx configuration file."""
import datetime
import os
import sys

import sphinx
from distutils.dir_util import copy_tree
from docutils.parsers.rst import Directive
from sphinx.errors import VersionRequirementError
from subprocess import PIPE, Popen

CURR_PATH = os.path.abspath(os.path.dirname(__file__))
LIB_PATH = os.path.join(CURR_PATH, os.path.pardir, 'python-package')
sys.path.insert(0, LIB_PATH)

# -- mock out modules
try:
    from unittest.mock import Mock  # Python 3.x
except ImportError:
    from mock import Mock  # Python 2.x

MOCK_MODULES = ['numpy', 'scipy', 'scipy.sparse', 'sklearn',
                'matplotlib', 'pandas', 'graphviz']
for mod_name in MOCK_MODULES:
    sys.modules[mod_name] = Mock()


class IgnoredDirective(Directive):
    """Stub for unknown directives."""

    has_content = True

    def run(self):
        """Do nothing."""
        return []


# -- General configuration ------------------------------------------------

os.environ['LIGHTGBM_BUILD_DOC'] = '1'
C_API = os.environ.get('C_API', '').lower().strip() != 'no'
RTD = bool(os.environ.get('READTHEDOCS', ''))

# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.3'  # Due to sphinx.ext.napoleon
if needs_sphinx > sphinx.__version__:
    message = 'This project needs at least Sphinx v%s' % needs_sphinx
    raise VersionRequirementError(message)

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.todo',
    'sphinx.ext.viewcode',
    'sphinx.ext.napoleon',
]
autodoc_default_flags = ['members', 'inherited-members', 'show-inheritance']
autodoc_default_options = {
    "members": True,
    "inherited-members": True,
    "show-inheritance": True,
}

# Generate autosummary pages. Output should be set with: `:toctree: pythonapi/`
autosummary_generate = ['Python-API.rst']

# Only the class' docstring is inserted.
autoclass_content = 'class'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'LightGBM'
copyright = '%s, Microsoft Corporation' % str(datetime.datetime.now().year)
author = 'Microsoft Corporation'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
with open(os.path.join(CURR_PATH, os.path.pardir, 'VERSION.txt'), 'r') as f:
    # The short X.Y version.
    version = f.read().strip()
# The full version, including alpha/beta/rc tags.
release = version

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'default'

# -- Configuration for C API docs generation ------------------------------

if C_API:
    extensions.extend([
        'breathe',
    ])
    breathe_projects = {
        "LightGBM": os.path.join(CURR_PATH, 'doxyoutput', 'xml')
    }
    breathe_default_project = "LightGBM"
    breathe_domain_by_extension = {
        "h": "c",
    }
    breathe_show_define_initializer = True

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    'includehidden': False,
}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'LightGBMdoc'


def generate_doxygen_xml(app):
    """Generate XML documentation for C API by Doxygen.

    Parameters
    ----------
    app : object
        The application object representing the Sphinx process.
    """
    doxygen_args = [
        "INPUT={}".format(os.path.join(CURR_PATH, os.path.pardir,
                                       'include', 'LightGBM', 'c_api.h')),
        "OUTPUT_DIRECTORY={}".format(os.path.join(CURR_PATH, 'doxyoutput')),
        "GENERATE_HTML=NO",
        "GENERATE_LATEX=NO",
        "GENERATE_XML=YES",
        "XML_OUTPUT=xml",
        "XML_PROGRAMLISTING=YES",
        r'ALIASES="rst=\verbatim embed:rst:leading-asterisk"',
        r'ALIASES+="endrst=\endverbatim"',
        "ENABLE_PREPROCESSING=YES",
        "MACRO_EXPANSION=YES",
        "EXPAND_ONLY_PREDEF=NO",
        "SKIP_FUNCTION_MACROS=NO",
        "SORT_BRIEF_DOCS=YES",
        "WARN_AS_ERROR=YES",
    ]
    doxygen_input = '\n'.join(doxygen_args)
    is_py3 = sys.version[0] == "3"
    if is_py3:
        doxygen_input = bytes(doxygen_input, "utf-8")
    if not os.path.exists(os.path.join(CURR_PATH, 'doxyoutput')):
        os.makedirs(os.path.join(CURR_PATH, 'doxyoutput'))
    try:
        # Warning! The following code can cause buffer overflows on RTD.
        # Consider suppressing output completely if RTD project silently fails.
        # Refer to https://github.com/svenevs/exhale
        # /blob/fe7644829057af622e467bb529db6c03a830da99/exhale/deploy.py#L99-L111
        process = Popen(["doxygen", "-"], stdin=PIPE, stdout=PIPE, stderr=PIPE)
        stdout, stderr = process.communicate(doxygen_input)
        output = '\n'.join([i.decode('utf-8') if is_py3 else i
                            for i in (stdout, stderr) if i is not None])
        if process.returncode != 0:
            raise RuntimeError(output)
        else:
            print(output)
    except BaseException as e:
        raise Exception("An error has occurred while executing Doxygen\n" + str(e))


def generate_r_docs(app):
    """Generate documentation for R-package.

    Parameters
    ----------
    app : object
        The application object representing the Sphinx process.
    """
    commands = """
    echo 'options(repos = "https://cran.rstudio.com")' > $HOME/.Rprofile
    /home/docs/.conda/bin/conda create -q -y -n r_env \
        r-base=3.5.1=h1e0a451_2 \
        r-devtools=1.13.6=r351h6115d3f_0 \
        r-data.table=1.11.4=r351h96ca727_0 \
        r-jsonlite=1.5=r351h96ca727_0 \
        r-matrix=1.2_14=r351h96ca727_0 \
        r-testthat=2.0.0=r351h29659fb_0 \
        cmake=3.14.0=h52cb24c_0
    /home/docs/.conda/bin/conda install -q -y -n r_env -c conda-forge \
        r-pkgdown=1.3.0=r351h6115d3f_1000 \
        r-roxygen2=6.1.1=r35h0357c0b_1001
    source /home/docs/.conda/bin/activate r_env
    export TAR=/bin/tar
    cd {0}
    sed -i'.bak' '/# Build the package (do not touch this line!)/q' build_r.R
    Rscript build_r.R
    Rscript build_r_site.R
    """.format(os.path.join(CURR_PATH, os.path.pardir))
    try:
        # Warning! The following code can cause buffer overflows on RTD.
        # Consider suppressing output completely if RTD project silently fails.
        # Refer to https://github.com/svenevs/exhale
        # /blob/fe7644829057af622e467bb529db6c03a830da99/exhale/deploy.py#L99-L111
        process = Popen(['/bin/bash'],
                        stdin=PIPE, stdout=PIPE, stderr=PIPE,
                        universal_newlines=True)
        stdout, stderr = process.communicate(commands)
        output = '\n'.join([i for i in (stdout, stderr) if i is not None])
        if process.returncode != 0:
            raise RuntimeError(output)
        else:
            print(output)
    except BaseException as e:
        raise Exception("An error has occurred while generating documentation for R-package\n" + str(e))


def setup(app):
    """Add new elements at Sphinx initialization time.

    Parameters
    ----------
    app : object
        The application object representing the Sphinx process.
    """
    first_run = not os.path.exists(os.path.join(CURR_PATH, '_FIRST_RUN.flag'))
    if first_run and RTD:
        open(os.path.join(CURR_PATH, '_FIRST_RUN.flag'), 'w').close()
    if C_API:
        app.connect("builder-inited", generate_doxygen_xml)
    else:
        app.add_directive('doxygenfile', IgnoredDirective)
    if RTD:  # build R docs only on Read the Docs site
        if first_run:
            app.connect("builder-inited", generate_r_docs)
        app.connect("build-finished",
                    lambda app, exception: copy_tree(os.path.join(CURR_PATH, os.path.pardir,
                                                                  "lightgbm_r", "docs"),
                                                     os.path.join(app.outdir, "R"), verbose=0))
    add_js_file = getattr(app, 'add_js_file', False) or app.add_javascript
    add_js_file("js/script.js")
1
22,080
@StrikerRUS what if we just removed build numbers and did `r-pkgdown=1.3.0`? Won't we have to fix this again if the RTD build machines change?
microsoft-LightGBM
cpp
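The exchange in the preceding record is about exact conda pins of the form name=version=build, which stop resolving whenever the package is rebuilt under a new build string. A minimal sketch of the reviewer's "remove build numbers" idea, assuming specs stay in that three-part form (the helper name relax_conda_spec is invented, not from the record):

def relax_conda_spec(spec):
    """Reduce 'name=version=build' to 'name=version'; leave other specs alone.

    >>> relax_conda_spec('r-pkgdown=1.3.0=r351h6115d3f_1000')
    'r-pkgdown=1.3.0'
    >>> relax_conda_spec('cmake=3.14.0')
    'cmake=3.14.0'
    """
    parts = spec.split('=')
    return '='.join(parts[:2]) if len(parts) == 3 else spec


if __name__ == '__main__':
    import doctest
    doctest.testmod()

The trade-off the reviewer's question leaves open: dropping the build string gives up bit-for-bit reproducibility in exchange for resilience when the build machines or channel archives change.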
@@ -1,6 +1,5 @@ -# $Id$ # -# Copyright (C) 2003-2006 Rational Discovery LLC +# Copyright (C) 2002-2006 greg Landrum and Rational Discovery LLC # # @@ All Rights Reserved @@ # This file is part of the RDKit.
1
# $Id$
#
# Copyright (C) 2003-2006 Rational Discovery LLC
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
""" unit testing code for molecule suppliers """

import os
import tempfile
import unittest

from rdkit import Chem, RDLogger
from rdkit import RDConfig
from rdkit.six import next


class TestCase(unittest.TestCase):

  def tearDown(self):
    RDLogger.EnableLog('rdApp.error')

  def test1SDSupplier(self):
    fileN = os.path.join(RDConfig.RDCodeDir, 'VLib', 'NodeLib', 'test_data', 'NCI_aids.10.sdf')
    suppl = Chem.SDMolSupplier(fileN)
    ms = [x for x in suppl]
    self.assertEqual(len(ms), 10)

    # test repeating:
    ms = [x for x in suppl]
    self.assertEqual(len(ms), 10)

    # test reset:
    suppl.reset()
    m = next(suppl)
    self.assertEqual(m.GetProp('_Name'), '48')
    self.assertEqual(m.GetProp('NSC'), '48')
    self.assertEqual(m.GetProp('CAS_RN'), '15716-70-8')
    m = next(suppl)
    self.assertEqual(m.GetProp('_Name'), '78')
    self.assertEqual(m.GetProp('NSC'), '78')
    self.assertEqual(m.GetProp('CAS_RN'), '6290-84-2')

    suppl.reset()
    for _ in range(10):
      m = next(suppl)
    with self.assertRaises(StopIteration):
      m = next(suppl)

  def test2SmilesSupplier(self):
    fileN = os.path.join(RDConfig.RDCodeDir, 'VLib', 'NodeLib', 'test_data', 'pgp_20.txt')
    suppl = Chem.SmilesMolSupplier(fileN, delimiter='\t', smilesColumn=2, nameColumn=1,
                                   titleLine=1)
    ms = [x for x in suppl]
    self.assertEqual(len(ms), 20)

    # test repeating:
    ms = [x for x in suppl]
    self.assertEqual(len(ms), 20)

    # test reset:
    suppl.reset()
    m = next(suppl)
    self.assertEqual(m.GetProp('_Name'), 'ALDOSTERONE')
    self.assertEqual(m.GetProp('ID'), 'RD-PGP-0001')
    m = next(suppl)
    self.assertEqual(m.GetProp('_Name'), 'AMIODARONE')
    self.assertEqual(m.GetProp('ID'), 'RD-PGP-0002')

    suppl.reset()
    for _ in range(20):
      m = next(suppl)
    with self.assertRaises(StopIteration):
      m = next(suppl)

  def test3SmilesSupplier(self):
    txt = """C1CC1,1
CC(=O)O,3
fail,4
CCOC,5
"""
    RDLogger.DisableLog('rdApp.error')
    fileN = tempfile.mktemp('.csv')
    try:
      with open(fileN, 'w+') as f:
        f.write(txt)
      suppl = Chem.SmilesMolSupplier(fileN, delimiter=',', smilesColumn=0, nameColumn=1,
                                     titleLine=0)
      ms = [x for x in suppl]
      while ms.count(None):
        ms.remove(None)
      self.assertEqual(len(ms), 3)
    finally:
      os.unlink(fileN)


if __name__ == '__main__':
  unittest.main()
1
16,667
I'm not arguing with it, but how did you decide to make this change?
rdkit-rdkit
cpp
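The test file in the preceding record allocates its scratch file with tempfile.mktemp, which the Python documentation flags as race-prone: the name is handed back without the file being created. A hedged alternative sketch, not part of the RDKit change under review (the wrapper name write_scratch_csv is invented):

import os
import tempfile


def write_scratch_csv(text):
    """Create a real temporary .csv file and return its path.

    Unlike tempfile.mktemp, NamedTemporaryFile actually creates the file,
    closing the window in which another process could claim the same name;
    delete=False lets the caller reopen it by path and unlink it later.
    """
    with tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False) as f:
        f.write(text)
        return f.name


if __name__ == '__main__':
    path = write_scratch_csv('C1CC1,1\n')
    try:
        print(open(path).read())
    finally:
        os.unlink(path)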
@@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- ## ## This file is part of Invenio. -## Copyright (C) 2011, 2012 CERN. +## Copyright (C) 2011, 2012, 2013, 2014 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as
1
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.

"""Generic Framework for extracting metadata from records using bibsched"""

import traceback

from datetime import datetime
from itertools import chain

from invenio.bibtask import task_get_option, write_message, \
                            task_sleep_now_if_required, \
                            task_update_progress
from invenio.dbquery import run_sql
from invenio.search_engine import get_record
from invenio.search_engine import get_collection_reclist
from invenio.refextract_api import get_pdf_doc
from invenio.bibrecord import record_get_field_instances, \
                              field_get_subfield_values


def task_run_core_wrapper(name, core_func, extra_vars=None, post_process=None):
    def fun():
        try:
            return task_run_core(name, core_func,
                                 extra_vars=extra_vars,
                                 post_process=post_process)
        except Exception:
            # Remove extra '\n'
            write_message(traceback.format_exc()[:-1])
            raise
    return fun


def fetch_last_updated(name):
    select_sql = "SELECT last_recid, last_updated FROM xtrJOB" \
                 " WHERE name = %s LIMIT 1"
    row = run_sql(select_sql, (name,))
    if not row:
        sql = "INSERT INTO xtrJOB (name, last_updated, last_recid) " \
              "VALUES (%s, '1970-01-01', 0)"
        run_sql(sql, (name,))
        row = run_sql(select_sql, (name,))

    # Fallback in case we receive None instead of a valid date
    last_recid = row[0][0] or 0
    last_date = row[0][1] or datetime(year=1, month=1, day=1)

    return last_recid, last_date


def store_last_updated(recid, creation_date, name):
    sql = "UPDATE xtrJOB SET last_recid = %s WHERE name=%s AND last_recid < %s"
    run_sql(sql, (recid, name, recid))
    sql = "UPDATE xtrJOB SET last_updated = %s " \
          "WHERE name=%s AND last_updated < %s"
    iso_date = creation_date.isoformat()
    run_sql(sql, (iso_date, name, iso_date))


def fetch_concerned_records(name):
    task_update_progress("Fetching record ids")

    last_recid, last_date = fetch_last_updated(name)

    if task_get_option('new'):
        # Fetch all records inserted since last run
        sql = "SELECT `id`, `creation_date` FROM `bibrec` " \
              "WHERE `creation_date` >= %s " \
              "AND `id` > %s " \
              "ORDER BY `creation_date`"
        records = run_sql(sql, (last_date.isoformat(), last_recid))
    elif task_get_option('modified'):
        # Fetch all records inserted since last run
        sql = "SELECT `id`, `modification_date` FROM `bibrec` " \
              "WHERE `modification_date` >= %s " \
              "AND `id` > %s " \
              "ORDER BY `modification_date`"
        records = run_sql(sql, (last_date.isoformat(), last_recid))
    else:
        given_recids = task_get_option('recids')
        for collection in task_get_option('collections'):
            given_recids.add(get_collection_reclist(collection))

        if given_recids:
            format_strings = ','.join(['%s'] * len(given_recids))
            records = run_sql("SELECT `id`, NULL FROM `bibrec` " \
                              "WHERE `id` IN (%s) ORDER BY `id`" % format_strings,
                              list(given_recids))
        else:
            records = []

    task_update_progress("Done fetching record ids")

    return records


def fetch_concerned_arxiv_records(name):
    task_update_progress("Fetching arxiv record ids")

    dummy, last_date = fetch_last_updated(name)

    # Fetch all records inserted since last run
    sql = "SELECT `id`, `modification_date` FROM `bibrec` " \
          "WHERE `modification_date` >= %s " \
          "AND `creation_date` > NOW() - INTERVAL 7 DAY " \
          "ORDER BY `modification_date`" \
          "LIMIT 5000"
    records = run_sql(sql, [last_date.isoformat()])

    def check_arxiv(recid):
        record = get_record(recid)

        for report_tag in record_get_field_instances(record, "037"):
            for category in field_get_subfield_values(report_tag, 'a'):
                if category.startswith('arXiv'):
                    return True
        return False

    def check_pdf_date(recid):
        doc = get_pdf_doc(recid)
        if doc:
            return doc.md > last_date
        return False

    records = [(r, mod_date) for r, mod_date in records if check_arxiv(r)]
    records = [(r, mod_date) for r, mod_date in records if check_pdf_date(r)]

    write_message("recids %s" % repr([(r, mod_date.isoformat()) \
                                      for r, mod_date in records]))

    task_update_progress("Done fetching arxiv record ids")

    return records


def process_records(name, records, func, extra_vars):
    count = 1
    total = len(records)

    for recid, date in records:
        task_sleep_now_if_required(can_stop_too=True)
        msg = "Extracting for %s (%d/%d)" % (recid, count, total)
        task_update_progress(msg)
        write_message(msg)

        func(recid, **extra_vars)

        if date:
            store_last_updated(recid, date, name)

        count += 1


def task_run_core(name, func, extra_vars=None, post_process=None):
    """Calls extract_references in refextract"""
    if task_get_option('task_specific_name'):
        name = "%s:%s" % (name, task_get_option('task_specific_name'))
    write_message("Starting %s" % name)

    if extra_vars is None:
        extra_vars = {}

    records = fetch_concerned_records(name)
    process_records(name, records, func, extra_vars)

    if task_get_option('arxiv'):
        extra_vars['_arxiv'] = True
        arxiv_name = "%s:arxiv" % name
        records = fetch_concerned_arxiv_records(arxiv_name)
        process_records(arxiv_name, records, func, extra_vars)

    if post_process:
        post_process(**extra_vars)

    write_message("Complete")
    return True


def split_ids(value):
    """
    Split ids given in the command line
    Possible formats are:
    * 1
    * 1,2,3,4
    * 1-5,20,30,40
    Returns respectively
    * set([1])
    * set([1,2,3,4])
    * set([1,2,3,4,5,20,30,40])
    """
    def parse(el):
        el = el.strip()
        if not el:
            ret = []
        elif '-' in el:
            start, end = el.split('-', 1)
            ret = xrange(int(start), int(end) + 1)
        else:
            ret = [int(el)]
        return ret
    return chain(*(parse(c) for c in value.split(',') if c.strip()))
1
12,801
1: D400 First line should end with '.', not 'd' 4: I102 copyright year is outdated, expected 2014 but got 2012 25:29: E126 continuation line over-indented for hanging indent 31: D103 Docstring missing 40: D103 Docstring missing 57: D103 Docstring missing 68: D103 Docstring missing 101: D103 Docstring missing 115: D401 First line should be imperative: 'Call', not 'Calls' 115: D400 First line should end with '.', not 't' 134: D400 First line should end with '.', not 'e' 134: D205 Blank line missing between one-line summary and description
inveniosoftware-invenio
py
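The message field of the preceding record is pydocstyle/pycodestyle output for the file above. A short sketch of what the docstring codes ask for, reusing two signatures from that file with stub bodies (the rewritten wording is illustrative, not the project's actual fix): D401 wants an imperative first word, D400 a trailing period, and D205 a blank line between the one-line summary and the description.

def task_run_core(name, func, extra_vars=None, post_process=None):
    """Call extract_references in refextract."""
    # D401: 'Call', not 'Calls'; D400: the summary now ends with a period.


def split_ids(value):
    """Split ids given on the command line.

    Accepted formats are '1', '1,2,3,4' and '1-5,20,30,40'; ranges are
    expanded, so the last form yields set([1, 2, 3, 4, 5, 20, 30, 40]).
    """
    # D205/D400: one-line summary ending in '.', then a blank line before
    # the longer description.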
@@ -42,6 +42,14 @@ type ACMEIssuer struct {
 	// Only ACME v2 endpoints (i.e. RFC 8555) are supported.
 	Server string `json:"server"`
 
+	// PreferredChain is the chain to use if the ACME server outputs multiple.
+	// There is no guarantee that the preferred chain will be the one delivered
+	// by the ACME endpoint.
+	// For example, for Let's Encrypt's DST cross-sign you would use:
+	// "DST Root CA X3" or "ISRG Root X1" for the newer Let's Encrypt root CA.
+	// +optional
+	PreferredChain string `json:"preferredChain"`
+
 	// Enables or disables validation of the ACME server TLS certificate.
 	// If true, requests to the ACME server will not have their TLS certificate
 	// validated (i.e. insecure connections will be allowed).
1
/*
Copyright 2020 The Jetstack cert-manager contributors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package v1

import (
	corev1 "k8s.io/api/core/v1"
	apiext "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1beta1"

	cmmeta "github.com/jetstack/cert-manager/pkg/apis/meta/v1"
)

// ACMEIssuer contains the specification for an ACME issuer.
// This uses the RFC8555 specification to obtain certificates by completing
// 'challenges' to prove ownership of domain identifiers.
// Earlier draft versions of the ACME specification are not supported.
type ACMEIssuer struct {
	// Email is the email address to be associated with the ACME account.
	// This field is optional, but it is strongly recommended to be set.
	// It will be used to contact you in case of issues with your account or
	// certificates, including expiry notification emails.
	// This field may be updated after the account is initially registered.
	// +optional
	Email string `json:"email,omitempty"`

	// Server is the URL used to access the ACME server's 'directory' endpoint.
	// For example, for Let's Encrypt's staging endpoint, you would use:
	// "https://acme-staging-v02.api.letsencrypt.org/directory".
	// Only ACME v2 endpoints (i.e. RFC 8555) are supported.
	Server string `json:"server"`

	// Enables or disables validation of the ACME server TLS certificate.
	// If true, requests to the ACME server will not have their TLS certificate
	// validated (i.e. insecure connections will be allowed).
	// Only enable this option in development environments.
	// The cert-manager system installed roots will be used to verify connections
	// to the ACME server if this is false.
	// Defaults to false.
	// +optional
	SkipTLSVerify bool `json:"skipTLSVerify,omitempty"`

	// ExternalAccountBinding is a reference to a CA external account of the ACME
	// server.
	// If set, upon registration cert-manager will attempt to associate the given
	// external account credentials with the registered ACME account.
	// +optional
	ExternalAccountBinding *ACMEExternalAccountBinding `json:"externalAccountBinding,omitempty"`

	// PrivateKey is the name of a Kubernetes Secret resource that will be used to
	// store the automatically generated ACME account private key.
	// Optionally, a `key` may be specified to select a specific entry within
	// the named Secret resource.
	// If `key` is not specified, a default of `tls.key` will be used.
	PrivateKey cmmeta.SecretKeySelector `json:"privateKeySecretRef"`

	// Solvers is a list of challenge solvers that will be used to solve
	// ACME challenges for the matching domains.
	// Solver configurations must be provided in order to obtain certificates
	// from an ACME server.
	// For more information, see: https://cert-manager.io/docs/configuration/acme/
	// +optional
	Solvers []ACMEChallengeSolver `json:"solvers,omitempty"`
}

// ACMEExternalAccountBinding is a reference to a CA external account of the ACME
// server.
type ACMEExternalAccountBinding struct {
	// keyID is the ID of the CA key that the External Account is bound to.
	KeyID string `json:"keyID"`

	// keySecretRef is a Secret Key Selector referencing a data item in a Kubernetes
	// Secret which holds the symmetric MAC key of the External Account Binding.
	// The `key` is the index string that is paired with the key data in the
	// Secret and should not be confused with the key data itself, or indeed with
	// the External Account Binding keyID above.
	// The secret key stored in the Secret **must** be un-padded, base64 URL
	// encoded data.
	Key cmmeta.SecretKeySelector `json:"keySecretRef"`

	// keyAlgorithm is the MAC key algorithm that the key is used for.
	// Valid values are "HS256", "HS384" and "HS512".
	KeyAlgorithm HMACKeyAlgorithm `json:"keyAlgorithm"`
}

// HMACKeyAlgorithm is the name of a key algorithm used for HMAC encryption
// +kubebuilder:validation:Enum=HS256;HS384;HS512
type HMACKeyAlgorithm string

const (
	HS256 HMACKeyAlgorithm = "HS256"
	HS384 HMACKeyAlgorithm = "HS384"
	HS512 HMACKeyAlgorithm = "HS512"
)

// Configures an issuer to solve challenges using the specified options.
// Only one of HTTP01 or DNS01 may be provided.
type ACMEChallengeSolver struct {
	// Selector selects a set of DNSNames on the Certificate resource that
	// should be solved using this challenge solver.
	// If not specified, the solver will be treated as the 'default' solver
	// with the lowest priority, i.e. if any other solver has a more specific
	// match, it will be used instead.
	// +optional
	Selector *CertificateDNSNameSelector `json:"selector,omitempty"`

	// Configures cert-manager to attempt to complete authorizations by
	// performing the HTTP01 challenge flow.
	// It is not possible to obtain certificates for wildcard domain names
	// (e.g. `*.example.com`) using the HTTP01 challenge mechanism.
	// +optional
	HTTP01 *ACMEChallengeSolverHTTP01 `json:"http01,omitempty"`

	// Configures cert-manager to attempt to complete authorizations by
	// performing the DNS01 challenge flow.
	// +optional
	DNS01 *ACMEChallengeSolverDNS01 `json:"dns01,omitempty"`
}

// CertificateDomainSelector selects certificates using a label selector, and
// can optionally select individual DNS names within those certificates.
// If both MatchLabels and DNSNames are empty, this selector will match all
// certificates and DNS names within them.
type CertificateDNSNameSelector struct {
	// A label selector that is used to refine the set of certificates that
	// this challenge solver will apply to.
	// +optional
	MatchLabels map[string]string `json:"matchLabels,omitempty"`

	// List of DNSNames that this solver will be used to solve.
	// If specified and a match is found, a dnsNames selector will take
	// precedence over a dnsZones selector.
	// If multiple solvers match with the same dnsNames value, the solver
	// with the most matching labels in matchLabels will be selected.
	// If neither has more matches, the solver defined earlier in the list
	// will be selected.
	// +optional
	DNSNames []string `json:"dnsNames,omitempty"`

	// List of DNSZones that this solver will be used to solve.
	// The most specific DNS zone match specified here will take precedence
	// over other DNS zone matches, so a solver specifying sys.example.com
	// will be selected over one specifying example.com for the domain
	// www.sys.example.com.
	// If multiple solvers match with the same dnsZones value, the solver
	// with the most matching labels in matchLabels will be selected.
	// If neither has more matches, the solver defined earlier in the list
	// will be selected.
	// +optional
	DNSZones []string `json:"dnsZones,omitempty"`
}

// ACMEChallengeSolverHTTP01 contains configuration detailing how to solve
// HTTP01 challenges within a Kubernetes cluster.
// Typically this is accomplished through creating 'routes' of some description
// that configure ingress controllers to direct traffic to 'solver pods', which
// are responsible for responding to the ACME server's HTTP requests.
type ACMEChallengeSolverHTTP01 struct {
	// The ingress based HTTP01 challenge solver will solve challenges by
	// creating or modifying Ingress resources in order to route requests for
	// '/.well-known/acme-challenge/XYZ' to 'challenge solver' pods that are
	// provisioned by cert-manager for each Challenge to be completed.
	// +optional
	Ingress *ACMEChallengeSolverHTTP01Ingress `json:"ingress,omitempty"`
}

type ACMEChallengeSolverHTTP01Ingress struct {
	// Optional service type for Kubernetes solver service
	// +optional
	ServiceType corev1.ServiceType `json:"serviceType,omitempty"`

	// The ingress class to use when creating Ingress resources to solve ACME
	// challenges that use this challenge solver.
	// Only one of 'class' or 'name' may be specified.
	// +optional
	Class *string `json:"class,omitempty"`

	// The name of the ingress resource that should have ACME challenge solving
	// routes inserted into it in order to solve HTTP01 challenges.
	// This is typically used in conjunction with ingress controllers like
	// ingress-gce, which maintains a 1:1 mapping between external IPs and
	// ingress resources.
	// +optional
	Name string `json:"name,omitempty"`

	// Optional pod template used to configure the ACME challenge solver pods
	// used for HTTP01 challenges
	// +optional
	PodTemplate *ACMEChallengeSolverHTTP01IngressPodTemplate `json:"podTemplate,omitempty"`

	// Optional ingress template used to configure the ACME challenge solver
	// ingress used for HTTP01 challenges
	// +optional
	IngressTemplate *ACMEChallengeSolverHTTP01IngressTemplate `json:"ingressTemplate,omitempty"`
}

type ACMEChallengeSolverHTTP01IngressPodTemplate struct {
	// ObjectMeta overrides for the pod used to solve HTTP01 challenges.
	// Only the 'labels' and 'annotations' fields may be set.
	// If labels or annotations overlap with in-built values, the values here
	// will override the in-built values.
	// +optional
	ACMEChallengeSolverHTTP01IngressPodObjectMeta `json:"metadata"`

	// PodSpec defines overrides for the HTTP01 challenge solver pod.
	// Only the 'priorityClassName', 'nodeSelector', 'affinity',
	// 'serviceAccountName' and 'tolerations' fields are supported currently.
	// All other fields will be ignored.
	// +optional
	Spec ACMEChallengeSolverHTTP01IngressPodSpec `json:"spec"`
}

type ACMEChallengeSolverHTTP01IngressPodObjectMeta struct {
	// Annotations that should be added to the created ACME HTTP01 solver pods.
	// +optional
	Annotations map[string]string `json:"annotations,omitempty"`

	// Labels that should be added to the created ACME HTTP01 solver pods.
	// +optional
	Labels map[string]string `json:"labels,omitempty"`
}

type ACMEChallengeSolverHTTP01IngressPodSpec struct {
	// NodeSelector is a selector which must be true for the pod to fit on a node.
	// Selector which must match a node's labels for the pod to be scheduled on that node.
// More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/ // +optional NodeSelector map[string]string `json:"nodeSelector,omitempty"` // If specified, the pod's scheduling constraints // +optional Affinity *corev1.Affinity `json:"affinity,omitempty"` // If specified, the pod's tolerations. // +optional Tolerations []corev1.Toleration `json:"tolerations,omitempty"` // If specified, the pod's priorityClassName. // +optional PriorityClassName string `json:"priorityClassName,omitempty"` // If specified, the pod's service account // +optional ServiceAccountName string `json:"serviceAccountName,omitempty"` } type ACMEChallengeSolverHTTP01IngressTemplate struct { // ObjectMeta overrides for the ingress used to solve HTTP01 challenges. // Only the 'labels' and 'annotations' fields may be set. // If labels or annotations overlap with in-built values, the values here // will override the in-built values. // +optional ACMEChallengeSolverHTTP01IngressObjectMeta `json:"metadata"` } type ACMEChallengeSolverHTTP01IngressObjectMeta struct { // Annotations that should be added to the created ACME HTTP01 solver ingress. // +optional Annotations map[string]string `json:"annotations,omitempty"` // Labels that should be added to the created ACME HTTP01 solver ingress. // +optional Labels map[string]string `json:"labels,omitempty"` } // Used to configure a DNS01 challenge provider to be used when solving DNS01 // challenges. // Only one DNS provider may be configured per solver. type ACMEChallengeSolverDNS01 struct { // CNAMEStrategy configures how the DNS01 provider should handle CNAME // records when found in DNS zones. // +optional CNAMEStrategy CNAMEStrategy `json:"cnameStrategy,omitempty"` // Use the Akamai DNS zone management API to manage DNS01 challenge records. // +optional Akamai *ACMEIssuerDNS01ProviderAkamai `json:"akamai,omitempty"` // Use the Google Cloud DNS API to manage DNS01 challenge records. // +optional CloudDNS *ACMEIssuerDNS01ProviderCloudDNS `json:"cloudDNS,omitempty"` // Use the Cloudflare API to manage DNS01 challenge records. // +optional Cloudflare *ACMEIssuerDNS01ProviderCloudflare `json:"cloudflare,omitempty"` // Use the AWS Route53 API to manage DNS01 challenge records. // +optional Route53 *ACMEIssuerDNS01ProviderRoute53 `json:"route53,omitempty"` // Use the Microsoft Azure DNS API to manage DNS01 challenge records. // +optional AzureDNS *ACMEIssuerDNS01ProviderAzureDNS `json:"azureDNS,omitempty"` // Use the DigitalOcean DNS API to manage DNS01 challenge records. // +optional DigitalOcean *ACMEIssuerDNS01ProviderDigitalOcean `json:"digitalocean,omitempty"` // Use the 'ACME DNS' (https://github.com/joohoi/acme-dns) API to manage // DNS01 challenge records. // +optional AcmeDNS *ACMEIssuerDNS01ProviderAcmeDNS `json:"acmeDNS,omitempty"` // Use RFC2136 ("Dynamic Updates in the Domain Name System") (https://datatracker.ietf.org/doc/rfc2136/) // to manage DNS01 challenge records. // +optional RFC2136 *ACMEIssuerDNS01ProviderRFC2136 `json:"rfc2136,omitempty"` // Configure an external webhook based DNS01 challenge solver to manage // DNS01 challenge records. // +optional Webhook *ACMEIssuerDNS01ProviderWebhook `json:"webhook,omitempty"` } // CNAMEStrategy configures how the DNS01 provider should handle CNAME records // when found in DNS zones. // By default, the None strategy will be applied (i.e. do not follow CNAMEs). 
// +kubebuilder:validation:Enum=None;Follow type CNAMEStrategy string const ( // NoneStrategy indicates that no CNAME resolution strategy should be used // when determining which DNS zone to update during DNS01 challenges. NoneStrategy = "None" // FollowStrategy will cause cert-manager to recurse through CNAMEs in // order to determine which DNS zone to update during DNS01 challenges. // This is useful if you do not want to grant cert-manager access to your // root DNS zone, and instead delegate the _acme-challenge.example.com // subdomain to some other, less privileged domain. FollowStrategy = "Follow" ) // ACMEIssuerDNS01ProviderAkamai is a structure containing the DNS // configuration for Akamai DNS—Zone Record Management API type ACMEIssuerDNS01ProviderAkamai struct { ServiceConsumerDomain string `json:"serviceConsumerDomain"` ClientToken cmmeta.SecretKeySelector `json:"clientTokenSecretRef"` ClientSecret cmmeta.SecretKeySelector `json:"clientSecretSecretRef"` AccessToken cmmeta.SecretKeySelector `json:"accessTokenSecretRef"` } // ACMEIssuerDNS01ProviderCloudDNS is a structure containing the DNS // configuration for Google Cloud DNS type ACMEIssuerDNS01ProviderCloudDNS struct { // +optional ServiceAccount *cmmeta.SecretKeySelector `json:"serviceAccountSecretRef,omitempty"` Project string `json:"project"` // HostedZoneName is an optional field that tells cert-manager in which // Cloud DNS zone the challenge record has to be created. // If left empty cert-manager will automatically choose a zone. // +optional HostedZoneName string `json:"hostedZoneName,omitempty"` } // ACMEIssuerDNS01ProviderCloudflare is a structure containing the DNS // configuration for Cloudflare. // One of `apiKeySecretRef` or `apiTokenSecretRef` must be provided. type ACMEIssuerDNS01ProviderCloudflare struct { // Email of the account, only required when using API key based authentication. // +optional Email string `json:"email,omitempty"` // API key to use to authenticate with Cloudflare. // Note: using an API token to authenticate is now the recommended method // as it allows greater control of permissions. // +optional APIKey *cmmeta.SecretKeySelector `json:"apiKeySecretRef,omitempty"` // API token used to authenticate with Cloudflare. // +optional APIToken *cmmeta.SecretKeySelector `json:"apiTokenSecretRef,omitempty"` } // ACMEIssuerDNS01ProviderDigitalOcean is a structure containing the DNS // configuration for DigitalOcean Domains type ACMEIssuerDNS01ProviderDigitalOcean struct { Token cmmeta.SecretKeySelector `json:"tokenSecretRef"` } // ACMEIssuerDNS01ProviderRoute53 is a structure containing the Route 53 // configuration for AWS type ACMEIssuerDNS01ProviderRoute53 struct { // The AccessKeyID is used for authentication. If not set we fall-back to using env vars, shared credentials file or AWS Instance metadata // see: https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials // +optional AccessKeyID string `json:"accessKeyID,omitempty"` // The SecretAccessKey is used for authentication. 
If not set we fall-back to using env vars, shared credentials file or AWS Instance metadata // https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials // +optional SecretAccessKey cmmeta.SecretKeySelector `json:"secretAccessKeySecretRef"` // Role is a Role ARN which the Route53 provider will assume using either the explicit credentials AccessKeyID/SecretAccessKey // or the inferred credentials from environment variables, shared credentials file or AWS Instance metadata // +optional Role string `json:"role,omitempty"` // If set, the provider will manage only this zone in Route53 and will not do an lookup using the route53:ListHostedZonesByName api call. // +optional HostedZoneID string `json:"hostedZoneID,omitempty"` // Always set the region when using AccessKeyID and SecretAccessKey Region string `json:"region"` } // ACMEIssuerDNS01ProviderAzureDNS is a structure containing the // configuration for Azure DNS type ACMEIssuerDNS01ProviderAzureDNS struct { // if both this and ClientSecret are left unset MSI will be used // +optional ClientID string `json:"clientID,omitempty"` // if both this and ClientID are left unset MSI will be used // +optional ClientSecret *cmmeta.SecretKeySelector `json:"clientSecretSecretRef,omitempty"` SubscriptionID string `json:"subscriptionID"` // when specifying ClientID and ClientSecret then this field is also needed // +optional TenantID string `json:"tenantID,omitempty"` ResourceGroupName string `json:"resourceGroupName"` // +optional HostedZoneName string `json:"hostedZoneName,omitempty"` // +optional Environment AzureDNSEnvironment `json:"environment,omitempty"` } // +kubebuilder:validation:Enum=AzurePublicCloud;AzureChinaCloud;AzureGermanCloud;AzureUSGovernmentCloud type AzureDNSEnvironment string const ( AzurePublicCloud AzureDNSEnvironment = "AzurePublicCloud" AzureChinaCloud AzureDNSEnvironment = "AzureChinaCloud" AzureGermanCloud AzureDNSEnvironment = "AzureGermanCloud" AzureUSGovernmentCloud AzureDNSEnvironment = "AzureUSGovernmentCloud" ) // ACMEIssuerDNS01ProviderAcmeDNS is a structure containing the // configuration for ACME-DNS servers type ACMEIssuerDNS01ProviderAcmeDNS struct { Host string `json:"host"` AccountSecret cmmeta.SecretKeySelector `json:"accountSecretRef"` } // ACMEIssuerDNS01ProviderRFC2136 is a structure containing the // configuration for RFC2136 DNS type ACMEIssuerDNS01ProviderRFC2136 struct { // The IP address or hostname of an authoritative DNS server supporting // RFC2136 in the form host:port. If the host is an IPv6 address it must be // enclosed in square brackets (e.g [2001:db8::1]) ; port is optional. // This field is required. Nameserver string `json:"nameserver"` // The name of the secret containing the TSIG value. // If ``tsigKeyName`` is defined, this field is required. // +optional TSIGSecret cmmeta.SecretKeySelector `json:"tsigSecretSecretRef,omitempty"` // The TSIG Key name configured in the DNS. // If ``tsigSecretSecretRef`` is defined, this field is required. // +optional TSIGKeyName string `json:"tsigKeyName,omitempty"` // The TSIG Algorithm configured in the DNS supporting RFC2136. Used only // when ``tsigSecretSecretRef`` and ``tsigKeyName`` are defined. // Supported values are (case-insensitive): ``HMACMD5`` (default), // ``HMACSHA1``, ``HMACSHA256`` or ``HMACSHA512``. 
// +optional TSIGAlgorithm string `json:"tsigAlgorithm,omitempty"` } // ACMEIssuerDNS01ProviderWebhook specifies configuration for a webhook DNS01 // provider, including where to POST ChallengePayload resources. type ACMEIssuerDNS01ProviderWebhook struct { // The API group name that should be used when POSTing ChallengePayload // resources to the webhook apiserver. // This should be the same as the GroupName specified in the webhook // provider implementation. GroupName string `json:"groupName"` // The name of the solver to use, as defined in the webhook provider // implementation. // This will typically be the name of the provider, e.g. 'cloudflare'. SolverName string `json:"solverName"` // Additional configuration that should be passed to the webhook apiserver // when challenges are processed. // This can contain arbitrary JSON data. // Secret values should not be specified in this stanza. // If secret values are needed (e.g. credentials for a DNS service), you // should use a SecretKeySelector to reference a Secret resource. // For details on the schema of this field, consult the webhook provider // implementation's documentation. // +optional Config *apiext.JSON `json:"config,omitempty"` } type ACMEIssuerStatus struct { // URI is the unique account identifier, which can also be used to retrieve // account details from the CA // +optional URI string `json:"uri,omitempty"` // LastRegisteredEmail is the email associated with the latest registered // ACME account, in order to track changes made to registered account // associated with the Issuer // +optional LastRegisteredEmail string `json:"lastRegisteredEmail,omitempty"` }
1
23,196
Explain that this must match the `CommonName` of the first (or is it last?) certificate in the chain, where the first certificate is the chain root CA and the last certificate is the leaf certificate.
jetstack-cert-manager
go
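The comment above asks to document how `PreferredChain` relates to a chain's `CommonName`. As a minimal sketch of the matching behaviour under discussion (assuming the ACME client returns candidate chains leaf-first, root-last), selection could look like the following; the function name and the fall-back-to-default policy are illustrative, not cert-manager's actual implementation.

package acme

import "crypto/x509"

// pickPreferredChain returns the first candidate chain whose root (last)
// certificate carries the requested CommonName. PreferredChain is
// best-effort: when nothing matches, the default (first) chain is kept.
func pickPreferredChain(chains [][]*x509.Certificate, preferred string) []*x509.Certificate {
	if len(chains) == 0 {
		return nil
	}
	if preferred == "" {
		return chains[0]
	}
	for _, chain := range chains {
		if len(chain) == 0 {
			continue
		}
		// Chains are assumed leaf-first, so the last certificate is the
		// one closest to the root CA.
		root := chain[len(chain)-1]
		if root.Subject.CommonName == preferred {
			return chain
		}
	}
	return chains[0]
}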
@@ -16,7 +16,7 @@ type Config struct { Storage *storageConfig `yaml:"storage" required:"true"` Blockchain *blockchain.Config `yaml:"blockchain"` MetricsListenAddr string `yaml:"metrics_listen_addr" default:"127.0.0.1:14004"` - ColdStart *ColdStartConfig `yaml:"cold_start"` + ColdStart bool `yaml:"cold_start"` NumWorkers int `yaml:"num_workers" default:"64"` }
1
package dwh import ( "github.com/jinzhu/configor" "github.com/pkg/errors" "github.com/sonm-io/core/accounts" "github.com/sonm-io/core/blockchain" "github.com/sonm-io/core/insonmnia/logging" ) type Config struct { Logging LoggingConfig `yaml:"logging"` GRPCListenAddr string `yaml:"grpc_address" default:"127.0.0.1:15021"` HTTPListenAddr string `yaml:"http_address" default:"127.0.0.1:15022"` Eth accounts.EthConfig `yaml:"ethereum" required:"true"` Storage *storageConfig `yaml:"storage" required:"true"` Blockchain *blockchain.Config `yaml:"blockchain"` MetricsListenAddr string `yaml:"metrics_listen_addr" default:"127.0.0.1:14004"` ColdStart *ColdStartConfig `yaml:"cold_start"` NumWorkers int `yaml:"num_workers" default:"64"` } type storageConfig struct { Backend string `required:"true" yaml:"driver"` Endpoint string `required:"true" yaml:"endpoint"` } type LoggingConfig struct { Level *logging.Level `required:"true" default:"warn"` } type ColdStartConfig struct { UpToBlock uint64 `yaml:"up_to_block"` } type YAMLConfig struct { Endpoint string `yaml:"endpoint" required:"false"` } func NewConfig(path string) (*Config, error) { cfg := &Config{} err := configor.Load(cfg, path) if err != nil { return nil, err } if cfg.NumWorkers < 1 { return nil, errors.New("at least one worker must be specified") } return cfg, nil }
1
7,062
Do we really need this behaviour to be configurable?
sonm-io-core
go
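To make the question above concrete, this is roughly the shape after the patch, with `cold_start` as a plain YAML boolean instead of a nested block. The trimmed struct, the file path, and the startup message are hypothetical; only `configor.Load` and the field tags come from the file itself.

package main

import (
	"fmt"
	"log"

	"github.com/jinzhu/configor"
)

// Config is trimmed to the two fields relevant to the patch.
type Config struct {
	ColdStart  bool `yaml:"cold_start"`
	NumWorkers int  `yaml:"num_workers" default:"64"`
}

func main() {
	cfg := &Config{}
	// "dwh.yaml" is a placeholder path for this sketch.
	if err := configor.Load(cfg, "dwh.yaml"); err != nil {
		log.Fatal(err)
	}
	if cfg.ColdStart {
		fmt.Println("cold start requested")
	}
}

If the behaviour should not be configurable at all, as the reviewer suggests, the field disappears entirely and the service would have to detect the cold-start condition on its own (for example, an empty storage backend).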
@@ -3,7 +3,7 @@ */ var MongooseArray = require('./array') - , ObjectId = require('../drivers/node-mongodb-native/objectid') + , ObjectId = require('./objectid') , ObjectIdSchema = require('../schema/objectid') , utils = require('../utils') , util = require('util')
1
/*! * Module dependencies. */ var MongooseArray = require('./array') , ObjectId = require('../drivers/node-mongodb-native/objectid') , ObjectIdSchema = require('../schema/objectid') , utils = require('../utils') , util = require('util') , Document = require('../document') /** * DocumentArray constructor * * @param {Array} values * @param {String} path the path to this array * @param {Document} doc parent document * @api private * @return {MongooseDocumentArray} * @inherits MongooseArray * @see http://bit.ly/f6CnZU */ function MongooseDocumentArray (values, path, doc) { var arr = []; // Values always have to be passed to the constructor to initialize, since // otherwise MongooseArray#push will mark the array as modified to the parent. arr.push.apply(arr, values); utils.decorate( arr, MongooseDocumentArray.mixin ); arr.isMongooseArray = true; arr.isMongooseDocumentArray = true; arr._atomics = {}; arr.validators = []; arr._path = path; if (doc) { arr._parent = doc; arr._schema = doc.schema.path(path); arr._handlers = { isNew: arr.notify('isNew'), save: arr.notify('save') }; doc.on('save', arr._handlers.save); doc.on('isNew', arr._handlers.isNew); } return arr; } /*! * Inherits from MongooseArray */ MongooseDocumentArray.mixin = Object.create( MongooseArray.mixin ); /** * Overrides MongooseArray#cast * * @api private */ MongooseDocumentArray.mixin._cast = function (value) { if (value instanceof this._schema.casterConstructor) { if (!(value.__parent && value.__parentArray)) { // value may have been created using array.create() value.__parent = this._parent; value.__parentArray = this; } return value; } // handle cast('string') or cast(ObjectId) etc. // only objects are permitted so we can safely assume that // non-objects are to be interpreted as _id if (Buffer.isBuffer(value) || value instanceof ObjectId || !utils.isObject(value)) { value = { _id: value }; } return new this._schema.casterConstructor(value, this); }; /** * Searches array items for the first document with a matching _id. * * ####Example: * * var embeddedDoc = m.array.id(some_id); * * @return {EmbeddedDocument|null} the subdocument or null if not found. * @param {ObjectId|String|Number|Buffer} id * @TODO cast to the _id based on schema for proper comparison * @api public */ MongooseDocumentArray.mixin.id = function (id) { var casted , sid , _id try { var casted_ = ObjectIdSchema.prototype.cast.call({}, id); if (casted_) casted = String(casted_); } catch (e) { casted = null; } for (var i = 0, l = this.length; i < l; i++) { _id = this[i].get('_id'); if (_id instanceof Document) { sid || (sid = String(id)); if (sid == _id._id) return this[i]; } else if (!(_id instanceof ObjectId)) { sid || (sid = String(id)); if (sid == _id) return this[i]; } else if (casted == _id) { return this[i]; } } return null; }; /** * Returns a native js Array of plain js objects * * ####NOTE: * * _Each sub-document is converted to a plain object by calling its `#toObject` method._ * * @param {Object} [options] optional options to pass to each documents `toObject` method call during conversion * @return {Array} * @api public */ MongooseDocumentArray.mixin.toObject = function (options) { return this.map(function (doc) { return doc && doc.toObject(options) || null; }); }; /** * Helper for console.log * * @api public */ MongooseDocumentArray.mixin.inspect = function () { return '[' + this.map(function (doc) { if (doc) { return doc.inspect ? 
doc.inspect() : util.inspect(doc) } return 'null' }).join('\n') + ']'; }; /** * Creates a subdocument casted to this schema. * * This is the same subdocument constructor used for casting. * * @param {Object} obj the value to cast to this arrays SubDocument schema * @api public */ MongooseDocumentArray.mixin.create = function (obj) { return new this._schema.casterConstructor(obj); } /** * Creates a fn that notifies all child docs of `event`. * * @param {String} event * @return {Function} * @api private */ MongooseDocumentArray.mixin.notify = function notify (event) { var self = this; return function notify (val) { var i = self.length; while (i--) { if (!self[i]) continue; switch(event) { // only swap for save event for now, we may change this to all event types later case 'save': val = self[i]; break; default: // NO-OP break; } self[i].emit(event, val); } } } /*! * Module exports. */ module.exports = MongooseDocumentArray;
1
12,517
Hmm interesting. Do the tests check out on this?
Automattic-mongoose
js
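One way to answer the reviewer's question is a smoke test over `MongooseDocumentArray#id`, the main consumer of the swapped `ObjectId` import. This is a hypothetical check written for illustration, not a test from the repository; the model and field names are made up.

'use strict';

const assert = require('assert');
const mongoose = require('mongoose');

const Child = new mongoose.Schema({ name: String });
const Parent = mongoose.model('Parent', new mongoose.Schema({ kids: [Child] }));

const doc = new Parent({ kids: [{ name: 'a' }, { name: 'b' }] });
const target = doc.kids[1];

// id() should resolve the subdocument whether it is given an ObjectId or
// its string form; both code paths touch the imported ObjectId type.
assert.strictEqual(doc.kids.id(target._id), target);
assert.strictEqual(doc.kids.id(String(target._id)), target);
console.log('document array id() lookups OK');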
@@ -26,11 +26,15 @@ import java.util.concurrent.TimeUnit; import com.google.common.base.Stopwatch; import org.apache.logging.log4j.Logger; +import org.apache.tuweni.bytes.Bytes; +import org.apache.tuweni.concurrent.ExpiringMap; import org.apache.tuweni.units.bigints.UInt256; public class PoWSolver { + private static final int MAX_OMMER_DEPTH = 8; private static final Logger LOG = getLogger(); + private final long powJobTimeToLive; public static class PoWSolverJob {
1
/* * Copyright ConsenSys AG. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * SPDX-License-Identifier: Apache-2.0 */ package org.hyperledger.besu.ethereum.mainnet; import static org.apache.logging.log4j.LogManager.getLogger; import org.hyperledger.besu.ethereum.chain.PoWObserver; import org.hyperledger.besu.util.Subscribers; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import com.google.common.base.Stopwatch; import org.apache.logging.log4j.Logger; import org.apache.tuweni.units.bigints.UInt256; public class PoWSolver { private static final Logger LOG = getLogger(); public static class PoWSolverJob { private final PoWSolverInputs inputs; private final CompletableFuture<PoWSolution> nonceFuture; PoWSolverJob(final PoWSolverInputs inputs, final CompletableFuture<PoWSolution> nonceFuture) { this.inputs = inputs; this.nonceFuture = nonceFuture; } public static PoWSolverJob createFromInputs(final PoWSolverInputs inputs) { return new PoWSolverJob(inputs, new CompletableFuture<>()); } PoWSolverInputs getInputs() { return inputs; } public boolean isDone() { return nonceFuture.isDone(); } void solvedWith(final PoWSolution solution) { nonceFuture.complete(solution); } public void cancel() { nonceFuture.cancel(false); } public void failed(final Throwable ex) { nonceFuture.completeExceptionally(ex); } PoWSolution getSolution() throws InterruptedException, ExecutionException { return nonceFuture.get(); } } private final long NO_MINING_CONDUCTED = -1; private final Iterable<Long> nonceGenerator; private final PoWHasher poWHasher; private volatile long hashesPerSecond = NO_MINING_CONDUCTED; private final Boolean stratumMiningEnabled; private final Subscribers<PoWObserver> ethHashObservers; private final EpochCalculator epochCalculator; private volatile Optional<PoWSolverJob> currentJob = Optional.empty(); public PoWSolver( final Iterable<Long> nonceGenerator, final PoWHasher poWHasher, final Boolean stratumMiningEnabled, final Subscribers<PoWObserver> ethHashObservers, final EpochCalculator epochCalculator) { this.nonceGenerator = nonceGenerator; this.poWHasher = poWHasher; this.stratumMiningEnabled = stratumMiningEnabled; this.ethHashObservers = ethHashObservers; ethHashObservers.forEach(observer -> observer.setSubmitWorkCallback(this::submitSolution)); this.epochCalculator = epochCalculator; } public PoWSolution solveFor(final PoWSolverJob job) throws InterruptedException, ExecutionException { currentJob = Optional.of(job); if (stratumMiningEnabled) { ethHashObservers.forEach(observer -> observer.newJob(job.inputs)); } else { findValidNonce(); } return currentJob.get().getSolution(); } private void findValidNonce() { final Stopwatch operationTimer = Stopwatch.createStarted(); final PoWSolverJob job = currentJob.get(); long hashesExecuted = 0; for (final Long n : nonceGenerator) { if (job.isDone()) { return; } final Optional<PoWSolution> solution = testNonce(job.getInputs(), n); 
solution.ifPresent(job::solvedWith); hashesExecuted++; final double operationDurationSeconds = operationTimer.elapsed(TimeUnit.NANOSECONDS) / 1e9; hashesPerSecond = (long) (hashesExecuted / operationDurationSeconds); } job.failed(new IllegalStateException("No valid nonce found.")); } private Optional<PoWSolution> testNonce(final PoWSolverInputs inputs, final long nonce) { return Optional.ofNullable( poWHasher.hash(nonce, inputs.getBlockNumber(), epochCalculator, inputs.getPrePowHash())) .filter(sol -> UInt256.fromBytes(sol.getSolution()).compareTo(inputs.getTarget()) <= 0); } public void cancel() { currentJob.ifPresent(PoWSolverJob::cancel); } public Optional<PoWSolverInputs> getWorkDefinition() { return currentJob.flatMap(job -> Optional.of(job.getInputs())); } public Optional<Long> hashesPerSecond() { if (hashesPerSecond == NO_MINING_CONDUCTED) { return Optional.empty(); } return Optional.of(hashesPerSecond); } public boolean submitSolution(final PoWSolution solution) { final Optional<PoWSolverJob> jobSnapshot = currentJob; if (jobSnapshot.isEmpty()) { LOG.debug("No current job, rejecting miner work"); return false; } final PoWSolverJob job = jobSnapshot.get(); final PoWSolverInputs inputs = job.getInputs(); if (!inputs.getPrePowHash().equals(solution.getPowHash())) { LOG.debug("Miner's solution does not match current job"); return false; } final Optional<PoWSolution> calculatedSolution = testNonce(inputs, solution.getNonce()); if (calculatedSolution.isPresent()) { LOG.debug("Accepting a solution from a miner"); currentJob.get().solvedWith(calculatedSolution.get()); return true; } LOG.debug("Rejecting a solution from a miner"); return false; } public Iterable<Long> getNonceGenerator() { return nonceGenerator; } }
1
25,612
For another PR: another `--X` (CLI flag) candidate.
hyperledger-besu
java
@@ -10,4 +10,5 @@ const ( GettingStarted = "Getting Started ✨" Develop = "Develop 🔧" Settings = "Settings" -) + Release = "Release" +)
1
// +build !windows // Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 // Package groups contains the names of command groups package group const ( GettingStarted = "Getting Started ✨" Develop = "Develop 🔧" Settings = "Settings" )
1
10,776
Do we need to add this constant to `group_windows.go` as well? Otherwise, I believe the build will fail on Windows. Also, what do you think of adding the emoji at the end? The Windows one could stay plain.
aws-copilot-cli
go
@@ -4,6 +4,8 @@ package net.sourceforge.pmd.lang.ast.xpath; +import static junit.framework.TestCase.assertTrue; + import java.util.HashMap; import java.util.Map;
1
/** * BSD-style license; for more info see http://pmd.sourceforge.net/license.html */ package net.sourceforge.pmd.lang.ast.xpath; import java.util.HashMap; import java.util.Map; import org.junit.Assert; import org.junit.Test; import net.sourceforge.pmd.lang.ast.DummyNode; /** * Unit test for {@link AttributeAxisIterator} */ public class AttributeAxisIteratorTest { /** * Test hasNext and next. */ @Test public void testAttributeAxisIterator() { DummyNode dummyNode = new DummyNode(1); dummyNode.testingOnlySetBeginLine(1); dummyNode.testingOnlySetBeginColumn(1); AttributeAxisIterator it = new AttributeAxisIterator(dummyNode); Map<String, Attribute> atts = new HashMap<>(); while (it.hasNext()) { Attribute attribute = it.next(); atts.put(attribute.getName(), attribute); } Assert.assertEquals(7, atts.size()); Assert.assertTrue(atts.containsKey("BeginColumn")); Assert.assertTrue(atts.containsKey("BeginLine")); Assert.assertTrue(atts.containsKey("FindBoundary")); Assert.assertTrue(atts.containsKey("Image")); Assert.assertTrue(atts.containsKey("SingleLine")); Assert.assertTrue(atts.containsKey("EndColumn")); Assert.assertTrue(atts.containsKey("EndLine")); } }
1
13,865
This is a JUnit 3 import, not a JUnit 4 one.
pmd-pmd
java
@@ -113,9 +113,16 @@ func TestReconcile(t *testing.T) { assert.Equal(t, hivev1.UnsupportedHibernationReason, cond.Reason) }, }, + { + name: "clustersync not yet created", + cd: cdBuilder.Options(o.shouldHibernate).Build(), + expectError: true, + }, { name: "start hibernating, no syncsets", cd: cdBuilder.Options(o.shouldHibernate).Build(), + // The clustersync controller creates a ClusterSync even when there are no syncsets + cs: csBuilder.Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().StopMachines(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(nil) },
1
package hibernation import ( "context" "fmt" "testing" "time" "github.com/golang/mock/gomock" log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" batchv1 "k8s.io/api/batch/v1" certsv1 "k8s.io/api/certificates/v1" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/types" fakekubeclient "k8s.io/client-go/kubernetes/fake" "sigs.k8s.io/controller-runtime/pkg/client" "sigs.k8s.io/controller-runtime/pkg/client/fake" "sigs.k8s.io/controller-runtime/pkg/reconcile" machineapi "github.com/openshift/machine-api-operator/pkg/apis/machine/v1beta1" hivev1 "github.com/openshift/hive/apis/hive/v1" hiveintv1alpha1 "github.com/openshift/hive/apis/hiveinternal/v1alpha1" "github.com/openshift/hive/pkg/constants" "github.com/openshift/hive/pkg/controller/hibernation/mock" "github.com/openshift/hive/pkg/remoteclient" remoteclientmock "github.com/openshift/hive/pkg/remoteclient/mock" testcd "github.com/openshift/hive/pkg/test/clusterdeployment" testcs "github.com/openshift/hive/pkg/test/clustersync" testgeneric "github.com/openshift/hive/pkg/test/generic" ) const ( namespace = "test-namespace" cdName = "test-cluster-deployment" ) func TestReconcile(t *testing.T) { logger := log.New() logger.SetLevel(log.DebugLevel) scheme := runtime.NewScheme() corev1.AddToScheme(scheme) batchv1.AddToScheme(scheme) hivev1.AddToScheme(scheme) hiveintv1alpha1.AddToScheme(scheme) machineapi.AddToScheme(scheme) cdBuilder := testcd.FullBuilder(namespace, cdName, scheme).Options( testcd.Installed(), testcd.WithClusterVersion("4.4.9"), ) o := clusterDeploymentOptions{} csBuilder := testcs.FullBuilder(namespace, cdName, scheme).Options( testcs.WithFirstSuccessTime(time.Now().Add(-10 * time.Hour)), ) tests := []struct { name string cd *hivev1.ClusterDeployment cs *hiveintv1alpha1.ClusterSync setupActuator func(actuator *mock.MockHibernationActuator) setupCSRHelper func(helper *mock.MockcsrHelper) setupRemote func(builder *remoteclientmock.MockBuilder) validate func(t *testing.T, cd *hivev1.ClusterDeployment) expectError bool }{ { name: "cluster deleted", cd: cdBuilder.GenericOptions(testgeneric.Deleted()).Options(o.shouldHibernate).Build(), cs: csBuilder.Build(), validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { if getHibernatingCondition(cd) != nil { t.Errorf("not expecting hibernating condition") } }, }, { name: "hibernation condition initialized", cd: cdBuilder.Options(o.notInstalled, o.shouldHibernate).Build(), cs: csBuilder.Build(), validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionUnknown, cond.Status) assert.Equal(t, hivev1.InitializedConditionReason, cond.Reason) }, }, { name: "do not hibernate unsupported versions", cd: cdBuilder.Options(testcd.WithClusterVersion("4.3.11")).Build(), cs: csBuilder.Build(), validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionFalse, cond.Status) assert.Equal(t, hivev1.UnsupportedHibernationReason, cond.Reason) }, }, { name: "start hibernating, older version", cd: cdBuilder.Options(o.shouldHibernate, testcd.WithClusterVersion("4.3.11")).Build(), cs: csBuilder.Build(), validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionFalse, cond.Status) assert.Equal(t, 
hivev1.UnsupportedHibernationReason, cond.Reason) }, }, { name: "start hibernating, no syncsets", cd: cdBuilder.Options(o.shouldHibernate).Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().StopMachines(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(nil) }, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionTrue, cond.Status) assert.Equal(t, hivev1.StoppingHibernationReason, cond.Reason) }, }, { name: "start hibernating, syncsets not applied", cd: cdBuilder.Options(o.shouldHibernate, testcd.InstalledTimestamp(time.Now())).Build(), cs: csBuilder.Options(testcs.WithNoFirstSuccessTime()).Build(), validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionFalse, cond.Status) assert.Equal(t, hivev1.SyncSetsNotAppliedReason, cond.Reason) }, expectError: true, }, { name: "start hibernating, syncsets not applied but 10 minutes have passed since cd install", cd: cdBuilder.Options(o.shouldHibernate, testcd.InstalledTimestamp(time.Now().Add(-15*time.Minute))).Build(), cs: csBuilder.Options(testcs.WithNoFirstSuccessTime()).Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().StopMachines(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(nil) }, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionTrue, cond.Status) assert.Equal(t, hivev1.StoppingHibernationReason, cond.Reason) }, }, { name: "start hibernating", cd: cdBuilder.Options(o.shouldHibernate).Build(), cs: csBuilder.Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().StopMachines(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(nil) }, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionTrue, cond.Status) assert.Equal(t, hivev1.StoppingHibernationReason, cond.Reason) }, }, { name: "fail to stop machines", cd: cdBuilder.Options(o.shouldHibernate).Build(), cs: csBuilder.Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().StopMachines(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(fmt.Errorf("error")) }, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionFalse, cond.Status) assert.Equal(t, hivev1.FailedToStopHibernationReason, cond.Reason) }, }, { name: "stopping, machines have stopped", cd: cdBuilder.Options(o.shouldHibernate, o.stopping).Build(), cs: csBuilder.Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().MachinesStopped(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(true, nil, nil) }, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionTrue, cond.Status) assert.Equal(t, hivev1.HibernatingHibernationReason, cond.Reason) }, }, { name: "stopping, machines have not stopped", cd: cdBuilder.Options(o.shouldHibernate, o.stopping).Build(), cs: csBuilder.Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().StopMachines(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(nil) 
actuator.EXPECT().MachinesStopped(gomock.Any(), gomock.Any(), gomock.Any()).Times(1). Return(false, []string{"running-1", "pending-1", "stopping-1"}, nil) }, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionTrue, cond.Status) assert.Equal(t, hivev1.StoppingHibernationReason, cond.Reason) assert.Equal(t, "Stopping cluster machines. Some machines have not yet stopped: pending-1,running-1,stopping-1", cond.Message) }, }, { name: "stopping after MachinesFailedToStart", cd: cdBuilder.Options(o.shouldHibernate).Build( testcd.WithCondition(hivev1.ClusterDeploymentCondition{ Type: hivev1.ClusterHibernatingCondition, Status: corev1.ConditionTrue, Reason: hivev1.FailedToStartHibernationReason, }, )), cs: csBuilder.Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { // Ensure we try to stop machines in this state (bugfix) actuator.EXPECT().StopMachines(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(nil) }, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionTrue, cond.Status) assert.Equal(t, hivev1.StoppingHibernationReason, cond.Reason) }, }, { name: "start resuming", cd: cdBuilder.Options(o.hibernating).Build(), cs: csBuilder.Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().StartMachines(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(nil) }, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionTrue, cond.Status) assert.Equal(t, hivev1.ResumingHibernationReason, cond.Reason) }, }, { name: "fail to start machines", cd: cdBuilder.Options(o.hibernating).Build(), cs: csBuilder.Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().StartMachines(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(fmt.Errorf("error")) }, expectError: true, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionTrue, cond.Status) assert.Equal(t, hivev1.FailedToStartHibernationReason, cond.Reason) }, }, { name: "starting machines have already failed to start", cd: cdBuilder.Options(o.resuming).Build( testcd.WithCondition(hivev1.ClusterDeploymentCondition{ Type: hivev1.ClusterHibernatingCondition, Status: corev1.ConditionTrue, Reason: hivev1.FailedToStartHibernationReason, }, )), cs: csBuilder.Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { // Call will succeed which should clear the FailedToStart reason: actuator.EXPECT().StartMachines(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(nil) }, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionTrue, cond.Status) assert.Equal(t, hivev1.ResumingHibernationReason, cond.Reason) }, }, { name: "starting, machines have not started", cd: cdBuilder.Options(o.resuming).Build(), cs: csBuilder.Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().StartMachines(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(nil) actuator.EXPECT().MachinesRunning(gomock.Any(), gomock.Any(), gomock.Any()).Times(1). 
Return(false, []string{"stopped-1", "pending-1"}, nil) }, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionTrue, cond.Status) assert.Equal(t, hivev1.ResumingHibernationReason, cond.Reason) assert.Equal(t, "Starting cluster machines. Some machines are not yet running: pending-1,stopped-1", cond.Message) }, }, { name: "starting, machines running, nodes ready", cd: cdBuilder.Options(o.resuming).Build(), cs: csBuilder.Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().MachinesRunning(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(true, nil, nil) }, setupRemote: func(builder *remoteclientmock.MockBuilder) { c := fake.NewFakeClientWithScheme(scheme, readyNodes()...) builder.EXPECT().Build().Times(1).Return(c, nil) }, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionFalse, cond.Status) assert.Equal(t, hivev1.RunningHibernationReason, cond.Reason) }, }, { name: "starting, machines running, unready node", cd: cdBuilder.Options(o.resuming).Build(), cs: csBuilder.Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().MachinesRunning(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(true, nil, nil) }, setupRemote: func(builder *remoteclientmock.MockBuilder) { fakeClient := fake.NewFakeClientWithScheme(scheme, unreadyNode()...) fakeKubeClient := fakekubeclient.NewSimpleClientset() builder.EXPECT().Build().Times(1).Return(fakeClient, nil) builder.EXPECT().BuildKubeClient().Times(1).Return(fakeKubeClient, nil) }, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionTrue, cond.Status) assert.Equal(t, hivev1.ResumingHibernationReason, cond.Reason) }, }, { name: "starting, machines running, unready node, csrs to approve", cd: cdBuilder.Options(o.resuming).Build(), cs: csBuilder.Build(), setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().MachinesRunning(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(true, nil, nil) }, setupRemote: func(builder *remoteclientmock.MockBuilder) { fakeClient := fake.NewFakeClientWithScheme(scheme, unreadyNode()...) fakeKubeClient := fakekubeclient.NewSimpleClientset(csrs()...) 
builder.EXPECT().Build().Times(1).Return(fakeClient, nil) builder.EXPECT().BuildKubeClient().Times(1).Return(fakeKubeClient, nil) }, setupCSRHelper: func(helper *mock.MockcsrHelper) { count := len(csrs()) helper.EXPECT().IsApproved(gomock.Any()).Times(count).Return(false) helper.EXPECT().Parse(gomock.Any()).Times(count).Return(nil, nil) helper.EXPECT().Authorize(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Times(count).Return(nil) helper.EXPECT().Approve(gomock.Any(), gomock.Any()).Times(count).Return(nil) }, validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, corev1.ConditionTrue, cond.Status) assert.Equal(t, hivev1.ResumingHibernationReason, cond.Reason) }, }, { name: "previously unsupported hibernation, now supported", cd: cdBuilder.Options(o.unsupported, testcd.WithHibernateAfter(8*time.Hour)).Build(), cs: csBuilder.Build(), validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, hivev1.RunningHibernationReason, cond.Reason) assert.Equal(t, "Hibernation capable", cond.Message) }, }, { name: "hibernate fake cluster", cd: cdBuilder.Build( o.shouldHibernate, testcd.InstalledTimestamp(time.Now().Add(-1*time.Hour)), testcd.WithAnnotation(constants.HiveFakeClusterAnnotation, "true")), cs: csBuilder.Build(), validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, hivev1.HibernatingHibernationReason, cond.Reason) assert.Equal(t, corev1.ConditionTrue, cond.Status) assert.Equal(t, "Fake cluster is stopped", cond.Message) }, }, { name: "start hibernated fake cluster", cd: cdBuilder.Options(o.hibernating, testcd.WithPowerState(hivev1.RunningClusterPowerState), testcd.WithAnnotation(constants.HiveFakeClusterAnnotation, "true")).Build(), cs: csBuilder.Build(), validate: func(t *testing.T, cd *hivev1.ClusterDeployment) { cond := getHibernatingCondition(cd) require.NotNil(t, cond) assert.Equal(t, hivev1.RunningHibernationReason, cond.Reason) assert.Equal(t, corev1.ConditionFalse, cond.Status) assert.Equal(t, "Fake cluster is running", cond.Message) }, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { ctrl := gomock.NewController(t) mockActuator := mock.NewMockHibernationActuator(ctrl) mockActuator.EXPECT().CanHandle(gomock.Any()).AnyTimes().Return(true) if test.setupActuator != nil { test.setupActuator(mockActuator) } mockBuilder := remoteclientmock.NewMockBuilder(ctrl) if test.setupRemote != nil { test.setupRemote(mockBuilder) } mockCSRHelper := mock.NewMockcsrHelper(ctrl) if test.setupCSRHelper != nil { test.setupCSRHelper(mockCSRHelper) } actuators = []HibernationActuator{mockActuator} var c client.Client if test.cs != nil { c = fake.NewFakeClientWithScheme(scheme, test.cd, test.cs) } else { c = fake.NewFakeClientWithScheme(scheme, test.cd) } reconciler := hibernationReconciler{ Client: c, logger: log.WithField("controller", "hibernation"), remoteClientBuilder: func(cd *hivev1.ClusterDeployment) remoteclient.Builder { return mockBuilder }, csrUtil: mockCSRHelper, } _, err := reconciler.Reconcile(context.TODO(), reconcile.Request{ NamespacedName: types.NamespacedName{Namespace: namespace, Name: cdName}, }) if test.expectError { assert.Error(t, err, "expected error from reconcile") } else { assert.NoError(t, err, "expected no error from reconcile") } if test.validate != nil { cd := &hivev1.ClusterDeployment{} err := 
c.Get(context.TODO(), client.ObjectKey{Namespace: namespace, Name: cdName}, cd) require.Nil(t, err) test.validate(t, cd) } ctrl.Finish() }) } } func TestHibernateAfter(t *testing.T) { logger := log.New() logger.SetLevel(log.DebugLevel) scheme := runtime.NewScheme() corev1.AddToScheme(scheme) hivev1.AddToScheme(scheme) hiveintv1alpha1.AddToScheme(scheme) cdBuilder := testcd.FullBuilder(namespace, cdName, scheme).Options( testcd.Installed(), testcd.WithClusterVersion("4.4.9"), ) o := clusterDeploymentOptions{} csBuilder := testcs.FullBuilder(namespace, cdName, scheme).Options( testcs.WithFirstSuccessTime(time.Now().Add(-10 * time.Hour)), ) tests := []struct { name string setupActuator func(actuator *mock.MockHibernationActuator) cd *hivev1.ClusterDeployment cs *hiveintv1alpha1.ClusterSync expectError bool expectRequeueAfter time.Duration expectedPowerState hivev1.ClusterPowerState expectedConditionReason string }{ { name: "cluster due for hibernate no condition", // cluster that has never been hibernated and thus has no running condition cd: cdBuilder.Build( testcd.WithHibernateAfter(8*time.Hour), testcd.InstalledTimestamp(time.Now().Add(-10*time.Hour))), cs: csBuilder.Build(), expectedPowerState: hivev1.HibernatingClusterPowerState, }, { cd: cdBuilder.Build( testcd.WithHibernateAfter(8*time.Hour), testcd.WithClusterVersion("4.3.11"), testcd.InstalledTimestamp(time.Now().Add(-10*time.Hour))), cs: csBuilder.Build(), expectedPowerState: "", expectedConditionReason: hivev1.UnsupportedHibernationReason, }, { name: "cluster not yet due for hibernate older version", // cluster that has never been hibernated and thus has no running condition cd: cdBuilder.Build( testcd.WithHibernateAfter(8*time.Hour), testcd.WithClusterVersion("4.3.11"), testcd.InstalledTimestamp(time.Now().Add(-3*time.Hour))), cs: csBuilder.Options( testcs.WithFirstSuccessTime(time.Now().Add(-3 * time.Hour)), ).Build(), expectedPowerState: "", expectedConditionReason: hivev1.UnsupportedHibernationReason, }, { name: "cluster not yet due for hibernate no running condition", // cluster that has never been hibernated setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().StartMachines(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(nil) }, cd: cdBuilder.Build( testcd.WithHibernateAfter(12*time.Hour), testcd.WithPowerState(hivev1.RunningClusterPowerState), testcd.InstalledTimestamp(time.Now().Add(-10*time.Hour))), cs: csBuilder.Build(), expectRequeueAfter: 2 * time.Hour, expectedPowerState: hivev1.RunningClusterPowerState, }, { name: "cluster with running condition due for hibernate", cd: cdBuilder.Build( testcd.WithHibernateAfter(8*time.Hour), testcd.WithCondition(hibernatingCondition(corev1.ConditionFalse, hivev1.RunningHibernationReason, 9*time.Hour)), testcd.InstalledTimestamp(time.Now().Add(-10*time.Hour))), cs: csBuilder.Build(), expectedPowerState: hivev1.HibernatingClusterPowerState, }, { name: "cluster with running condition not due for hibernate", cd: cdBuilder.Build( testcd.WithCondition(hibernatingCondition(corev1.ConditionFalse, hivev1.RunningHibernationReason, 6*time.Hour)), testcd.WithHibernateAfter(20*time.Hour), testcd.InstalledTimestamp(time.Now().Add(-10*time.Hour))), cs: csBuilder.Build(), expectRequeueAfter: 14 * time.Hour, expectedPowerState: "", }, { name: "cluster waking from hibernate", setupActuator: func(actuator *mock.MockHibernationActuator) { actuator.EXPECT().StartMachines(gomock.Any(), gomock.Any(), gomock.Any()).Times(1).Return(nil) 
actuator.EXPECT().MachinesRunning(gomock.Any(), gomock.Any(), gomock.Any()).Times(1). Return(false, []string{"no-running-1", "no-running-2"}, nil) }, cd: cdBuilder.Build( testcd.WithHibernateAfter(8*time.Hour), testcd.InstalledTimestamp(time.Now().Add(-10*time.Hour)), testcd.WithCondition(hibernatingCondition(corev1.ConditionTrue, hivev1.ResumingHibernationReason, 8*time.Hour)), o.shouldRun), cs: csBuilder.Build(), expectedPowerState: hivev1.RunningClusterPowerState, expectRequeueAfter: stateCheckInterval, }, { name: "cluster due for hibernate, no syncsets", cd: cdBuilder.Build( testcd.WithHibernateAfter(8*time.Minute), testcd.WithCondition(hibernatingCondition(corev1.ConditionFalse, hivev1.RunningHibernationReason, 8*time.Minute)), testcd.InstalledTimestamp(time.Now().Add(-8*time.Minute))), expectedPowerState: hivev1.HibernatingClusterPowerState, }, { name: "cluster due for hibernate but syncsets not applied", cd: cdBuilder.Build( testcd.WithHibernateAfter(8*time.Minute), testcd.WithCondition(hibernatingCondition(corev1.ConditionFalse, hivev1.RunningHibernationReason, 8*time.Minute)), testcd.InstalledTimestamp(time.Now().Add(-8*time.Minute))), cs: csBuilder.Options( testcs.WithNoFirstSuccessTime(), ).Build(), expectError: true, expectedPowerState: "", expectRequeueAfter: time.Duration(time.Minute * 2), }, { name: "cluster due for hibernate, syncsets not applied but 10 minutes have passed since cd install", cd: cdBuilder.Build( testcd.WithHibernateAfter(8*time.Hour), testcd.WithCondition(hibernatingCondition(corev1.ConditionFalse, hivev1.RunningHibernationReason, 9*time.Hour)), testcd.InstalledTimestamp(time.Now().Add(-10*time.Hour))), cs: csBuilder.Options( testcs.WithNoFirstSuccessTime(), ).Build(), expectedPowerState: hivev1.HibernatingClusterPowerState, }, { name: "cluster due for hibernate, syncsets successfully applied", cd: cdBuilder.Build( testcd.WithHibernateAfter(8*time.Hour), testcd.WithCondition(hibernatingCondition(corev1.ConditionFalse, hivev1.RunningHibernationReason, 9*time.Hour)), testcd.InstalledTimestamp(time.Now().Add(-10*time.Hour))), cs: csBuilder.Options( testcs.WithFirstSuccessTime(time.Now()), ).Build(), expectedPowerState: hivev1.HibernatingClusterPowerState, }, { name: "fake cluster due for hibernate but syncsets not applied", cd: cdBuilder.Build( testcd.WithHibernateAfter(8*time.Minute), testcd.WithCondition(hibernatingCondition(corev1.ConditionFalse, hivev1.RunningHibernationReason, 8*time.Minute)), testcd.WithAnnotation(constants.HiveFakeClusterAnnotation, "true"), testcd.InstalledTimestamp(time.Now().Add(-8*time.Minute))), cs: csBuilder.Options( testcs.WithNoFirstSuccessTime(), ).Build(), expectError: true, expectedPowerState: "", expectRequeueAfter: time.Duration(time.Minute * 2), }, { name: "fake cluster due for hibernate, syncsets successfully applied", cd: cdBuilder.Build( testcd.WithHibernateAfter(8*time.Hour), testcd.WithCondition(hibernatingCondition(corev1.ConditionFalse, hivev1.RunningHibernationReason, 9*time.Hour)), testcd.WithAnnotation(constants.HiveFakeClusterAnnotation, "true"), testcd.InstalledTimestamp(time.Now().Add(-10*time.Hour))), cs: csBuilder.Options( testcs.WithFirstSuccessTime(time.Now()), ).Build(), expectedPowerState: hivev1.HibernatingClusterPowerState, }, { name: "hibernate fake cluster", cd: cdBuilder.Build( testcd.WithHibernateAfter(1*time.Hour), testcd.InstalledTimestamp(time.Now().Add(-1*time.Hour)), testcd.WithAnnotation(constants.HiveFakeClusterAnnotation, "true")), cs: csBuilder.Build(), expectedPowerState: 
hivev1.HibernatingClusterPowerState, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() mockActuator := mock.NewMockHibernationActuator(ctrl) mockActuator.EXPECT().CanHandle(gomock.Any()).AnyTimes().Return(true) if test.setupActuator != nil { test.setupActuator(mockActuator) } mockBuilder := remoteclientmock.NewMockBuilder(ctrl) mockCSRHelper := mock.NewMockcsrHelper(ctrl) actuators = []HibernationActuator{mockActuator} var c client.Client if test.cs != nil { c = fake.NewFakeClientWithScheme(scheme, test.cd, test.cs) } else { c = fake.NewFakeClientWithScheme(scheme, test.cd) } reconciler := hibernationReconciler{ Client: c, logger: log.WithField("controller", "hibernation"), remoteClientBuilder: func(cd *hivev1.ClusterDeployment) remoteclient.Builder { return mockBuilder }, csrUtil: mockCSRHelper, } result, err := reconciler.Reconcile(context.TODO(), reconcile.Request{ NamespacedName: types.NamespacedName{Namespace: namespace, Name: cdName}, }) if test.expectError { assert.Error(t, err, "expected error from reconcile") } else { assert.NoError(t, err, "expected no error from reconcile") } // Need to do fuzzy requeue after matching if test.expectRequeueAfter == 0 { assert.Zero(t, result.RequeueAfter) } else { assert.GreaterOrEqual(t, result.RequeueAfter.Seconds(), (test.expectRequeueAfter - 10*time.Second).Seconds(), "requeue after too small") assert.LessOrEqual(t, result.RequeueAfter.Seconds(), (test.expectRequeueAfter + 10*time.Second).Seconds(), "request after too large") } cd := &hivev1.ClusterDeployment{} err = c.Get(context.TODO(), client.ObjectKey{Namespace: namespace, Name: cdName}, cd) require.NoError(t, err, "error looking up ClusterDeployment") assert.Equal(t, test.expectedPowerState, cd.Spec.PowerState, "unexpected PowerState") }) } } func hibernatingCondition(status corev1.ConditionStatus, reason string, lastTransitionAgo time.Duration) hivev1.ClusterDeploymentCondition { return hivev1.ClusterDeploymentCondition{ Type: hivev1.ClusterHibernatingCondition, Status: status, Message: "unused", Reason: reason, LastTransitionTime: metav1.NewTime(time.Now().Add(-lastTransitionAgo)), } } type clusterDeploymentOptions struct{} func (*clusterDeploymentOptions) notInstalled(cd *hivev1.ClusterDeployment) { cd.Spec.Installed = false } func (*clusterDeploymentOptions) shouldHibernate(cd *hivev1.ClusterDeployment) { cd.Spec.PowerState = hivev1.HibernatingClusterPowerState } func (*clusterDeploymentOptions) shouldRun(cd *hivev1.ClusterDeployment) { cd.Spec.PowerState = hivev1.RunningClusterPowerState } func (*clusterDeploymentOptions) stopping(cd *hivev1.ClusterDeployment) { cd.Status.Conditions = append(cd.Status.Conditions, hivev1.ClusterDeploymentCondition{ Type: hivev1.ClusterHibernatingCondition, Reason: hivev1.StoppingHibernationReason, Status: corev1.ConditionTrue, }) } func (*clusterDeploymentOptions) hibernating(cd *hivev1.ClusterDeployment) { cd.Status.Conditions = append(cd.Status.Conditions, hivev1.ClusterDeploymentCondition{ Type: hivev1.ClusterHibernatingCondition, Reason: hivev1.HibernatingHibernationReason, Status: corev1.ConditionTrue, }) } func (*clusterDeploymentOptions) resuming(cd *hivev1.ClusterDeployment) { cd.Status.Conditions = append(cd.Status.Conditions, hivev1.ClusterDeploymentCondition{ Type: hivev1.ClusterHibernatingCondition, Reason: hivev1.ResumingHibernationReason, Status: corev1.ConditionTrue, }) } func (*clusterDeploymentOptions) unsupported(cd *hivev1.ClusterDeployment) { 
cd.Status.Conditions = append(cd.Status.Conditions, hivev1.ClusterDeploymentCondition{ Type: hivev1.ClusterHibernatingCondition, Status: corev1.ConditionFalse, Reason: hivev1.UnsupportedHibernationReason, }) } func getHibernatingCondition(cd *hivev1.ClusterDeployment) *hivev1.ClusterDeploymentCondition { for i := range cd.Status.Conditions { if cd.Status.Conditions[i].Type == hivev1.ClusterHibernatingCondition { return &cd.Status.Conditions[i] } } return nil } func readyNodes() []runtime.Object { nodes := make([]runtime.Object, 5) for i := 0; i < len(nodes); i++ { node := &corev1.Node{} node.Name = fmt.Sprintf("node-%d", i) node.Status.Conditions = []corev1.NodeCondition{ { Type: corev1.NodeReady, Status: corev1.ConditionTrue, }, } nodes[i] = node } return nodes } func unreadyNode() []runtime.Object { node := &corev1.Node{} node.Name = "unready" node.Status.Conditions = []corev1.NodeCondition{ { Type: corev1.NodeReady, Status: corev1.ConditionFalse, }, } return append(readyNodes(), node) } func csrs() []runtime.Object { result := make([]runtime.Object, 5) for i := 0; i < len(result); i++ { csr := &certsv1.CertificateSigningRequest{} csr.Name = fmt.Sprintf("csr-%d", i) result[i] = csr } return result }
1
19,942
shouldn't there be a requeue expected here?
openshift-hive
go
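The review comment above points at the "hibernate fake cluster" table entry, which asserts a power-state change but leaves `expectRequeueAfter` at zero, so the test demands `result.RequeueAfter == 0` exactly. If a requeue does belong there, the likely one-line fix is adding `expectRequeueAfter: stateCheckInterval` to that entry — an assumption, since the source doesn't confirm which duration applies. For reference, the fuzzy matching the test applies to non-zero expectations reduces to a ±10-second window, sketched here as a standalone Go helper with illustrative names:

package main

import (
	"fmt"
	"time"
)

// requeueWithin mirrors the fuzzy assertion in TestHibernateAfter above:
// a non-zero expectRequeueAfter only needs to land within ±tol of the
// observed result.RequeueAfter.
func requeueWithin(got, want, tol time.Duration) bool {
	return got >= want-tol && got <= want+tol
}

func main() {
	// 119s falls inside the ±10s window around a 2-minute expectation.
	fmt.Println(requeueWithin(119*time.Second, 2*time.Minute, 10*time.Second)) // true
	// A zero expectation, as in "hibernate fake cluster", collapses to an
	// exact equality check.
	fmt.Println(requeueWithin(0, 0, 0)) // true
}

The ±10s slack absorbs wall-clock drift between building the test fixture and running Reconcile.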
@@ -63,7 +63,7 @@ namespace NLog.Layouts
         ///
         /// Thread-agnostic layouts only use contents of <see cref="LogEventInfo"/> for its output.
         /// </remarks>
-        internal bool ThreadAgnostic { get; private set; }
+        internal bool ThreadAgnostic { get; set; }

         /// <summary>
         /// Gets the level of stack trace information required for rendering.
1
// // Copyright (c) 2004-2016 Jaroslaw Kowalski <[email protected]>, Kim Christensen, Julian Verdurmen // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // namespace NLog.Layouts { using System; using System.Linq; using System.ComponentModel; using System.Text; using NLog.Config; using NLog.Internal; using NLog.Common; /// <summary> /// Abstract interface that layouts must implement. /// </summary> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1724:TypeNamesShouldNotMatchNamespaces", Justification = "Few people will see this conflict.")] [NLogConfigurationItem] public abstract class Layout : ISupportsInitialize, IRenderable { /// <summary> /// Is this layout initialized? See <see cref="Initialize(NLog.Config.LoggingConfiguration)"/> /// </summary> private bool isInitialized; private bool scannedForObjects; /// <summary> /// Gets a value indicating whether this layout is thread-agnostic (can be rendered on any thread). /// </summary> /// <remarks> /// Layout is thread-agnostic if it has been marked with [ThreadAgnostic] attribute and all its children are /// like that as well. /// /// Thread-agnostic layouts only use contents of <see cref="LogEventInfo"/> for its output. /// </remarks> internal bool ThreadAgnostic { get; private set; } /// <summary> /// Gets the level of stack trace information required for rendering. /// </summary> internal StackTraceUsage StackTraceUsage { get; private set; } private const int MaxInitialRenderBufferLength = 16384; private int maxRenderedLength; /// <summary> /// Gets the logging configuration this target is part of. /// </summary> protected LoggingConfiguration LoggingConfiguration { get; private set; } /// <summary> /// Converts a given text to a <see cref="Layout" />. 
/// </summary> /// <param name="text">Text to be converted.</param> /// <returns><see cref="SimpleLayout"/> object represented by the text.</returns> public static implicit operator Layout([Localizable(false)] string text) { return FromString(text); } /// <summary> /// Implicitly converts the specified string to a <see cref="SimpleLayout"/>. /// </summary> /// <param name="layoutText">The layout string.</param> /// <returns>Instance of <see cref="SimpleLayout"/>.</returns> public static Layout FromString(string layoutText) { return FromString(layoutText, ConfigurationItemFactory.Default); } /// <summary> /// Implicitly converts the specified string to a <see cref="SimpleLayout"/>. /// </summary> /// <param name="layoutText">The layout string.</param> /// <param name="configurationItemFactory">The NLog factories to use when resolving layout renderers.</param> /// <returns>Instance of <see cref="SimpleLayout"/>.</returns> public static Layout FromString(string layoutText, ConfigurationItemFactory configurationItemFactory) { return new SimpleLayout(layoutText, configurationItemFactory); } /// <summary> /// Precalculates the layout for the specified log event and stores the result /// in per-log event cache. /// /// Only if the layout doesn't have [ThreadAgnostic] and doens't contain layouts with [ThreadAgnostic]. /// </summary> /// <param name="logEvent">The log event.</param> /// <remarks> /// Calling this method enables you to store the log event in a buffer /// and/or potentially evaluate it in another thread even though the /// layout may contain thread-dependent renderer. /// </remarks> public virtual void Precalculate(LogEventInfo logEvent) { if (!this.ThreadAgnostic) { this.Render(logEvent); } } /// <summary> /// Renders the event info in layout. 
/// </summary> /// <param name="logEvent">The event info.</param> /// <returns>String representing log event.</returns> public string Render(LogEventInfo logEvent) { if (!this.isInitialized) { this.Initialize(this.LoggingConfiguration); } return this.GetFormattedMessage(logEvent); } internal void PrecalculateBuilder(LogEventInfo logEvent, StringBuilder target) { if (!this.ThreadAgnostic) { RenderAppendBuilder(logEvent, target, true); } } /// <summary> /// Renders the event info in layout to the provided target /// </summary> /// <param name="logEvent">The event info.</param> /// <param name="target">Appends the string representing log event to target</param> /// <param name="cacheLayoutResult">Should rendering result be cached on LogEventInfo</param> internal void RenderAppendBuilder(LogEventInfo logEvent, StringBuilder target, bool cacheLayoutResult = false) { if (!this.isInitialized) { this.Initialize(this.LoggingConfiguration); } if (!this.ThreadAgnostic) { string cachedValue; if (logEvent.TryGetCachedLayoutValue(this, out cachedValue)) { target.Append(cachedValue); return; } } int initialLength = this.maxRenderedLength; if (initialLength > MaxInitialRenderBufferLength) { initialLength = MaxInitialRenderBufferLength; } using (var localTarget = new AppendBuilderCreator(target, initialLength)) { RenderFormattedMessage(logEvent, localTarget.Builder); if (localTarget.Builder.Length > this.maxRenderedLength) { this.maxRenderedLength = localTarget.Builder.Length; } if (cacheLayoutResult && !this.ThreadAgnostic) { // when needed as it generates garbage logEvent.AddCachedLayoutValue(this, localTarget.Builder.ToString()); } } } /// <summary> /// Valid default implementation of <see cref="GetFormattedMessage" />, when having implemented the optimized <see cref="RenderFormattedMessage"/> /// </summary> /// <param name="logEvent">The logging event.</param> /// <param name="reusableBuilder">StringBuilder to help minimize allocations [optional].</param> /// <param name="cacheLayoutResult">Should rendering result be cached on LogEventInfo</param> /// <returns>The rendered layout.</returns> internal string RenderAllocateBuilder(LogEventInfo logEvent, StringBuilder reusableBuilder = null, bool cacheLayoutResult = true) { if (!this.ThreadAgnostic) { string cachedValue; if (logEvent.TryGetCachedLayoutValue(this, out cachedValue)) { return cachedValue; } } int initialLength = this.maxRenderedLength; if (initialLength > MaxInitialRenderBufferLength) { initialLength = MaxInitialRenderBufferLength; } var sb = reusableBuilder ?? new StringBuilder(initialLength); RenderFormattedMessage(logEvent, sb); if (sb.Length > this.maxRenderedLength) { this.maxRenderedLength = sb.Length; } if (cacheLayoutResult && !this.ThreadAgnostic) { return logEvent.AddCachedLayoutValue(this, sb.ToString()); } else { return sb.ToString(); } } /// <summary> /// Renders the layout for the specified logging event by invoking layout renderers. /// </summary> /// <param name="logEvent">The logging event.</param> /// <param name="target">Initially empty <see cref="StringBuilder"/> for the result</param> protected virtual void RenderFormattedMessage(LogEventInfo logEvent, StringBuilder target) { target.Append(GetFormattedMessage(logEvent) ?? string.Empty); } /// <summary> /// Initializes this instance. /// </summary> /// <param name="configuration">The configuration.</param> void ISupportsInitialize.Initialize(LoggingConfiguration configuration) { this.Initialize(configuration); } /// <summary> /// Closes this instance. 
/// </summary> void ISupportsInitialize.Close() { this.Close(); } /// <summary> /// Initializes this instance. /// </summary> /// <param name="configuration">The configuration.</param> internal void Initialize(LoggingConfiguration configuration) { if (!this.isInitialized) { this.LoggingConfiguration = configuration; this.isInitialized = true; this.scannedForObjects = false; this.InitializeLayout(); if (!this.scannedForObjects) { InternalLogger.Debug("Initialized Layout done but not scanned for objects"); PerformObjectScanning(); } } } internal void PerformObjectScanning() { var objectGraphScannerList = ObjectGraphScanner.FindReachableObjects<object>(this); // determine whether the layout is thread-agnostic // layout is thread agnostic if it is thread-agnostic and // all its nested objects are thread-agnostic. this.ThreadAgnostic = objectGraphScannerList.All(item => item.GetType().IsDefined(typeof(ThreadAgnosticAttribute), true)); // determine the max StackTraceUsage, to decide if Logger needs to capture callsite this.StackTraceUsage = StackTraceUsage.None; // Incase this Layout should implement IStackTraceUsage this.StackTraceUsage = objectGraphScannerList.OfType<IUsesStackTrace>().DefaultIfEmpty().Max(item => item == null ? StackTraceUsage.None : item.StackTraceUsage); this.scannedForObjects = true; } /// <summary> /// Closes this instance. /// </summary> internal void Close() { if (this.isInitialized) { this.LoggingConfiguration = null; this.isInitialized = false; this.CloseLayout(); } } /// <summary> /// Initializes the layout. /// </summary> protected virtual void InitializeLayout() { PerformObjectScanning(); } /// <summary> /// Closes the layout. /// </summary> protected virtual void CloseLayout() { } /// <summary> /// Renders the layout for the specified logging event by invoking layout renderers. /// </summary> /// <param name="logEvent">The logging event.</param> /// <returns>The rendered layout.</returns> protected abstract string GetFormattedMessage(LogEventInfo logEvent); /// <summary> /// Register a custom Layout. /// </summary> /// <remarks>Short-cut for registing to default <see cref="ConfigurationItemFactory"/></remarks> /// <typeparam name="T"> Type of the Layout.</typeparam> /// <param name="name"> Name of the Layout.</param> public static void Register<T>(string name) where T : Layout { var layoutRendererType = typeof(T); Register(name, layoutRendererType); } /// <summary> /// Register a custom Layout. /// </summary> /// <remarks>Short-cut for registing to default <see cref="ConfigurationItemFactory"/></remarks> /// <param name="layoutType"> Type of the Layout.</param> /// <param name="name"> Name of the Layout.</param> public static void Register(string name, Type layoutType) { ConfigurationItemFactory.Default.Layouts .RegisterDefinition(name, layoutType); } } }
1
15,361
I prefer to make it `protected set`. What do you think?
NLog-NLog
.cs
@@ -178,7 +178,9 @@ type Options struct {
 	Gateway            GatewayOpts   `json:"gateway,omitempty"`
 	LeafNode           LeafNodeOpts  `json:"leaf,omitempty"`
 	JetStream          bool          `json:"jetstream"`
-	StoreDir           string        `json:"store_dir"`
+	JetStreamMaxMemory int64         `json:"-"`
+	JetStreamMaxStore  int64         `json:"-"`
+	StoreDir           string        `json:"-"`
 	ProfPort           int           `json:"-"`
 	PidFile            string        `json:"-"`
 	PortsFileDir       string        `json:"-"`
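The patch above introduces `JetStreamMaxMemory` and `JetStreamMaxStore` on `Options`, while `parseJetStream` in the file below still only understands `store_dir`. A rough, standalone sketch of the wiring the patch implies follows; the config keys `max_memory_store` and `max_file_store` are assumptions, not taken from the actual change, and the real parser's token unwrapping and error collection are omitted:

package main

import "fmt"

// Options mirrors only the fields touched by the patch above; the real
// server.Options struct has many more.
type Options struct {
	JetStream          bool
	JetStreamMaxMemory int64
	JetStreamMaxStore  int64
	StoreDir           string
}

// applyJetStream sketches how parseJetStream (see the oldf below) might
// be extended to fill the new limit fields.
func applyJetStream(o *Options, m map[string]interface{}) error {
	o.JetStream = true
	for k, v := range m {
		switch k {
		case "store_dir", "storedir":
			o.StoreDir = v.(string)
		case "max_memory_store": // assumed key name
			o.JetStreamMaxMemory = v.(int64)
		case "max_file_store": // assumed key name
			o.JetStreamMaxStore = v.(int64)
		default:
			return fmt.Errorf("unknown jetstream field %q", k)
		}
	}
	return nil
}

func main() {
	var o Options
	if err := applyJetStream(&o, map[string]interface{}{
		"store_dir":        "/data/jetstream",
		"max_memory_store": int64(1 << 30),  // 1 GiB in-memory limit
		"max_file_store":   int64(10 << 30), // 10 GiB on-disk limit
	}); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", o)
}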
1
// Copyright 2012-2019 The NATS Authors // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package server import ( "context" "crypto/tls" "crypto/x509" "errors" "flag" "fmt" "io/ioutil" "net" "net/url" "os" "path/filepath" "regexp" "runtime" "strconv" "strings" "sync/atomic" "time" "github.com/nats-io/jwt" "github.com/nats-io/nkeys" "github.com/nats-io/nats-server/v2/conf" ) var allowUnknownTopLevelField = int32(0) // NoErrOnUnknownFields can be used to change the behavior the processing // of a configuration file. By default, an error is reported if unknown // fields are found. If `noError` is set to true, no error will be reported // if top-level unknown fields are found. func NoErrOnUnknownFields(noError bool) { var val int32 if noError { val = int32(1) } atomic.StoreInt32(&allowUnknownTopLevelField, val) } // ClusterOpts are options for clusters. // NOTE: This structure is no longer used for monitoring endpoints // and json tags are deprecated and may be removed in the future. type ClusterOpts struct { Host string `json:"addr,omitempty"` Port int `json:"cluster_port,omitempty"` Username string `json:"-"` Password string `json:"-"` AuthTimeout float64 `json:"auth_timeout,omitempty"` Permissions *RoutePermissions `json:"-"` TLSTimeout float64 `json:"-"` TLSConfig *tls.Config `json:"-"` TLSMap bool `json:"-"` ListenStr string `json:"-"` Advertise string `json:"-"` NoAdvertise bool `json:"-"` ConnectRetries int `json:"-"` } // GatewayOpts are options for gateways. // NOTE: This structure is no longer used for monitoring endpoints // and json tags are deprecated and may be removed in the future. type GatewayOpts struct { Name string `json:"name"` Host string `json:"addr,omitempty"` Port int `json:"port,omitempty"` Username string `json:"-"` Password string `json:"-"` AuthTimeout float64 `json:"auth_timeout,omitempty"` TLSConfig *tls.Config `json:"-"` TLSTimeout float64 `json:"tls_timeout,omitempty"` TLSMap bool `json:"-"` Advertise string `json:"advertise,omitempty"` ConnectRetries int `json:"connect_retries,omitempty"` Gateways []*RemoteGatewayOpts `json:"gateways,omitempty"` RejectUnknown bool `json:"reject_unknown,omitempty"` // Not exported, for tests. resolver netResolver sendQSubsBufSize int } // RemoteGatewayOpts are options for connecting to a remote gateway // NOTE: This structure is no longer used for monitoring endpoints // and json tags are deprecated and may be removed in the future. type RemoteGatewayOpts struct { Name string `json:"name"` TLSConfig *tls.Config `json:"-"` TLSTimeout float64 `json:"tls_timeout,omitempty"` URLs []*url.URL `json:"urls,omitempty"` } // LeafNodeOpts are options for a given server to accept leaf node connections and/or connect to a remote cluster. 
type LeafNodeOpts struct { Host string `json:"addr,omitempty"` Port int `json:"port,omitempty"` Username string `json:"-"` Password string `json:"-"` Account string `json:"-"` Users []*User `json:"-"` AuthTimeout float64 `json:"auth_timeout,omitempty"` TLSConfig *tls.Config `json:"-"` TLSTimeout float64 `json:"tls_timeout,omitempty"` TLSMap bool `json:"-"` Advertise string `json:"-"` NoAdvertise bool `json:"-"` ReconnectInterval time.Duration `json:"-"` // For solicited connections to other clusters/superclusters. Remotes []*RemoteLeafOpts `json:"remotes,omitempty"` // Not exported, for tests. resolver netResolver dialTimeout time.Duration loopDelay time.Duration } // RemoteLeafOpts are options for connecting to a remote server as a leaf node. type RemoteLeafOpts struct { LocalAccount string `json:"local_account,omitempty"` URLs []*url.URL `json:"urls,omitempty"` Credentials string `json:"-"` TLS bool `json:"-"` TLSConfig *tls.Config `json:"-"` TLSTimeout float64 `json:"tls_timeout,omitempty"` } // Options block for nats-server. // NOTE: This structure is no longer used for monitoring endpoints // and json tags are deprecated and may be removed in the future. type Options struct { ConfigFile string `json:"-"` ServerName string `json:"server_name"` Host string `json:"addr"` Port int `json:"port"` ClientAdvertise string `json:"-"` Trace bool `json:"-"` Debug bool `json:"-"` NoLog bool `json:"-"` NoSigs bool `json:"-"` NoSublistCache bool `json:"-"` DisableShortFirstPing bool `json:"-"` Logtime bool `json:"-"` MaxConn int `json:"max_connections"` MaxSubs int `json:"max_subscriptions,omitempty"` Nkeys []*NkeyUser `json:"-"` Users []*User `json:"-"` Accounts []*Account `json:"-"` SystemAccount string `json:"-"` AllowNewAccounts bool `json:"-"` Username string `json:"-"` Password string `json:"-"` Authorization string `json:"-"` PingInterval time.Duration `json:"ping_interval"` MaxPingsOut int `json:"ping_max"` HTTPHost string `json:"http_host"` HTTPPort int `json:"http_port"` HTTPSPort int `json:"https_port"` AuthTimeout float64 `json:"auth_timeout"` MaxControlLine int32 `json:"max_control_line"` MaxPayload int32 `json:"max_payload"` MaxPending int64 `json:"max_pending"` Cluster ClusterOpts `json:"cluster,omitempty"` Gateway GatewayOpts `json:"gateway,omitempty"` LeafNode LeafNodeOpts `json:"leaf,omitempty"` JetStream bool `json:"jetstream"` StoreDir string `json:"store_dir"` ProfPort int `json:"-"` PidFile string `json:"-"` PortsFileDir string `json:"-"` LogFile string `json:"-"` LogSizeLimit int64 `json:"-"` Syslog bool `json:"-"` RemoteSyslog string `json:"-"` Routes []*url.URL `json:"-"` RoutesStr string `json:"-"` TLSTimeout float64 `json:"tls_timeout"` TLS bool `json:"-"` TLSVerify bool `json:"-"` TLSMap bool `json:"-"` TLSCert string `json:"-"` TLSKey string `json:"-"` TLSCaCert string `json:"-"` TLSConfig *tls.Config `json:"-"` WriteDeadline time.Duration `json:"-"` MaxClosedClients int `json:"-"` LameDuckDuration time.Duration `json:"-"` // MaxTracedMsgLen is the maximum printable length for traced messages. MaxTracedMsgLen int `json:"-"` // Operating a trusted NATS server TrustedKeys []string `json:"-"` TrustedOperators []*jwt.OperatorClaims `json:"-"` AccountResolver AccountResolver `json:"-"` resolverPreloads map[string]string CustomClientAuthentication Authentication `json:"-"` CustomRouterAuthentication Authentication `json:"-"` // CheckConfig configuration file syntax test was successful and exit. 
CheckConfig bool `json:"-"` // ConnectErrorReports specifies the number of failed attempts // at which point server should report the failure of an initial // connection to a route, gateway or leaf node. // See DEFAULT_CONNECT_ERROR_REPORTS for default value. ConnectErrorReports int // ReconnectErrorReports is similar to ConnectErrorReports except // that this applies to reconnect events. ReconnectErrorReports int // private fields, used to know if bool options are explicitly // defined in config and/or command line params. inConfig map[string]bool inCmdLine map[string]bool // private fields, used for testing gatewaysSolicitDelay time.Duration routeProto int } type netResolver interface { LookupHost(ctx context.Context, host string) ([]string, error) } // Clone performs a deep copy of the Options struct, returning a new clone // with all values copied. func (o *Options) Clone() *Options { if o == nil { return nil } clone := &Options{} *clone = *o if o.Users != nil { clone.Users = make([]*User, len(o.Users)) for i, user := range o.Users { clone.Users[i] = user.clone() } } if o.Nkeys != nil { clone.Nkeys = make([]*NkeyUser, len(o.Nkeys)) for i, nkey := range o.Nkeys { clone.Nkeys[i] = nkey.clone() } } if o.Routes != nil { clone.Routes = deepCopyURLs(o.Routes) } if o.TLSConfig != nil { clone.TLSConfig = o.TLSConfig.Clone() } if o.Cluster.TLSConfig != nil { clone.Cluster.TLSConfig = o.Cluster.TLSConfig.Clone() } if o.Gateway.TLSConfig != nil { clone.Gateway.TLSConfig = o.Gateway.TLSConfig.Clone() } if len(o.Gateway.Gateways) > 0 { clone.Gateway.Gateways = make([]*RemoteGatewayOpts, len(o.Gateway.Gateways)) for i, g := range o.Gateway.Gateways { clone.Gateway.Gateways[i] = g.clone() } } // FIXME(dlc) - clone leaf node stuff. return clone } func deepCopyURLs(urls []*url.URL) []*url.URL { if urls == nil { return nil } curls := make([]*url.URL, len(urls)) for i, u := range urls { cu := &url.URL{} *cu = *u curls[i] = cu } return curls } // Configuration file authorization section. type authorization struct { // Singles user string pass string token string acc string // Multiple Nkeys/Users nkeys []*NkeyUser users []*User timeout float64 defaultPermissions *Permissions } // TLSConfigOpts holds the parsed tls config information, // used with flag parsing type TLSConfigOpts struct { CertFile string KeyFile string CaFile string Verify bool Insecure bool Map bool Timeout float64 Ciphers []uint16 CurvePreferences []tls.CurveID } var tlsUsage = ` TLS configuration is specified in the tls section of a configuration file: e.g. tls { cert_file: "./certs/server-cert.pem" key_file: "./certs/server-key.pem" ca_file: "./certs/ca.pem" verify: true verify_and_map: true cipher_suites: [ "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256" ] curve_preferences: [ "CurveP256", "CurveP384", "CurveP521" ] } Available cipher suites include: ` // ProcessConfigFile processes a configuration file. // FIXME(dlc): A bit hacky func ProcessConfigFile(configFile string) (*Options, error) { opts := &Options{} if err := opts.ProcessConfigFile(configFile); err != nil { // If only warnings then continue and return the options. if cerr, ok := err.(*processConfigErr); ok && len(cerr.Errors()) == 0 { return opts, nil } return nil, err } return opts, nil } // token is an item parsed from the configuration. 
type token interface { Value() interface{} Line() int IsUsedVariable() bool SourceFile() string Position() int } // unwrapValue can be used to get the token and value from an item // to be able to report the line number in case of an incorrect // configuration. func unwrapValue(v interface{}) (token, interface{}) { switch tk := v.(type) { case token: return tk, tk.Value() default: return nil, v } } // configureSystemAccount configures a system account // if present in the configuration. func configureSystemAccount(o *Options, m map[string]interface{}) error { configure := func(v interface{}) error { tk, v := unwrapValue(v) sa, ok := v.(string) if !ok { return &configErr{tk, "system account name must be a string"} } o.SystemAccount = sa return nil } if v, ok := m["system_account"]; ok { return configure(v) } else if v, ok := m["system"]; ok { return configure(v) } return nil } // ProcessConfigFile updates the Options structure with options // present in the given configuration file. // This version is convenient if one wants to set some default // options and then override them with what is in the config file. // For instance, this version allows you to do something such as: // // opts := &Options{Debug: true} // opts.ProcessConfigFile(myConfigFile) // // If the config file contains "debug: false", after this call, // opts.Debug would really be false. It would be impossible to // achieve that with the non receiver ProcessConfigFile() version, // since one would not know after the call if "debug" was not present // or was present but set to false. func (o *Options) ProcessConfigFile(configFile string) error { o.ConfigFile = configFile if configFile == "" { return nil } m, err := conf.ParseFileWithChecks(configFile) if err != nil { return err } // Collect all errors and warnings and report them all together. errors := make([]error, 0) warnings := make([]error, 0) // First check whether a system account has been defined, // as that is a condition for other features to be enabled. 
if err := configureSystemAccount(o, m); err != nil { errors = append(errors, err) } for k, v := range m { tk, v := unwrapValue(v) switch strings.ToLower(k) { case "listen": hp, err := parseListen(v) if err != nil { errors = append(errors, &configErr{tk, err.Error()}) continue } o.Host = hp.host o.Port = hp.port case "client_advertise": o.ClientAdvertise = v.(string) case "port": o.Port = int(v.(int64)) case "server_name": o.ServerName = v.(string) case "host", "net": o.Host = v.(string) case "debug": o.Debug = v.(bool) trackExplicitVal(o, &o.inConfig, "Debug", o.Debug) case "trace": o.Trace = v.(bool) trackExplicitVal(o, &o.inConfig, "Trace", o.Trace) case "logtime": o.Logtime = v.(bool) trackExplicitVal(o, &o.inConfig, "Logtime", o.Logtime) case "disable_sublist_cache", "no_sublist_cache": o.NoSublistCache = v.(bool) case "accounts": err := parseAccounts(tk, o, &errors, &warnings) if err != nil { errors = append(errors, err) continue } case "authorization": auth, err := parseAuthorization(tk, o, &errors, &warnings) if err != nil { errors = append(errors, err) continue } o.Username = auth.user o.Password = auth.pass o.Authorization = auth.token if (auth.user != "" || auth.pass != "") && auth.token != "" { err := &configErr{tk, "Cannot have a user/pass and token"} errors = append(errors, err) continue } o.AuthTimeout = auth.timeout // Check for multiple users defined if auth.users != nil { if auth.user != "" { err := &configErr{tk, "Can not have a single user/pass and a users array"} errors = append(errors, err) continue } if auth.token != "" { err := &configErr{tk, "Can not have a token and a users array"} errors = append(errors, err) continue } // Users may have been added from Accounts parsing, so do an append here o.Users = append(o.Users, auth.users...) } // Check for nkeys if auth.nkeys != nil { // NKeys may have been added from Accounts parsing, so do an append here o.Nkeys = append(o.Nkeys, auth.nkeys...) 
} case "http": hp, err := parseListen(v) if err != nil { err := &configErr{tk, err.Error()} errors = append(errors, err) continue } o.HTTPHost = hp.host o.HTTPPort = hp.port case "https": hp, err := parseListen(v) if err != nil { err := &configErr{tk, err.Error()} errors = append(errors, err) continue } o.HTTPHost = hp.host o.HTTPSPort = hp.port case "http_port", "monitor_port": o.HTTPPort = int(v.(int64)) case "https_port": o.HTTPSPort = int(v.(int64)) case "cluster": err := parseCluster(tk, o, &errors, &warnings) if err != nil { errors = append(errors, err) continue } case "gateway": if err := parseGateway(tk, o, &errors, &warnings); err != nil { errors = append(errors, err) continue } case "leaf", "leafnodes": err := parseLeafNodes(tk, o, &errors, &warnings) if err != nil { errors = append(errors, err) continue } case "jetstream": err := parseJetStream(tk, o, &errors, &warnings) if err != nil { errors = append(errors, err) continue } case "logfile", "log_file": o.LogFile = v.(string) case "logfile_size_limit", "log_size_limit": o.LogSizeLimit = v.(int64) case "syslog": o.Syslog = v.(bool) trackExplicitVal(o, &o.inConfig, "Syslog", o.Syslog) case "remote_syslog": o.RemoteSyslog = v.(string) case "pidfile", "pid_file": o.PidFile = v.(string) case "ports_file_dir": o.PortsFileDir = v.(string) case "prof_port": o.ProfPort = int(v.(int64)) case "max_control_line": if v.(int64) > 1<<31-1 { err := &configErr{tk, fmt.Sprintf("%s value is too big", k)} errors = append(errors, err) continue } o.MaxControlLine = int32(v.(int64)) case "max_payload": if v.(int64) > 1<<31-1 { err := &configErr{tk, fmt.Sprintf("%s value is too big", k)} errors = append(errors, err) continue } o.MaxPayload = int32(v.(int64)) case "max_pending": o.MaxPending = v.(int64) case "max_connections", "max_conn": o.MaxConn = int(v.(int64)) case "max_traced_msg_len": o.MaxTracedMsgLen = int(v.(int64)) case "max_subscriptions", "max_subs": o.MaxSubs = int(v.(int64)) case "ping_interval": o.PingInterval = time.Duration(int(v.(int64))) * time.Second case "ping_max": o.MaxPingsOut = int(v.(int64)) case "tls": tc, err := parseTLS(tk) if err != nil { errors = append(errors, err) continue } if o.TLSConfig, err = GenTLSConfig(tc); err != nil { err := &configErr{tk, err.Error()} errors = append(errors, err) continue } o.TLSTimeout = tc.Timeout o.TLSMap = tc.Map case "write_deadline": wd, ok := v.(string) if ok { dur, err := time.ParseDuration(wd) if err != nil { err := &configErr{tk, fmt.Sprintf("error parsing write_deadline: %v", err)} errors = append(errors, err) continue } o.WriteDeadline = dur } else { // Backward compatible with old type, assume this is the // number of seconds. o.WriteDeadline = time.Duration(v.(int64)) * time.Second err := &configWarningErr{ field: k, configErr: configErr{ token: tk, reason: "write_deadline should be converted to a duration", }, } warnings = append(warnings, err) } case "lame_duck_duration": dur, err := time.ParseDuration(v.(string)) if err != nil { err := &configErr{tk, fmt.Sprintf("error parsing lame_duck_duration: %v", err)} errors = append(errors, err) continue } if dur < 30*time.Second { err := &configErr{tk, fmt.Sprintf("invalid lame_duck_duration of %v, minimum is 30 seconds", dur)} errors = append(errors, err) continue } o.LameDuckDuration = dur case "operator", "operators", "roots", "root", "root_operators", "root_operator": opFiles := []string{} switch v := v.(type) { case string: opFiles = append(opFiles, v) case []string: opFiles = append(opFiles, v...) 
default: err := &configErr{tk, fmt.Sprintf("error parsing operators: unsupported type %T", v)} errors = append(errors, err) } // Assume for now these are file names, but they can also be the JWT itself inline. o.TrustedOperators = make([]*jwt.OperatorClaims, 0, len(opFiles)) for _, fname := range opFiles { opc, err := ReadOperatorJWT(fname) if err != nil { err := &configErr{tk, fmt.Sprintf("error parsing operator JWT: %v", err)} errors = append(errors, err) continue } o.TrustedOperators = append(o.TrustedOperators, opc) } case "resolver", "account_resolver", "accounts_resolver": var memResolverRe = regexp.MustCompile(`(MEM|MEMORY|mem|memory)\s*`) var resolverRe = regexp.MustCompile(`(?:URL|url){1}(?:\({1}\s*"?([^\s"]*)"?\s*\){1})?\s*`) str, ok := v.(string) if !ok { err := &configErr{tk, fmt.Sprintf("error parsing operator resolver, wrong type %T", v)} errors = append(errors, err) continue } if memResolverRe.MatchString(str) { o.AccountResolver = &MemAccResolver{} } else { items := resolverRe.FindStringSubmatch(str) if len(items) == 2 { url := items[1] _, err := parseURL(url, "account resolver") if err != nil { errors = append(errors, &configErr{tk, err.Error()}) continue } if ur, err := NewURLAccResolver(url); err != nil { err := &configErr{tk, err.Error()} errors = append(errors, err) continue } else { o.AccountResolver = ur } } } if o.AccountResolver == nil { err := &configErr{tk, "error parsing account resolver, should be MEM or URL(\"url\")"} errors = append(errors, err) } case "resolver_preload": mp, ok := v.(map[string]interface{}) if !ok { err := &configErr{tk, "preload should be a map of account_public_key:account_jwt"} errors = append(errors, err) continue } o.resolverPreloads = make(map[string]string) for key, val := range mp { tk, val = unwrapValue(val) if jwtstr, ok := val.(string); !ok { err := &configErr{tk, "preload map value should be a string JWT"} errors = append(errors, err) continue } else { // Make sure this is a valid account JWT, that is a config error. // We will warn of expirations, etc later. if _, err := jwt.DecodeAccountClaims(jwtstr); err != nil { err := &configErr{tk, "invalid account JWT"} errors = append(errors, err) continue } o.resolverPreloads[key] = jwtstr } } case "system_account", "system": // Already processed at the beginning so we just skip them // to not treat them as unknown values. 
continue case "trusted", "trusted_keys": switch v := v.(type) { case string: o.TrustedKeys = []string{v} case []string: o.TrustedKeys = v case []interface{}: keys := make([]string, 0, len(v)) for _, mv := range v { tk, mv = unwrapValue(mv) if key, ok := mv.(string); ok { keys = append(keys, key) } else { err := &configErr{tk, fmt.Sprintf("error parsing trusted: unsupported type in array %T", mv)} errors = append(errors, err) continue } } o.TrustedKeys = keys default: err := &configErr{tk, fmt.Sprintf("error parsing trusted: unsupported type %T", v)} errors = append(errors, err) } // Do a quick sanity check on keys for _, key := range o.TrustedKeys { if !nkeys.IsValidPublicOperatorKey(key) { err := &configErr{tk, fmt.Sprintf("trust key %q required to be a valid public operator nkey", key)} errors = append(errors, err) } } case "connect_error_reports": o.ConnectErrorReports = int(v.(int64)) case "reconnect_error_reports": o.ReconnectErrorReports = int(v.(int64)) default: if au := atomic.LoadInt32(&allowUnknownTopLevelField); au == 0 && !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: k, configErr: configErr{ token: tk, }, } errors = append(errors, err) } } } if len(errors) > 0 || len(warnings) > 0 { return &processConfigErr{ errors: errors, warnings: warnings, } } return nil } func trackExplicitVal(opts *Options, pm *map[string]bool, name string, val bool) { m := *pm if m == nil { m = make(map[string]bool) *pm = m } m[name] = val } // hostPort is simple struct to hold parsed listen/addr strings. type hostPort struct { host string port int } // parseListen will parse listen option which is replacing host/net and port func parseListen(v interface{}) (*hostPort, error) { hp := &hostPort{} switch vv := v.(type) { // Only a port case int64: hp.port = int(vv) case string: host, port, err := net.SplitHostPort(vv) if err != nil { return nil, fmt.Errorf("could not parse address string %q", vv) } hp.port, err = strconv.Atoi(port) if err != nil { return nil, fmt.Errorf("could not parse port %q", port) } hp.host = host } return hp, nil } // parseCluster will parse the cluster config. func parseCluster(v interface{}, opts *Options, errors *[]error, warnings *[]error) error { tk, v := unwrapValue(v) cm, ok := v.(map[string]interface{}) if !ok { return &configErr{tk, fmt.Sprintf("Expected map to define cluster, got %T", v)} } for mk, mv := range cm { // Again, unwrap token value if line check is required. tk, mv = unwrapValue(mv) switch strings.ToLower(mk) { case "listen": hp, err := parseListen(mv) if err != nil { err := &configErr{tk, err.Error()} *errors = append(*errors, err) continue } opts.Cluster.Host = hp.host opts.Cluster.Port = hp.port case "port": opts.Cluster.Port = int(mv.(int64)) case "host", "net": opts.Cluster.Host = mv.(string) case "authorization": auth, err := parseAuthorization(tk, opts, errors, warnings) if err != nil { *errors = append(*errors, err) continue } if auth.users != nil { err := &configErr{tk, "Cluster authorization does not allow multiple users"} *errors = append(*errors, err) continue } opts.Cluster.Username = auth.user opts.Cluster.Password = auth.pass opts.Cluster.AuthTimeout = auth.timeout if auth.defaultPermissions != nil { err := &configWarningErr{ field: mk, configErr: configErr{ token: tk, reason: `setting "permissions" within cluster authorization block is deprecated`, }, } *warnings = append(*warnings, err) // Do not set permissions if they were specified in top-level cluster block. 
if opts.Cluster.Permissions == nil { setClusterPermissions(&opts.Cluster, auth.defaultPermissions) } } case "routes": ra := mv.([]interface{}) routes, errs := parseURLs(ra, "route") if errs != nil { *errors = append(*errors, errs...) continue } opts.Routes = routes case "tls": config, tlsopts, err := getTLSConfig(tk) if err != nil { *errors = append(*errors, err) continue } opts.Cluster.TLSConfig = config opts.Cluster.TLSTimeout = tlsopts.Timeout opts.Cluster.TLSMap = tlsopts.Map case "cluster_advertise", "advertise": opts.Cluster.Advertise = mv.(string) case "no_advertise": opts.Cluster.NoAdvertise = mv.(bool) trackExplicitVal(opts, &opts.inConfig, "Cluster.NoAdvertise", opts.Cluster.NoAdvertise) case "connect_retries": opts.Cluster.ConnectRetries = int(mv.(int64)) case "permissions": perms, err := parseUserPermissions(mv, errors, warnings) if err != nil { *errors = append(*errors, err) continue } // Dynamic response permissions do not make sense here. if perms.Response != nil { err := &configErr{tk, "Cluster permissions do not support dynamic responses"} *errors = append(*errors, err) continue } // This will possibly override permissions that were define in auth block setClusterPermissions(&opts.Cluster, perms) default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: mk, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) continue } } } return nil } func parseURLs(a []interface{}, typ string) ([]*url.URL, []error) { var ( errors []error urls = make([]*url.URL, 0, len(a)) ) for _, u := range a { tk, u := unwrapValue(u) sURL := u.(string) url, err := parseURL(sURL, typ) if err != nil { err := &configErr{tk, err.Error()} errors = append(errors, err) continue } urls = append(urls, url) } return urls, errors } func parseURL(u string, typ string) (*url.URL, error) { urlStr := strings.TrimSpace(u) url, err := url.Parse(urlStr) if err != nil { return nil, fmt.Errorf("error parsing %s url [%q]", typ, urlStr) } return url, nil } func parseGateway(v interface{}, o *Options, errors *[]error, warnings *[]error) error { tk, v := unwrapValue(v) gm, ok := v.(map[string]interface{}) if !ok { return &configErr{tk, fmt.Sprintf("Expected gateway to be a map, got %T", v)} } for mk, mv := range gm { // Again, unwrap token value if line check is required. 
tk, mv = unwrapValue(mv) switch strings.ToLower(mk) { case "name": o.Gateway.Name = mv.(string) case "listen": hp, err := parseListen(mv) if err != nil { err := &configErr{tk, err.Error()} *errors = append(*errors, err) continue } o.Gateway.Host = hp.host o.Gateway.Port = hp.port case "port": o.Gateway.Port = int(mv.(int64)) case "host", "net": o.Gateway.Host = mv.(string) case "authorization": auth, err := parseAuthorization(tk, o, errors, warnings) if err != nil { *errors = append(*errors, err) continue } if auth.users != nil { *errors = append(*errors, &configErr{tk, "Gateway authorization does not allow multiple users"}) continue } o.Gateway.Username = auth.user o.Gateway.Password = auth.pass o.Gateway.AuthTimeout = auth.timeout case "tls": config, tlsopts, err := getTLSConfig(tk) if err != nil { *errors = append(*errors, err) continue } o.Gateway.TLSConfig = config o.Gateway.TLSTimeout = tlsopts.Timeout o.Gateway.TLSMap = tlsopts.Map case "advertise": o.Gateway.Advertise = mv.(string) case "connect_retries": o.Gateway.ConnectRetries = int(mv.(int64)) case "gateways": gateways, err := parseGateways(mv, errors, warnings) if err != nil { return err } o.Gateway.Gateways = gateways case "reject_unknown": o.Gateway.RejectUnknown = mv.(bool) default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: mk, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) continue } } } return nil } func parseJetStream(v interface{}, opts *Options, errors *[]error, warnings *[]error) error { tk, v := unwrapValue(v) cm, ok := v.(map[string]interface{}) if !ok { return &configErr{tk, fmt.Sprintf("Expected map to define JetStream, got %T", v)} } opts.JetStream = true for mk, mv := range cm { tk, mv = unwrapValue(mv) switch strings.ToLower(mk) { case "store_dir", "storedir": opts.StoreDir = mv.(string) default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: mk, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) continue } } } return nil } // parseLeafNodes will parse the leaf node config. func parseLeafNodes(v interface{}, opts *Options, errors *[]error, warnings *[]error) error { tk, v := unwrapValue(v) cm, ok := v.(map[string]interface{}) if !ok { return &configErr{tk, fmt.Sprintf("Expected map to define a leafnode, got %T", v)} } for mk, mv := range cm { // Again, unwrap token value if line check is required. tk, mv = unwrapValue(mv) switch strings.ToLower(mk) { case "listen": hp, err := parseListen(mv) if err != nil { err := &configErr{tk, err.Error()} *errors = append(*errors, err) continue } opts.LeafNode.Host = hp.host opts.LeafNode.Port = hp.port case "port": opts.LeafNode.Port = int(mv.(int64)) case "host", "net": opts.LeafNode.Host = mv.(string) case "authorization": auth, err := parseLeafAuthorization(tk, errors, warnings) if err != nil { *errors = append(*errors, err) continue } opts.LeafNode.Username = auth.user opts.LeafNode.Password = auth.pass opts.LeafNode.AuthTimeout = auth.timeout opts.LeafNode.Account = auth.acc opts.LeafNode.Users = auth.users // Validate user info config for leafnode authorization if err := validateLeafNodeAuthOptions(opts); err != nil { *errors = append(*errors, &configErr{tk, err.Error()}) continue } case "remotes": // Parse the remote options here. 
remotes, err := parseRemoteLeafNodes(mv, errors, warnings) if err != nil { continue } opts.LeafNode.Remotes = remotes case "reconnect", "reconnect_delay", "reconnect_interval": opts.LeafNode.ReconnectInterval = time.Duration(int(mv.(int64))) * time.Second case "tls": tc, err := parseTLS(tk) if err != nil { *errors = append(*errors, err) continue } if opts.LeafNode.TLSConfig, err = GenTLSConfig(tc); err != nil { err := &configErr{tk, err.Error()} *errors = append(*errors, err) continue } opts.LeafNode.TLSTimeout = tc.Timeout case "leafnode_advertise", "advertise": opts.LeafNode.Advertise = mv.(string) case "no_advertise": opts.LeafNode.NoAdvertise = mv.(bool) trackExplicitVal(opts, &opts.inConfig, "LeafNode.NoAdvertise", opts.LeafNode.NoAdvertise) default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: mk, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) continue } } } return nil } // This is the authorization parser adapter for the leafnode's // authorization config. func parseLeafAuthorization(v interface{}, errors *[]error, warnings *[]error) (*authorization, error) { var ( am map[string]interface{} tk token auth = &authorization{} ) _, v = unwrapValue(v) am = v.(map[string]interface{}) for mk, mv := range am { tk, mv = unwrapValue(mv) switch strings.ToLower(mk) { case "user", "username": auth.user = mv.(string) case "pass", "password": auth.pass = mv.(string) case "timeout": at := float64(1) switch mv := mv.(type) { case int64: at = float64(mv) case float64: at = mv } auth.timeout = at case "users": users, err := parseLeafUsers(tk, errors, warnings) if err != nil { *errors = append(*errors, err) continue } auth.users = users case "account": auth.acc = mv.(string) default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: mk, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) } continue } } return auth, nil } // This is a trimmed down version of parseUsers that is adapted // for the users possibly defined in the authorization{} section // of leafnodes {}. func parseLeafUsers(mv interface{}, errors *[]error, warnings *[]error) ([]*User, error) { var ( tk token users = []*User{} ) tk, mv = unwrapValue(mv) // Make sure we have an array uv, ok := mv.([]interface{}) if !ok { return nil, &configErr{tk, fmt.Sprintf("Expected users field to be an array, got %v", mv)} } for _, u := range uv { tk, u = unwrapValue(u) // Check its a map/struct um, ok := u.(map[string]interface{}) if !ok { err := &configErr{tk, fmt.Sprintf("Expected user entry to be a map/struct, got %v", u)} *errors = append(*errors, err) continue } user := &User{} for k, v := range um { tk, v = unwrapValue(v) switch strings.ToLower(k) { case "user", "username": user.Username = v.(string) case "pass", "password": user.Password = v.(string) case "account": // We really want to save just the account name here, but // the User object is *Account. So we create an account object // but it won't be registered anywhere. The server will just // use opts.LeafNode.Users[].Account.Name. Alternatively // we need to create internal objects to store u/p and account // name and have a server structure to hold that. 
user.Account = NewAccount(v.(string)) default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: k, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) continue } } } users = append(users, user) } return users, nil } func parseRemoteLeafNodes(v interface{}, errors *[]error, warnings *[]error) ([]*RemoteLeafOpts, error) { tk, v := unwrapValue(v) ra, ok := v.([]interface{}) if !ok { return nil, &configErr{tk, fmt.Sprintf("Expected remotes field to be an array, got %T", v)} } remotes := make([]*RemoteLeafOpts, 0, len(ra)) for _, r := range ra { tk, r = unwrapValue(r) // Check its a map/struct rm, ok := r.(map[string]interface{}) if !ok { *errors = append(*errors, &configErr{tk, fmt.Sprintf("Expected remote leafnode entry to be a map/struct, got %v", r)}) continue } remote := &RemoteLeafOpts{} for k, v := range rm { tk, v = unwrapValue(v) switch strings.ToLower(k) { case "url", "urls": switch v := v.(type) { case []interface{}, []string: urls, errs := parseURLs(v.([]interface{}), "leafnode") if errs != nil { *errors = append(*errors, errs...) continue } remote.URLs = urls case string: url, err := parseURL(v, "leafnode") if err != nil { *errors = append(*errors, &configErr{tk, err.Error()}) continue } remote.URLs = append(remote.URLs, url) } case "account", "local": remote.LocalAccount = v.(string) case "creds", "credentials": p, err := expandPath(v.(string)) if err != nil { *errors = append(*errors, &configErr{tk, err.Error()}) continue } remote.Credentials = p case "tls": tc, err := parseTLS(tk) if err != nil { *errors = append(*errors, err) continue } if remote.TLSConfig, err = GenTLSConfig(tc); err != nil { *errors = append(*errors, &configErr{tk, err.Error()}) continue } // If ca_file is defined, GenTLSConfig() sets TLSConfig.ClientCAs. // Set RootCAs since this tls.Config is used when soliciting // a connection (therefore behaves as a client). remote.TLSConfig.RootCAs = remote.TLSConfig.ClientCAs if tc.Timeout > 0 { remote.TLSTimeout = tc.Timeout } else { remote.TLSTimeout = float64(DEFAULT_LEAF_TLS_TIMEOUT) } default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: k, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) continue } } } remotes = append(remotes, remote) } return remotes, nil } // Parse TLS and returns a TLSConfig and TLSTimeout. // Used by cluster and gateway parsing. func getTLSConfig(tk token) (*tls.Config, *TLSConfigOpts, error) { tc, err := parseTLS(tk) if err != nil { return nil, nil, err } config, err := GenTLSConfig(tc) if err != nil { err := &configErr{tk, err.Error()} return nil, nil, err } // For clusters/gateways, we will force strict verification. We also act // as both client and server, so will mirror the rootCA to the // clientCA pool. 
config.ClientAuth = tls.RequireAndVerifyClientCert config.RootCAs = config.ClientCAs return config, tc, nil } func parseGateways(v interface{}, errors *[]error, warnings *[]error) ([]*RemoteGatewayOpts, error) { tk, v := unwrapValue(v) // Make sure we have an array ga, ok := v.([]interface{}) if !ok { return nil, &configErr{tk, fmt.Sprintf("Expected gateways field to be an array, got %T", v)} } gateways := []*RemoteGatewayOpts{} for _, g := range ga { tk, g = unwrapValue(g) // Check its a map/struct gm, ok := g.(map[string]interface{}) if !ok { *errors = append(*errors, &configErr{tk, fmt.Sprintf("Expected gateway entry to be a map/struct, got %v", g)}) continue } gateway := &RemoteGatewayOpts{} for k, v := range gm { tk, v = unwrapValue(v) switch strings.ToLower(k) { case "name": gateway.Name = v.(string) case "tls": tls, tlsopts, err := getTLSConfig(tk) if err != nil { *errors = append(*errors, err) continue } gateway.TLSConfig = tls gateway.TLSTimeout = tlsopts.Timeout case "url": url, err := parseURL(v.(string), "gateway") if err != nil { *errors = append(*errors, &configErr{tk, err.Error()}) continue } gateway.URLs = append(gateway.URLs, url) case "urls": urls, errs := parseURLs(v.([]interface{}), "gateway") if errs != nil { *errors = append(*errors, errs...) continue } gateway.URLs = urls default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: k, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) continue } } } gateways = append(gateways, gateway) } return gateways, nil } // Sets cluster's permissions based on given pub/sub permissions, // doing the appropriate translation. func setClusterPermissions(opts *ClusterOpts, perms *Permissions) { // Import is whether or not we will send a SUB for interest to the other side. // Export is whether or not we will accept a SUB from the remote for a given subject. // Both only effect interest registration. // The parsing sets Import into Publish and Export into Subscribe, convert // accordingly. opts.Permissions = &RoutePermissions{ Import: perms.Publish, Export: perms.Subscribe, } } // Temp structures to hold account import and export defintions since they need // to be processed after being parsed. type export struct { acc *Account sub string accs []string rt ServiceRespType lat *serviceLatency } type importStream struct { acc *Account an string sub string pre string } type importService struct { acc *Account an string sub string to string } // Checks if an account name is reserved. func isReservedAccount(name string) bool { return name == globalAccountName } // parseAccounts will parse the different accounts syntax. func parseAccounts(v interface{}, opts *Options, errors *[]error, warnings *[]error) error { var ( importStreams []*importStream importServices []*importService exportStreams []*export exportServices []*export ) tk, v := unwrapValue(v) switch vv := v.(type) { // Simple array of account names. case []interface{}, []string: m := make(map[string]struct{}, len(v.([]interface{}))) for _, n := range v.([]interface{}) { tk, name := unwrapValue(n) ns := name.(string) // Check for reserved names. 
if isReservedAccount(ns) { err := &configErr{tk, fmt.Sprintf("%q is a Reserved Account", ns)} *errors = append(*errors, err) continue } if _, ok := m[ns]; ok { err := &configErr{tk, fmt.Sprintf("Duplicate Account Entry: %s", ns)} *errors = append(*errors, err) continue } opts.Accounts = append(opts.Accounts, NewAccount(ns)) m[ns] = struct{}{} } // More common map entry case map[string]interface{}: // Track users across accounts, must be unique across // accounts and nkeys vs users. uorn := make(map[string]struct{}) for aname, mv := range vv { tk, amv := unwrapValue(mv) // Skip referenced config vars within the account block. if tk.IsUsedVariable() { continue } // These should be maps. mv, ok := amv.(map[string]interface{}) if !ok { err := &configErr{tk, "Expected map entries for accounts"} *errors = append(*errors, err) continue } if isReservedAccount(aname) { err := &configErr{tk, fmt.Sprintf("%q is a Reserved Account", aname)} *errors = append(*errors, err) continue } acc := NewAccount(aname) opts.Accounts = append(opts.Accounts, acc) for k, v := range mv { tk, mv := unwrapValue(v) switch strings.ToLower(k) { case "nkey": nk, ok := mv.(string) if !ok || !nkeys.IsValidPublicAccountKey(nk) { err := &configErr{tk, fmt.Sprintf("Not a valid public nkey for an account: %q", mv)} *errors = append(*errors, err) continue } acc.Nkey = nk case "imports": streams, services, err := parseAccountImports(tk, acc, errors, warnings) if err != nil { *errors = append(*errors, err) continue } importStreams = append(importStreams, streams...) importServices = append(importServices, services...) case "exports": streams, services, err := parseAccountExports(tk, acc, errors, warnings) if err != nil { *errors = append(*errors, err) continue } exportStreams = append(exportStreams, streams...) exportServices = append(exportServices, services...) case "users": nkeys, users, err := parseUsers(mv, opts, errors, warnings) if err != nil { *errors = append(*errors, err) continue } for _, u := range users { if _, ok := uorn[u.Username]; ok { err := &configErr{tk, fmt.Sprintf("Duplicate user %q detected", u.Username)} *errors = append(*errors, err) continue } uorn[u.Username] = struct{}{} u.Account = acc } opts.Users = append(opts.Users, users...) for _, u := range nkeys { if _, ok := uorn[u.Nkey]; ok { err := &configErr{tk, fmt.Sprintf("Duplicate nkey %q detected", u.Nkey)} *errors = append(*errors, err) continue } uorn[u.Nkey] = struct{}{} u.Account = acc } opts.Nkeys = append(opts.Nkeys, nkeys...) default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: k, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) } } } } } // Bail already if there are previous errors. if len(*errors) > 0 { return nil } // Parse Imports and Exports here after all accounts defined. // Do exports first since they need to be defined for imports to succeed // since we do permissions checks. // Create a lookup map for accounts lookups. am := make(map[string]*Account, len(opts.Accounts)) for _, a := range opts.Accounts { am[a.Name] = a } // Do stream exports for _, stream := range exportStreams { // Make array of accounts if applicable. 
var accounts []*Account for _, an := range stream.accs { ta := am[an] if ta == nil { msg := fmt.Sprintf("%q account not defined for stream export", an) *errors = append(*errors, &configErr{tk, msg}) continue } accounts = append(accounts, ta) } if err := stream.acc.AddStreamExport(stream.sub, accounts); err != nil { msg := fmt.Sprintf("Error adding stream export %q: %v", stream.sub, err) *errors = append(*errors, &configErr{tk, msg}) continue } } for _, service := range exportServices { // Make array of accounts if applicable. var accounts []*Account for _, an := range service.accs { ta := am[an] if ta == nil { msg := fmt.Sprintf("%q account not defined for service export", an) *errors = append(*errors, &configErr{tk, msg}) continue } accounts = append(accounts, ta) } if err := service.acc.AddServiceExportWithResponse(service.sub, service.rt, accounts); err != nil { msg := fmt.Sprintf("Error adding service export %q: %v", service.sub, err) *errors = append(*errors, &configErr{tk, msg}) continue } if service.lat != nil { if opts.SystemAccount == "" { msg := fmt.Sprintf("Error adding service latency sampling for %q: %v", service.sub, ErrNoSysAccount.Error()) *errors = append(*errors, &configErr{tk, msg}) continue } if err := service.acc.TrackServiceExportWithSampling(service.sub, service.lat.subject, int(service.lat.sampling)); err != nil { msg := fmt.Sprintf("Error adding service latency sampling for %q on subject %q: %v", service.sub, service.lat.subject, err) *errors = append(*errors, &configErr{tk, msg}) continue } } } for _, stream := range importStreams { ta := am[stream.an] if ta == nil { msg := fmt.Sprintf("%q account not defined for stream import", stream.an) *errors = append(*errors, &configErr{tk, msg}) continue } if err := stream.acc.AddStreamImport(ta, stream.sub, stream.pre); err != nil { msg := fmt.Sprintf("Error adding stream import %q: %v", stream.sub, err) *errors = append(*errors, &configErr{tk, msg}) continue } } for _, service := range importServices { ta := am[service.an] if ta == nil { msg := fmt.Sprintf("%q account not defined for service import", service.an) *errors = append(*errors, &configErr{tk, msg}) continue } if service.to == "" { service.to = service.sub } if err := service.acc.AddServiceImport(ta, service.to, service.sub); err != nil { msg := fmt.Sprintf("Error adding service import %q: %v", service.sub, err) *errors = append(*errors, &configErr{tk, msg}) continue } } return nil } // Parse the account exports func parseAccountExports(v interface{}, acc *Account, errors, warnings *[]error) ([]*export, []*export, error) { // This should be an array of objects/maps. tk, v := unwrapValue(v) ims, ok := v.([]interface{}) if !ok { return nil, nil, &configErr{tk, fmt.Sprintf("Exports should be an array, got %T", v)} } var services []*export var streams []*export for _, v := range ims { // Should have stream or service stream, service, err := parseExportStreamOrService(v, errors, warnings) if err != nil { *errors = append(*errors, err) continue } if service != nil { service.acc = acc services = append(services, service) } if stream != nil { stream.acc = acc streams = append(streams, stream) } } return streams, services, nil } // Parse the account imports func parseAccountImports(v interface{}, acc *Account, errors, warnings *[]error) ([]*importStream, []*importService, error) { // This should be an array of objects/maps. 
tk, v := unwrapValue(v) ims, ok := v.([]interface{}) if !ok { return nil, nil, &configErr{tk, fmt.Sprintf("Imports should be an array, got %T", v)} } var services []*importService var streams []*importStream svcSubjects := map[string]*importService{} for _, v := range ims { // Should have stream or service stream, service, err := parseImportStreamOrService(v, errors, warnings) if err != nil { *errors = append(*errors, err) continue } if service != nil { if dup := svcSubjects[service.to]; dup != nil { tk, _ := unwrapValue(v) err := &configErr{tk, fmt.Sprintf("Duplicate service import subject %q, previously used in import for account %q, subject %q", service.to, dup.an, dup.sub)} *errors = append(*errors, err) continue } svcSubjects[service.to] = service service.acc = acc services = append(services, service) } if stream != nil { stream.acc = acc streams = append(streams, stream) } } return streams, services, nil } // Helper to parse an embedded account description for imported services or streams. func parseAccount(v map[string]interface{}, errors, warnings *[]error) (string, string, error) { var accountName, subject string for mk, mv := range v { tk, mv := unwrapValue(mv) switch strings.ToLower(mk) { case "account": accountName = mv.(string) case "subject": subject = mv.(string) default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: mk, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) } } } return accountName, subject, nil } // Parse an export stream or service. // e.g. // {stream: "public.>"} # No accounts means public. // {stream: "synadia.private.>", accounts: [cncf, natsio]} // {service: "pub.request"} # No accounts means public. // {service: "pub.special.request", accounts: [nats.io]} func parseExportStreamOrService(v interface{}, errors, warnings *[]error) (*export, *export, error) { var ( curStream *export curService *export accounts []string rt ServiceRespType rtSeen bool rtToken token lat *serviceLatency latToken token ) tk, v := unwrapValue(v) vv, ok := v.(map[string]interface{}) if !ok { return nil, nil, &configErr{tk, fmt.Sprintf("Export Items should be a map with type entry, got %T", v)} } for mk, mv := range vv { tk, mv := unwrapValue(mv) switch strings.ToLower(mk) { case "stream": if curService != nil { err := &configErr{tk, fmt.Sprintf("Detected stream %q but already saw a service", mv)} *errors = append(*errors, err) continue } if rtToken != nil { err := &configErr{rtToken, "Detected response directive on non-service"} *errors = append(*errors, err) continue } if latToken != nil { err := &configErr{latToken, "Detected latency directive on non-service"} *errors = append(*errors, err) continue } mvs, ok := mv.(string) if !ok { err := &configErr{tk, fmt.Sprintf("Expected stream name to be string, got %T", mv)} *errors = append(*errors, err) continue } curStream = &export{sub: mvs} if accounts != nil { curStream.accs = accounts } case "response", "response_type": rtSeen = true rtToken = tk mvs, ok := mv.(string) if !ok { err := &configErr{tk, fmt.Sprintf("Expected response type to be string, got %T", mv)} *errors = append(*errors, err) continue } switch strings.ToLower(mvs) { case "single", "singleton": rt = Singleton case "stream": rt = Streamed case "chunk", "chunked": rt = Chunked default: err := &configErr{tk, fmt.Sprintf("Unknown response type: %q", mvs)} *errors = append(*errors, err) continue } if curService != nil { curService.rt = rt } if curStream != nil { err := &configErr{tk, "Detected response directive on non-service"} *errors 
= append(*errors, err) } case "service": if curStream != nil { err := &configErr{tk, fmt.Sprintf("Detected service %q but already saw a stream", mv)} *errors = append(*errors, err) continue } mvs, ok := mv.(string) if !ok { err := &configErr{tk, fmt.Sprintf("Expected service name to be string, got %T", mv)} *errors = append(*errors, err) continue } curService = &export{sub: mvs} if accounts != nil { curService.accs = accounts } if rtSeen { curService.rt = rt } if lat != nil { curService.lat = lat } case "accounts": for _, iv := range mv.([]interface{}) { _, mv := unwrapValue(iv) accounts = append(accounts, mv.(string)) } if curStream != nil { curStream.accs = accounts } else if curService != nil { curService.accs = accounts } case "latency": latToken = tk var err error lat, err = parseServiceLatency(tk, mv) if err != nil { *errors = append(*errors, err) continue } if curStream != nil { err = &configErr{tk, "Detected latency directive on non-service"} *errors = append(*errors, err) continue } if curService != nil { curService.lat = lat } default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: mk, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) } } } return curStream, curService, nil } // parseServiceLatency returns a latency config block. func parseServiceLatency(root token, v interface{}) (*serviceLatency, error) { if subject, ok := v.(string); ok { return &serviceLatency{ subject: subject, sampling: DEFAULT_SERVICE_LATENCY_SAMPLING, }, nil } latency, ok := v.(map[string]interface{}) if !ok { return nil, &configErr{token: root, reason: fmt.Sprintf("Expected latency entry to be a map/struct or string, got %T", v)} } sl := serviceLatency{ sampling: DEFAULT_SERVICE_LATENCY_SAMPLING, } // Read sampling value. if v, ok := latency["sampling"]; ok { tk, v := unwrapValue(v) var sample int64 switch vv := v.(type) { case int64: // Sample is an int, like 50. sample = vv case string: // Sample is a string, like "50%". s := strings.TrimSuffix(vv, "%") n, err := strconv.Atoi(s) if err != nil { return nil, &configErr{token: tk, reason: fmt.Sprintf("Failed to parse latency sample: %v", err)} } sample = int64(n) default: return nil, &configErr{token: tk, reason: fmt.Sprintf("Expected latency sample to be a string or map/struct, got %T", v)} } if sample < 1 || sample > 100 { return nil, &configErr{token: tk, reason: ErrBadSampling.Error()} } sl.sampling = int8(sample) } // Read subject value. v, ok = latency["subject"] if !ok { return nil, &configErr{token: root, reason: "Latency subject required, but missing"} } tk, v := unwrapValue(v) subject, ok := v.(string) if !ok { return nil, &configErr{token: tk, reason: fmt.Sprintf("Expected latency subject to be a string, got %T", subject)} } sl.subject = subject return &sl, nil } // Parse an import stream or service. // e.g. 
// {stream: {account: "synadia", subject:"public.synadia"}, prefix: "imports.synadia"} // {stream: {account: "synadia", subject:"synadia.private.*"}} // {service: {account: "synadia", subject: "pub.special.request"}, to: "synadia.request"} func parseImportStreamOrService(v interface{}, errors, warnings *[]error) (*importStream, *importService, error) { var ( curStream *importStream curService *importService pre, to string ) tk, mv := unwrapValue(v) vv, ok := mv.(map[string]interface{}) if !ok { return nil, nil, &configErr{tk, fmt.Sprintf("Import Items should be a map with type entry, got %T", mv)} } for mk, mv := range vv { tk, mv := unwrapValue(mv) switch strings.ToLower(mk) { case "stream": if curService != nil { err := &configErr{tk, "Detected stream but already saw a service"} *errors = append(*errors, err) continue } ac, ok := mv.(map[string]interface{}) if !ok { err := &configErr{tk, fmt.Sprintf("Stream entry should be an account map, got %T", mv)} *errors = append(*errors, err) continue } // Make sure this is a map with account and subject accountName, subject, err := parseAccount(ac, errors, warnings) if err != nil { *errors = append(*errors, err) continue } if accountName == "" || subject == "" { err := &configErr{tk, "Expect an account name and a subject"} *errors = append(*errors, err) continue } curStream = &importStream{an: accountName, sub: subject} if pre != "" { curStream.pre = pre } case "service": if curStream != nil { err := &configErr{tk, "Detected service but already saw a stream"} *errors = append(*errors, err) continue } ac, ok := mv.(map[string]interface{}) if !ok { err := &configErr{tk, fmt.Sprintf("Service entry should be an account map, got %T", mv)} *errors = append(*errors, err) continue } // Make sure this is a map with account and subject accountName, subject, err := parseAccount(ac, errors, warnings) if err != nil { *errors = append(*errors, err) continue } if accountName == "" || subject == "" { err := &configErr{tk, "Expect an account name and a subject"} *errors = append(*errors, err) continue } curService = &importService{an: accountName, sub: subject} if to != "" { curService.to = to } case "prefix": pre = mv.(string) if curStream != nil { curStream.pre = pre } case "to": to = mv.(string) if curService != nil { curService.to = to } default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: mk, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) } } } return curStream, curService, nil } // Helper function to parse Authorization configs. 
func parseAuthorization(v interface{}, opts *Options, errors *[]error, warnings *[]error) (*authorization, error) { var ( am map[string]interface{} tk token auth = &authorization{} ) _, v = unwrapValue(v) am = v.(map[string]interface{}) for mk, mv := range am { tk, mv = unwrapValue(mv) switch strings.ToLower(mk) { case "user", "username": auth.user = mv.(string) case "pass", "password": auth.pass = mv.(string) case "token": auth.token = mv.(string) case "timeout": at := float64(1) switch mv := mv.(type) { case int64: at = float64(mv) case float64: at = mv } auth.timeout = at case "users": nkeys, users, err := parseUsers(tk, opts, errors, warnings) if err != nil { *errors = append(*errors, err) continue } auth.users = users auth.nkeys = nkeys case "default_permission", "default_permissions", "permissions": permissions, err := parseUserPermissions(tk, errors, warnings) if err != nil { *errors = append(*errors, err) continue } auth.defaultPermissions = permissions default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: mk, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) } continue } // Now check for permission defaults with multiple users, etc. if auth.users != nil && auth.defaultPermissions != nil { for _, user := range auth.users { if user.Permissions == nil { user.Permissions = auth.defaultPermissions } } } } return auth, nil } // Helper function to parse multiple users array with optional permissions. func parseUsers(mv interface{}, opts *Options, errors *[]error, warnings *[]error) ([]*NkeyUser, []*User, error) { var ( tk token keys []*NkeyUser users = []*User{} ) tk, mv = unwrapValue(mv) // Make sure we have an array uv, ok := mv.([]interface{}) if !ok { return nil, nil, &configErr{tk, fmt.Sprintf("Expected users field to be an array, got %v", mv)} } for _, u := range uv { tk, u = unwrapValue(u) // Check its a map/struct um, ok := u.(map[string]interface{}) if !ok { err := &configErr{tk, fmt.Sprintf("Expected user entry to be a map/struct, got %v", u)} *errors = append(*errors, err) continue } var ( user = &User{} nkey = &NkeyUser{} perms *Permissions err error ) for k, v := range um { // Also needs to unwrap first tk, v = unwrapValue(v) switch strings.ToLower(k) { case "nkey": nkey.Nkey = v.(string) case "user", "username": user.Username = v.(string) case "pass", "password": user.Password = v.(string) case "permission", "permissions", "authorization": perms, err = parseUserPermissions(tk, errors, warnings) if err != nil { *errors = append(*errors, err) continue } default: if !tk.IsUsedVariable() { err := &unknownConfigFieldErr{ field: k, configErr: configErr{ token: tk, }, } *errors = append(*errors, err) continue } } } // Place perms if we have them. if perms != nil { // nkey takes precedent. if nkey.Nkey != "" { nkey.Permissions = perms } else { user.Permissions = perms } } // Check to make sure we have at least an nkey or username <password> defined. if nkey.Nkey == "" && user.Username == "" { return nil, nil, &configErr{tk, "User entry requires a user"} } else if nkey.Nkey != "" { // Make sure the nkey a proper public nkey for a user.. if !nkeys.IsValidPublicUserKey(nkey.Nkey) { return nil, nil, &configErr{tk, "Not a valid public nkey for a user"} } // If we have user or password defined here that is an error. 
if user.Username != "" || user.Password != "" { return nil, nil, &configErr{tk, "Nkey users do not take usernames or passwords"} } keys = append(keys, nkey) } else { users = append(users, user) } } return keys, users, nil } // Helper function to parse user/account permissions func parseUserPermissions(mv interface{}, errors, warnings *[]error) (*Permissions, error) { var ( tk token p = &Permissions{} ) tk, mv = unwrapValue(mv) pm, ok := mv.(map[string]interface{}) if !ok { return nil, &configErr{tk, fmt.Sprintf("Expected permissions to be a map/struct, got %+v", mv)} } for k, v := range pm { tk, mv = unwrapValue(v) switch strings.ToLower(k) { // For routes: // Import is Publish // Export is Subscribe case "pub", "publish", "import": perms, err := parseVariablePermissions(mv, errors, warnings) if err != nil { *errors = append(*errors, err) continue } p.Publish = perms case "sub", "subscribe", "export": perms, err := parseVariablePermissions(mv, errors, warnings) if err != nil { *errors = append(*errors, err) continue } p.Subscribe = perms case "publish_allow_responses", "allow_responses": rp := &ResponsePermission{ MaxMsgs: DEFAULT_ALLOW_RESPONSE_MAX_MSGS, Expires: DEFAULT_ALLOW_RESPONSE_EXPIRATION, } // Try boolean first responses, ok := mv.(bool) if ok { if responses { p.Response = rp } } else { p.Response = parseAllowResponses(v, errors, warnings) } if p.Response != nil { if p.Publish == nil { p.Publish = &SubjectPermission{} } if p.Publish.Allow == nil { // We turn off the blanket allow statement. p.Publish.Allow = []string{} } } default: if !tk.IsUsedVariable() { err := &configErr{tk, fmt.Sprintf("Unknown field %q parsing permissions", k)} *errors = append(*errors, err) } } } return p, nil } // Top level parser for authorization configurations. func parseVariablePermissions(v interface{}, errors, warnings *[]error) (*SubjectPermission, error) { switch vv := v.(type) { case map[string]interface{}: // New style with allow and/or deny properties. return parseSubjectPermission(vv, errors, warnings) default: // Old style return parseOldPermissionStyle(v, errors, warnings) } } // Helper function to parse subject singletons and/or arrays func parseSubjects(v interface{}, errors, warnings *[]error) ([]string, error) { tk, v := unwrapValue(v) var subjects []string switch vv := v.(type) { case string: subjects = append(subjects, vv) case []string: subjects = vv case []interface{}: for _, i := range vv { tk, i := unwrapValue(i) subject, ok := i.(string) if !ok { return nil, &configErr{tk, "Subject in permissions array cannot be cast to string"} } subjects = append(subjects, subject) } default: return nil, &configErr{tk, fmt.Sprintf("Expected subject permissions to be a subject, or array of subjects, got %T", v)} } if err := checkSubjectArray(subjects); err != nil { return nil, &configErr{tk, err.Error()} } return subjects, nil } // Helper function to parse a ResponsePermission. func parseAllowResponses(v interface{}, errors, warnings *[]error) *ResponsePermission { tk, v := unwrapValue(v) // Check if this is a map. 
pm, ok := v.(map[string]interface{}) if !ok { err := &configErr{tk, "error parsing response permissions, expected a boolean or a map"} *errors = append(*errors, err) return nil } rp := &ResponsePermission{ MaxMsgs: DEFAULT_ALLOW_RESPONSE_MAX_MSGS, Expires: DEFAULT_ALLOW_RESPONSE_EXPIRATION, } for k, v := range pm { tk, v = unwrapValue(v) switch strings.ToLower(k) { case "max", "max_msgs", "max_messages", "max_responses": max := int(v.(int64)) // Negative values are accepted (mean infinite), and 0 // means default value (set above). if max != 0 { rp.MaxMsgs = max } case "expires", "expiration", "ttl": wd, ok := v.(string) if ok { ttl, err := time.ParseDuration(wd) if err != nil { err := &configErr{tk, fmt.Sprintf("error parsing expires: %v", err)} *errors = append(*errors, err) return nil } // Negative values are accepted (mean infinite), and 0 // means default value (set above). if ttl != 0 { rp.Expires = ttl } } else { err := &configErr{tk, "error parsing expires, not a duration string"} *errors = append(*errors, err) return nil } default: if !tk.IsUsedVariable() { err := &configErr{tk, fmt.Sprintf("Unknown field %q parsing permissions", k)} *errors = append(*errors, err) } } } return rp } // Helper function to parse old style authorization configs. func parseOldPermissionStyle(v interface{}, errors, warnings *[]error) (*SubjectPermission, error) { subjects, err := parseSubjects(v, errors, warnings) if err != nil { return nil, err } return &SubjectPermission{Allow: subjects}, nil } // Helper function to parse new style authorization into a SubjectPermission with Allow and Deny. func parseSubjectPermission(v interface{}, errors, warnings *[]error) (*SubjectPermission, error) { m := v.(map[string]interface{}) if len(m) == 0 { return nil, nil } p := &SubjectPermission{} for k, v := range m { tk, _ := unwrapValue(v) switch strings.ToLower(k) { case "allow": subjects, err := parseSubjects(tk, errors, warnings) if err != nil { *errors = append(*errors, err) continue } p.Allow = subjects case "deny": subjects, err := parseSubjects(tk, errors, warnings) if err != nil { *errors = append(*errors, err) continue } p.Deny = subjects default: if !tk.IsUsedVariable() { err := &configErr{tk, fmt.Sprintf("Unknown field name %q parsing subject permissions, only 'allow' or 'deny' are permitted", k)} *errors = append(*errors, err) } } } return p, nil } // Helper function to validate subjects, etc for account permissioning. func checkSubjectArray(sa []string) error { for _, s := range sa { if !IsValidSubject(s) { return fmt.Errorf("subject %q is not a valid subject", s) } } return nil } // PrintTLSHelpAndDie prints TLS usage and exits. func PrintTLSHelpAndDie() { fmt.Printf("%s", tlsUsage) for k := range cipherMap { fmt.Printf(" %s\n", k) } fmt.Printf("\nAvailable curve preferences include:\n") for k := range curvePreferenceMap { fmt.Printf(" %s\n", k) } os.Exit(0) } func parseCipher(cipherName string) (uint16, error) { cipher, exists := cipherMap[cipherName] if !exists { return 0, fmt.Errorf("unrecognized cipher %s", cipherName) } return cipher, nil } func parseCurvePreferences(curveName string) (tls.CurveID, error) { curve, exists := curvePreferenceMap[curveName] if !exists { return 0, fmt.Errorf("unrecognized curve preference %s", curveName) } return curve, nil } // Helper function to parse TLS configs. 
func parseTLS(v interface{}) (*TLSConfigOpts, error) { var ( tlsm map[string]interface{} tc = TLSConfigOpts{} ) _, v = unwrapValue(v) tlsm = v.(map[string]interface{}) for mk, mv := range tlsm { tk, mv := unwrapValue(mv) switch strings.ToLower(mk) { case "cert_file": certFile, ok := mv.(string) if !ok { return nil, &configErr{tk, "error parsing tls config, expected 'cert_file' to be filename"} } tc.CertFile = certFile case "key_file": keyFile, ok := mv.(string) if !ok { return nil, &configErr{tk, "error parsing tls config, expected 'key_file' to be filename"} } tc.KeyFile = keyFile case "ca_file": caFile, ok := mv.(string) if !ok { return nil, &configErr{tk, "error parsing tls config, expected 'ca_file' to be filename"} } tc.CaFile = caFile case "insecure": insecure, ok := mv.(bool) if !ok { return nil, &configErr{tk, "error parsing tls config, expected 'insecure' to be a boolean"} } tc.Insecure = insecure case "verify": verify, ok := mv.(bool) if !ok { return nil, &configErr{tk, "error parsing tls config, expected 'verify' to be a boolean"} } tc.Verify = verify case "verify_and_map": verify, ok := mv.(bool) if !ok { return nil, &configErr{tk, "error parsing tls config, expected 'verify_and_map' to be a boolean"} } tc.Verify = verify tc.Map = verify case "cipher_suites": ra := mv.([]interface{}) if len(ra) == 0 { return nil, &configErr{tk, "error parsing tls config, 'cipher_suites' cannot be empty"} } tc.Ciphers = make([]uint16, 0, len(ra)) for _, r := range ra { tk, r := unwrapValue(r) cipher, err := parseCipher(r.(string)) if err != nil { return nil, &configErr{tk, err.Error()} } tc.Ciphers = append(tc.Ciphers, cipher) } case "curve_preferences": ra := mv.([]interface{}) if len(ra) == 0 { return nil, &configErr{tk, "error parsing tls config, 'curve_preferences' cannot be empty"} } tc.CurvePreferences = make([]tls.CurveID, 0, len(ra)) for _, r := range ra { tk, r := unwrapValue(r) cps, err := parseCurvePreferences(r.(string)) if err != nil { return nil, &configErr{tk, err.Error()} } tc.CurvePreferences = append(tc.CurvePreferences, cps) } case "timeout": at := float64(0) switch mv := mv.(type) { case int64: at = float64(mv) case float64: at = mv } tc.Timeout = at default: return nil, &configErr{tk, fmt.Sprintf("error parsing tls config, unknown field [%q]", mk)} } } // If cipher suites were not specified then use the defaults if tc.Ciphers == nil { tc.Ciphers = defaultCipherSuites() } // If curve preferences were not specified, then use the defaults if tc.CurvePreferences == nil { tc.CurvePreferences = defaultCurvePreferences() } return &tc, nil } // GenTLSConfig loads TLS related configuration parameters. func GenTLSConfig(tc *TLSConfigOpts) (*tls.Config, error) { // Create the tls.Config from our options before including the certs. // It will determine the cipher suites that we prefer. // FIXME(dlc) change if ARM based. 
config := tls.Config{ MinVersion: tls.VersionTLS12, CipherSuites: tc.Ciphers, PreferServerCipherSuites: true, CurvePreferences: tc.CurvePreferences, InsecureSkipVerify: tc.Insecure, } switch { case tc.CertFile != "" && tc.KeyFile == "": return nil, fmt.Errorf("missing 'key_file' in TLS configuration") case tc.CertFile == "" && tc.KeyFile != "": return nil, fmt.Errorf("missing 'cert_file' in TLS configuration") case tc.CertFile != "" && tc.KeyFile != "": // Now load in cert and private key cert, err := tls.LoadX509KeyPair(tc.CertFile, tc.KeyFile) if err != nil { return nil, fmt.Errorf("error parsing X509 certificate/key pair: %v", err) } cert.Leaf, err = x509.ParseCertificate(cert.Certificate[0]) if err != nil { return nil, fmt.Errorf("error parsing certificate: %v", err) } config.Certificates = []tls.Certificate{cert} } // Require client certificates as needed if tc.Verify { config.ClientAuth = tls.RequireAndVerifyClientCert } // Add in CAs if applicable. if tc.CaFile != "" { rootPEM, err := ioutil.ReadFile(tc.CaFile) if err != nil || rootPEM == nil { return nil, err } pool := x509.NewCertPool() ok := pool.AppendCertsFromPEM(rootPEM) if !ok { return nil, fmt.Errorf("failed to parse root ca certificate") } config.ClientCAs = pool } return &config, nil } // MergeOptions will merge two options giving preference to the flagOpts // if the item is present. func MergeOptions(fileOpts, flagOpts *Options) *Options { if fileOpts == nil { return flagOpts } if flagOpts == nil { return fileOpts } // Merge the two, flagOpts override opts := *fileOpts if flagOpts.Port != 0 { opts.Port = flagOpts.Port } if flagOpts.Host != "" { opts.Host = flagOpts.Host } if flagOpts.ClientAdvertise != "" { opts.ClientAdvertise = flagOpts.ClientAdvertise } if flagOpts.Username != "" { opts.Username = flagOpts.Username } if flagOpts.Password != "" { opts.Password = flagOpts.Password } if flagOpts.Authorization != "" { opts.Authorization = flagOpts.Authorization } if flagOpts.HTTPPort != 0 { opts.HTTPPort = flagOpts.HTTPPort } if flagOpts.Debug { opts.Debug = true } if flagOpts.Trace { opts.Trace = true } if flagOpts.Logtime { opts.Logtime = true } if flagOpts.LogFile != "" { opts.LogFile = flagOpts.LogFile } if flagOpts.PidFile != "" { opts.PidFile = flagOpts.PidFile } if flagOpts.PortsFileDir != "" { opts.PortsFileDir = flagOpts.PortsFileDir } if flagOpts.ProfPort != 0 { opts.ProfPort = flagOpts.ProfPort } if flagOpts.Cluster.ListenStr != "" { opts.Cluster.ListenStr = flagOpts.Cluster.ListenStr } if flagOpts.Cluster.NoAdvertise { opts.Cluster.NoAdvertise = true } if flagOpts.Cluster.ConnectRetries != 0 { opts.Cluster.ConnectRetries = flagOpts.Cluster.ConnectRetries } if flagOpts.Cluster.Advertise != "" { opts.Cluster.Advertise = flagOpts.Cluster.Advertise } if flagOpts.RoutesStr != "" { mergeRoutes(&opts, flagOpts) } return &opts } // RoutesFromStr parses route URLs from a string func RoutesFromStr(routesStr string) []*url.URL { routes := strings.Split(routesStr, ",") if len(routes) == 0 { return nil } routeUrls := []*url.URL{} for _, r := range routes { r = strings.TrimSpace(r) u, _ := url.Parse(r) routeUrls = append(routeUrls, u) } return routeUrls } // This will merge the flag routes and override anything that was present. 
func mergeRoutes(opts, flagOpts *Options) { routeUrls := RoutesFromStr(flagOpts.RoutesStr) if routeUrls == nil { return } opts.Routes = routeUrls opts.RoutesStr = flagOpts.RoutesStr } // RemoveSelfReference removes this server from an array of routes func RemoveSelfReference(clusterPort int, routes []*url.URL) ([]*url.URL, error) { var cleanRoutes []*url.URL cport := strconv.Itoa(clusterPort) selfIPs, err := getInterfaceIPs() if err != nil { return nil, err } for _, r := range routes { host, port, err := net.SplitHostPort(r.Host) if err != nil { return nil, err } ipList, err := getURLIP(host) if err != nil { return nil, err } if cport == port && isIPInList(selfIPs, ipList) { continue } cleanRoutes = append(cleanRoutes, r) } return cleanRoutes, nil } func isIPInList(list1 []net.IP, list2 []net.IP) bool { for _, ip1 := range list1 { for _, ip2 := range list2 { if ip1.Equal(ip2) { return true } } } return false } func getURLIP(ipStr string) ([]net.IP, error) { ipList := []net.IP{} ip := net.ParseIP(ipStr) if ip != nil { ipList = append(ipList, ip) return ipList, nil } hostAddr, err := net.LookupHost(ipStr) if err != nil { return nil, fmt.Errorf("Error looking up host with route hostname: %v", err) } for _, addr := range hostAddr { ip = net.ParseIP(addr) if ip != nil { ipList = append(ipList, ip) } } return ipList, nil } func getInterfaceIPs() ([]net.IP, error) { var localIPs []net.IP interfaceAddr, err := net.InterfaceAddrs() if err != nil { return nil, fmt.Errorf("Error getting self referencing address: %v", err) } for i := 0; i < len(interfaceAddr); i++ { interfaceIP, _, _ := net.ParseCIDR(interfaceAddr[i].String()) if net.ParseIP(interfaceIP.String()) != nil { localIPs = append(localIPs, interfaceIP) } else { return nil, fmt.Errorf("Error parsing self referencing address: %v", err) } } return localIPs, nil } func setBaselineOptions(opts *Options) { // Setup non-standard Go defaults if opts.Host == "" { opts.Host = DEFAULT_HOST } if opts.HTTPHost == "" { // Default to same bind from server if left undefined opts.HTTPHost = opts.Host } if opts.Port == 0 { opts.Port = DEFAULT_PORT } else if opts.Port == RANDOM_PORT { // Choose randomly inside of net.Listen opts.Port = 0 } if opts.MaxConn == 0 { opts.MaxConn = DEFAULT_MAX_CONNECTIONS } if opts.PingInterval == 0 { opts.PingInterval = DEFAULT_PING_INTERVAL } if opts.MaxPingsOut == 0 { opts.MaxPingsOut = DEFAULT_PING_MAX_OUT } if opts.TLSTimeout == 0 { opts.TLSTimeout = float64(TLS_TIMEOUT) / float64(time.Second) } if opts.AuthTimeout == 0 { opts.AuthTimeout = float64(AUTH_TIMEOUT) / float64(time.Second) } if opts.Cluster.Port != 0 { if opts.Cluster.Host == "" { opts.Cluster.Host = DEFAULT_HOST } if opts.Cluster.TLSTimeout == 0 { opts.Cluster.TLSTimeout = float64(TLS_TIMEOUT) / float64(time.Second) } if opts.Cluster.AuthTimeout == 0 { opts.Cluster.AuthTimeout = float64(AUTH_TIMEOUT) / float64(time.Second) } } if opts.LeafNode.Port != 0 { if opts.LeafNode.Host == "" { opts.LeafNode.Host = DEFAULT_HOST } if opts.LeafNode.TLSTimeout == 0 { opts.LeafNode.TLSTimeout = float64(TLS_TIMEOUT) / float64(time.Second) } if opts.LeafNode.AuthTimeout == 0 { opts.LeafNode.AuthTimeout = float64(AUTH_TIMEOUT) / float64(time.Second) } } // Set baseline connect port for remotes. 
for _, r := range opts.LeafNode.Remotes { if r != nil { for _, u := range r.URLs { if u.Port() == "" { u.Host = net.JoinHostPort(u.Host, strconv.Itoa(DEFAULT_LEAFNODE_PORT)) } } } } // Set this regardless of opts.LeafNode.Port if opts.LeafNode.ReconnectInterval == 0 { opts.LeafNode.ReconnectInterval = DEFAULT_LEAF_NODE_RECONNECT } if opts.MaxControlLine == 0 { opts.MaxControlLine = MAX_CONTROL_LINE_SIZE } if opts.MaxPayload == 0 { opts.MaxPayload = MAX_PAYLOAD_SIZE } if opts.MaxPending == 0 { opts.MaxPending = MAX_PENDING_SIZE } if opts.WriteDeadline == time.Duration(0) { opts.WriteDeadline = DEFAULT_FLUSH_DEADLINE } if opts.MaxClosedClients == 0 { opts.MaxClosedClients = DEFAULT_MAX_CLOSED_CLIENTS } if opts.LameDuckDuration == 0 { opts.LameDuckDuration = DEFAULT_LAME_DUCK_DURATION } if opts.Gateway.Port != 0 { if opts.Gateway.Host == "" { opts.Gateway.Host = DEFAULT_HOST } if opts.Gateway.TLSTimeout == 0 { opts.Gateway.TLSTimeout = float64(TLS_TIMEOUT) / float64(time.Second) } if opts.Gateway.AuthTimeout == 0 { opts.Gateway.AuthTimeout = float64(AUTH_TIMEOUT) / float64(time.Second) } } if opts.ConnectErrorReports == 0 { opts.ConnectErrorReports = DEFAULT_CONNECT_ERROR_REPORTS } if opts.ReconnectErrorReports == 0 { opts.ReconnectErrorReports = DEFAULT_RECONNECT_ERROR_REPORTS } } // ConfigureOptions accepts a flag set and augment it with NATS Server // specific flags. On success, an options structure is returned configured // based on the selected flags and/or configuration file. // The command line options take precedence to the ones in the configuration file. func ConfigureOptions(fs *flag.FlagSet, args []string, printVersion, printHelp, printTLSHelp func()) (*Options, error) { opts := &Options{} var ( showVersion bool showHelp bool showTLSHelp bool signal string configFile string dbgAndTrace bool err error ) fs.BoolVar(&showHelp, "h", false, "Show this message.") fs.BoolVar(&showHelp, "help", false, "Show this message.") fs.IntVar(&opts.Port, "port", 0, "Port to listen on.") fs.IntVar(&opts.Port, "p", 0, "Port to listen on.") fs.StringVar(&opts.Host, "addr", "", "Network host to listen on.") fs.StringVar(&opts.Host, "a", "", "Network host to listen on.") fs.StringVar(&opts.Host, "net", "", "Network host to listen on.") fs.StringVar(&opts.ClientAdvertise, "client_advertise", "", "Client URL to advertise to other servers.") fs.BoolVar(&opts.Debug, "D", false, "Enable Debug logging.") fs.BoolVar(&opts.Debug, "debug", false, "Enable Debug logging.") fs.BoolVar(&opts.Trace, "V", false, "Enable Trace logging.") fs.BoolVar(&opts.Trace, "trace", false, "Enable Trace logging.") fs.BoolVar(&dbgAndTrace, "DV", false, "Enable Debug and Trace logging.") fs.BoolVar(&opts.Logtime, "T", true, "Timestamp log entries.") fs.BoolVar(&opts.Logtime, "logtime", true, "Timestamp log entries.") fs.StringVar(&opts.Username, "user", "", "Username required for connection.") fs.StringVar(&opts.Password, "pass", "", "Password required for connection.") fs.StringVar(&opts.Authorization, "auth", "", "Authorization token required for connection.") fs.IntVar(&opts.HTTPPort, "m", 0, "HTTP Port for /varz, /connz endpoints.") fs.IntVar(&opts.HTTPPort, "http_port", 0, "HTTP Port for /varz, /connz endpoints.") fs.IntVar(&opts.HTTPSPort, "ms", 0, "HTTPS Port for /varz, /connz endpoints.") fs.IntVar(&opts.HTTPSPort, "https_port", 0, "HTTPS Port for /varz, /connz endpoints.") fs.StringVar(&configFile, "c", "", "Configuration file.") fs.StringVar(&configFile, "config", "", "Configuration file.") fs.BoolVar(&opts.CheckConfig, "t", 
false, "Check configuration and exit.") fs.StringVar(&signal, "sl", "", "Send signal to nats-server process (stop, quit, reopen, reload).") fs.StringVar(&signal, "signal", "", "Send signal to nats-server process (stop, quit, reopen, reload).") fs.StringVar(&opts.PidFile, "P", "", "File to store process pid.") fs.StringVar(&opts.PidFile, "pid", "", "File to store process pid.") fs.StringVar(&opts.PortsFileDir, "ports_file_dir", "", "Creates a ports file in the specified directory (<executable_name>_<pid>.ports).") fs.StringVar(&opts.LogFile, "l", "", "File to store logging output.") fs.StringVar(&opts.LogFile, "log", "", "File to store logging output.") fs.Int64Var(&opts.LogSizeLimit, "log_size_limit", 0, "Logfile size limit being auto-rotated") fs.BoolVar(&opts.Syslog, "s", false, "Enable syslog as log method.") fs.BoolVar(&opts.Syslog, "syslog", false, "Enable syslog as log method.") fs.StringVar(&opts.RemoteSyslog, "r", "", "Syslog server addr (udp://127.0.0.1:514).") fs.StringVar(&opts.RemoteSyslog, "remote_syslog", "", "Syslog server addr (udp://127.0.0.1:514).") fs.BoolVar(&showVersion, "version", false, "Print version information.") fs.BoolVar(&showVersion, "v", false, "Print version information.") fs.IntVar(&opts.ProfPort, "profile", 0, "Profiling HTTP port.") fs.StringVar(&opts.RoutesStr, "routes", "", "Routes to actively solicit a connection.") fs.StringVar(&opts.Cluster.ListenStr, "cluster", "", "Cluster url from which members can solicit routes.") fs.StringVar(&opts.Cluster.ListenStr, "cluster_listen", "", "Cluster url from which members can solicit routes.") fs.StringVar(&opts.Cluster.Advertise, "cluster_advertise", "", "Cluster URL to advertise to other servers.") fs.BoolVar(&opts.Cluster.NoAdvertise, "no_advertise", false, "Advertise known cluster IPs to clients.") fs.IntVar(&opts.Cluster.ConnectRetries, "connect_retries", 0, "For implicit routes, number of connect retries.") fs.BoolVar(&showTLSHelp, "help_tls", false, "TLS help.") fs.BoolVar(&opts.TLS, "tls", false, "Enable TLS.") fs.BoolVar(&opts.TLSVerify, "tlsverify", false, "Enable TLS with client verification.") fs.StringVar(&opts.TLSCert, "tlscert", "", "Server certificate file.") fs.StringVar(&opts.TLSKey, "tlskey", "", "Private key for server certificate.") fs.StringVar(&opts.TLSCaCert, "tlscacert", "", "Client certificate CA for verification.") fs.IntVar(&opts.MaxTracedMsgLen, "max_traced_msg_len", 0, "Maximum printable length for traced messages. 0 for unlimited.") fs.BoolVar(&opts.JetStream, "js", false, "Enable JetStream.") fs.BoolVar(&opts.JetStream, "jetstream", false, "Enable JetStream.") fs.StringVar(&opts.StoreDir, "sd", "", "Storage directory.") fs.StringVar(&opts.StoreDir, "store_dir", "", "Storage directory.") // The flags definition above set "default" values to some of the options. // Calling Parse() here will override the default options with any value // specified from the command line. This is ok. We will then update the // options with the content of the configuration file (if present), and then, // call Parse() again to override the default+config with command line values. // Calling Parse() before processing config file is necessary since configFile // itself is a command line argument, and also Parse() is required in order // to know if user wants simply to show "help" or "version", etc... 
if err := fs.Parse(args); err != nil { return nil, err } if showVersion { printVersion() return nil, nil } if showHelp { printHelp() return nil, nil } if showTLSHelp { printTLSHelp() return nil, nil } // Process args looking for non-flag options, // 'version' and 'help' only for now showVersion, showHelp, err = ProcessCommandLineArgs(fs) if err != nil { return nil, err } else if showVersion { printVersion() return nil, nil } else if showHelp { printHelp() return nil, nil } // Snapshot flag options. FlagSnapshot = opts.Clone() // Keep track of the boolean flags that were explicitly set with their value. fs.Visit(func(f *flag.Flag) { switch f.Name { case "DV": trackExplicitVal(FlagSnapshot, &FlagSnapshot.inCmdLine, "Debug", dbgAndTrace) trackExplicitVal(FlagSnapshot, &FlagSnapshot.inCmdLine, "Trace", dbgAndTrace) case "D": fallthrough case "debug": trackExplicitVal(FlagSnapshot, &FlagSnapshot.inCmdLine, "Debug", FlagSnapshot.Debug) case "V": fallthrough case "trace": trackExplicitVal(FlagSnapshot, &FlagSnapshot.inCmdLine, "Trace", FlagSnapshot.Trace) case "T": fallthrough case "logtime": trackExplicitVal(FlagSnapshot, &FlagSnapshot.inCmdLine, "Logtime", FlagSnapshot.Logtime) case "s": fallthrough case "syslog": trackExplicitVal(FlagSnapshot, &FlagSnapshot.inCmdLine, "Syslog", FlagSnapshot.Syslog) case "no_advertise": trackExplicitVal(FlagSnapshot, &FlagSnapshot.inCmdLine, "Cluster.NoAdvertise", FlagSnapshot.Cluster.NoAdvertise) } }) // Process signal control. if signal != "" { if err := processSignal(signal); err != nil { return nil, err } } // Parse config if given if configFile != "" { // This will update the options with values from the config file. err := opts.ProcessConfigFile(configFile) if err != nil { if opts.CheckConfig { return nil, err } // If only warnings then can still continue. if cerr, ok := err.(*processConfigErr); ok && len(cerr.Errors()) == 0 { fmt.Fprint(os.Stderr, err) return opts, nil } return nil, err } else if opts.CheckConfig { // Report configuration file syntax test was successful and exit. return opts, nil } // Call this again to override config file options with options from command line. // Note: We don't need to check error here since if there was an error, it would // have been caught the first time this function was called (after setting up the // flags). fs.Parse(args) } else if opts.CheckConfig { return nil, fmt.Errorf("must specify [-c, --config] option to check configuration file syntax") } // Special handling of some flags var ( flagErr error tlsDisabled bool tlsOverride bool ) fs.Visit(func(f *flag.Flag) { // short-circuit if an error was encountered if flagErr != nil { return } if strings.HasPrefix(f.Name, "tls") { if f.Name == "tls" { if !opts.TLS { // User has specified "-tls=false", we need to disable TLS opts.TLSConfig = nil tlsDisabled = true tlsOverride = false return } tlsOverride = true } else if !tlsDisabled { tlsOverride = true } } else { switch f.Name { case "DV": // Check value to support -DV=false opts.Trace, opts.Debug = dbgAndTrace, dbgAndTrace case "cluster", "cluster_listen": // Override cluster config if explicitly set via flags. flagErr = overrideCluster(opts) case "routes": // Keep in mind that the flag has updated opts.RoutesStr at this point. 
if opts.RoutesStr == "" { // Set routes array to nil since routes string is empty opts.Routes = nil return } routeUrls := RoutesFromStr(opts.RoutesStr) opts.Routes = routeUrls } } }) if flagErr != nil { return nil, flagErr } // This will be true if some of the `-tls` params have been set and // `-tls=false` has not been set. if tlsOverride { if err := overrideTLS(opts); err != nil { return nil, err } } // If we don't have cluster defined in the configuration // file and no cluster listen string override, but we do // have a routes override, we need to report misconfiguration. if opts.RoutesStr != "" && opts.Cluster.ListenStr == "" && opts.Cluster.Host == "" && opts.Cluster.Port == 0 { return nil, errors.New("solicited routes require cluster capabilities, e.g. --cluster") } return opts, nil } // overrideTLS is called when at least "-tls=true" has been set. func overrideTLS(opts *Options) error { if opts.TLSCert == "" { return errors.New("TLS Server certificate must be present and valid") } if opts.TLSKey == "" { return errors.New("TLS Server private key must be present and valid") } tc := TLSConfigOpts{} tc.CertFile = opts.TLSCert tc.KeyFile = opts.TLSKey tc.CaFile = opts.TLSCaCert tc.Verify = opts.TLSVerify var err error opts.TLSConfig, err = GenTLSConfig(&tc) return err } // overrideCluster updates Options.Cluster if that flag "cluster" (or "cluster_listen") // has explicitly be set in the command line. If it is set to empty string, it will // clear the Cluster options. func overrideCluster(opts *Options) error { if opts.Cluster.ListenStr == "" { // This one is enough to disable clustering. opts.Cluster.Port = 0 return nil } // -1 will fail url.Parse, so if we have -1, change it to // 0, and then after parse, replace the port with -1 so we get // automatic port allocation wantsRandom := false if strings.HasSuffix(opts.Cluster.ListenStr, ":-1") { wantsRandom = true cls := fmt.Sprintf("%s:0", opts.Cluster.ListenStr[0:len(opts.Cluster.ListenStr)-3]) opts.Cluster.ListenStr = cls } clusterURL, err := url.Parse(opts.Cluster.ListenStr) if err != nil { return err } h, p, err := net.SplitHostPort(clusterURL.Host) if err != nil { return err } if wantsRandom { p = "-1" } opts.Cluster.Host = h _, err = fmt.Sscan(p, &opts.Cluster.Port) if err != nil { return err } if clusterURL.User != nil { pass, hasPassword := clusterURL.User.Password() if !hasPassword { return errors.New("expected cluster password to be set") } opts.Cluster.Password = pass user := clusterURL.User.Username() opts.Cluster.Username = user } else { // Since we override from flag and there is no user/pwd, make // sure we clear what we may have gotten from config file. opts.Cluster.Username = "" opts.Cluster.Password = "" } return nil } func processSignal(signal string) error { var ( pid string commandAndPid = strings.Split(signal, "=") ) if l := len(commandAndPid); l == 2 { pid = maybeReadPidFile(commandAndPid[1]) } else if l > 2 { return fmt.Errorf("invalid signal parameters: %v", commandAndPid[2:]) } if err := ProcessSignal(Command(commandAndPid[0]), pid); err != nil { return err } os.Exit(0) return nil } // maybeReadPidFile returns a PID or Windows service name obtained via the following method: // 1. Try to open a file with path "pidStr" (absolute or relative). // 2. If such a file exists and can be read, return its contents. // 3. Otherwise, return the original "pidStr" string. 
func maybeReadPidFile(pidStr string) string { if b, err := ioutil.ReadFile(pidStr); err == nil { return string(b) } return pidStr } func homeDir() (string, error) { if runtime.GOOS == "windows" { homeDrive, homePath := os.Getenv("HOMEDRIVE"), os.Getenv("HOMEPATH") userProfile := os.Getenv("USERPROFILE") home := filepath.Join(homeDrive, homePath) if homeDrive == "" || homePath == "" { if userProfile == "" { return "", errors.New("nats: failed to get home dir, require %HOMEDRIVE% and %HOMEPATH% or %USERPROFILE%") } home = userProfile } return home, nil } home := os.Getenv("HOME") if home == "" { return "", errors.New("failed to get home dir, require $HOME") } return home, nil } func expandPath(p string) (string, error) { p = os.ExpandEnv(p) if !strings.HasPrefix(p, "~") { return p, nil } home, err := homeDir() if err != nil { return "", err } return filepath.Join(home, p[1:]), nil }
1
9908
Maybe rename this to JetStreamStoreDir?
nats-io-nats-server
go
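The review comment above ("Maybe rename this to JetStreamStoreDir?") points at the JetStream flags near the end of the file, where the storage directory is registered on a generically named StoreDir field. A minimal Go sketch of what the suggested rename could look like; the Options struct below is a stripped-down stand-in, and the new field name comes from the reviewer's suggestion, not from any merged change:

package main

import "flag"

// Stand-in for the server's much larger Options struct; only the two
// JetStream-related fields are shown.
type Options struct {
	JetStream         bool
	JetStreamStoreDir string // reviewer's proposed name for StoreDir
}

func registerJetStreamFlags(fs *flag.FlagSet, opts *Options) {
	fs.BoolVar(&opts.JetStream, "js", false, "Enable JetStream.")
	fs.BoolVar(&opts.JetStream, "jetstream", false, "Enable JetStream.")
	// Same CLI flag names as in the original file; only the Go identifier
	// changes, so call sites read as unambiguously JetStream-scoped.
	fs.StringVar(&opts.JetStreamStoreDir, "sd", "", "Storage directory.")
	fs.StringVar(&opts.JetStreamStoreDir, "store_dir", "", "Storage directory.")
}

func main() {
	opts := &Options{}
	registerJetStreamFlags(flag.CommandLine, opts)
	flag.Parse()
}

The rename keeps the CLI surface stable while making the Go field self-describing.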
@@ -52,7 +52,8 @@ var _ = Describe("UsageReporter with mocked URL and short interval", func() { httpHandler = &requestRecorder{} go func() { defer GinkgoRecover() - http.Serve(tcpListener, httpHandler) + // TODO: Investigate why this call sometimes returns an error. + _ = http.Serve(tcpListener, httpHandler) }() // Channels to send data to the UsageReporter.
1
// Copyright (c) 2016-2018 Tigera, Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package usagerep import ( "net/url" "time" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" "context" "fmt" "net" "net/http" "sync" "github.com/projectcalico/felix/buildinfo" "github.com/projectcalico/felix/calc" ) const expectedNumberOfURLParams = 12 // These tests start a local HTTP server on a random port and tell the usage reporter to // connect to it. Then we can check that it correctly makes HTTP requests at the right times. var _ = Describe("UsageReporter with mocked URL and short interval", func() { var u *UsageReporter var tcpListener net.Listener var httpHandler *requestRecorder var ctx context.Context var cancel context.CancelFunc var statsUpdateC chan calc.StatsUpdate var configUpdateC chan map[string]string BeforeEach(func() { // Open a listener on a random local port. var err error tcpListener, err = net.Listen("tcp", ":0") Expect(err).NotTo(HaveOccurred()) httpHandler = &requestRecorder{} go func() { defer GinkgoRecover() http.Serve(tcpListener, httpHandler) }() // Channels to send data to the UsageReporter. statsUpdateC = make(chan calc.StatsUpdate) configUpdateC = make(chan map[string]string) // Create a usage reporter and override its base URL and initial interval. 
u = New(500*time.Millisecond, 1*time.Second, statsUpdateC, configUpdateC) port := tcpListener.Addr().(*net.TCPAddr).Port u.BaseURL = fmt.Sprintf("http://localhost:%d/UsageCheck/calicoVersionCheck?", port) ctx, cancel = context.WithCancel(context.Background()) go u.PeriodicallyReportUsage(ctx) }) AfterEach(func() { cancel() tcpListener.Close() }) It("should not check in before receiving config/stats", func() { Consistently(httpHandler.GetRequestURIs, "2s").Should(BeEmpty()) }) Context("after sending config", func() { sendConfig := func() { configUpdateC <- map[string]string{ "ClusterGUID": "someguid", "ClusterType": "openstack,k8s,kdd", "CalicoVersion": "v2.6.3", } } BeforeEach(func() { sendConfig() }) It("should not check in before receiving stats", func() { Consistently(httpHandler.GetRequestURIs, "2s").Should(BeEmpty()) }) Context("after sending stats", func() { sendStats := func() { statsUpdateC <- calc.StatsUpdate{ NumHosts: 1, NumHostEndpoints: 2, NumWorkloadEndpoints: 3, NumPolicies: 4, NumProfiles: 5, NumALPPolicies: 6, } } BeforeEach(func() { sendStats() }) It("should do first check ins correctly", func() { By("checking in within 2s") startTime := time.Now() Eventually(httpHandler.GetRequestURIs, "2s", "100ms").Should(HaveLen(1)) By("waiting until after the initial delay") Expect(time.Since(startTime)).To(BeNumerically(">=", 500*time.Millisecond)) By("including correct URL parameters") uri := httpHandler.GetRequestURIs()[0] url, err := url.Parse(uri) Expect(err).NotTo(HaveOccurred()) Expect(url.Path).To(Equal("/UsageCheck/calicoVersionCheck")) q := url.Query() Expect(q).To(HaveLen(expectedNumberOfURLParams), "unexpected number of URL parameters") Expect(q.Get("guid")).To(Equal("someguid")) Expect(q.Get("type")).To(Equal("openstack,k8s,kdd")) Expect(q.Get("cal_ver")).To(Equal("v2.6.3")) Expect(q.Get("alp")).To(Equal("false")) Expect(q.Get("size")).To(Equal("1")) Expect(q.Get("heps")).To(Equal("2")) Expect(q.Get("weps")).To(Equal("3")) Expect(q.Get("policies")).To(Equal("4")) Expect(q.Get("profiles")).To(Equal("5")) Expect(q.Get("alp_policies")).To(Equal("6")) By("checking in again") Eventually(httpHandler.GetRequestURIs, "2s", "100ms").Should(HaveLen(2)) By("waiting until at least initial delay + 90% (due to jitter) of interval for second check in") Expect(time.Since(startTime)).To(BeNumerically(">=", 1400*time.Millisecond)) }) It("should not block the channels while doing initial delay", func() { startTime := time.Now() // We created the channel as a blocking channel so, if we can send a few updates, // we know that the main loop is processing them sendStats() sendStats() sendStats() sendConfig() sendConfig() sendConfig() Expect(time.Since(startTime)).To(BeNumerically("<", 100*time.Millisecond)) }) Context("after first report, and sending in config and stat updates", func() { BeforeEach(func() { Eventually(httpHandler.GetRequestURIs, "2s", "100ms").Should(HaveLen(1)) statsUpdateC <- calc.StatsUpdate{ NumHosts: 10, NumHostEndpoints: 20, NumWorkloadEndpoints: 30, NumPolicies: 40, NumProfiles: 50, NumALPPolicies: 60, } configUpdateC <- map[string]string{ "ClusterGUID": "someguid2", "ClusterType": "openstack,k8s,kdd,typha", "CalicoVersion": "v3.0.0", "PolicySyncPathPrefix": "/var/run/nodeagent", } }) It("should do second check in correctly", func() { By("checking in within 2s") Eventually(httpHandler.GetRequestURIs, "2s", "100ms").Should(HaveLen(2)) By("including correct URL parameters") uri := httpHandler.GetRequestURIs()[1] url, err := url.Parse(uri) Expect(err).NotTo(HaveOccurred()) 
q := url.Query() Expect(q).To(HaveLen(expectedNumberOfURLParams), "unexpected number of URL parameters") Expect(q.Get("guid")).To(Equal("someguid2")) Expect(q.Get("type")).To(Equal("openstack,k8s,kdd,typha")) Expect(q.Get("cal_ver")).To(Equal("v3.0.0")) Expect(q.Get("alp")).To(Equal("true")) Expect(q.Get("size")).To(Equal("10")) Expect(q.Get("heps")).To(Equal("20")) Expect(q.Get("weps")).To(Equal("30")) Expect(q.Get("policies")).To(Equal("40")) Expect(q.Get("profiles")).To(Equal("50")) Expect(q.Get("alp_policies")).To(Equal("60")) }) }) }) }) }) type requestRecorder struct { lock sync.Mutex requestsReceived []string } func (h *requestRecorder) ServeHTTP(resp http.ResponseWriter, req *http.Request) { h.lock.Lock() defer h.lock.Unlock() h.requestsReceived = append(h.requestsReceived, req.RequestURI) resp.Write([]byte(`{"usage_warning": "Warning!"}`)) } func (h *requestRecorder) GetRequestURIs() []string { h.lock.Lock() defer h.lock.Unlock() var result []string for _, r := range h.requestsReceived { result = append(result, r) } return result } // These tests create a usage reporter but they don't start it. Instead they validate its // internal calculation methods and and the default configuration. var _ = Describe("UsageReporter with default URL", func() { var u *UsageReporter BeforeEach(func() { u = New(5*time.Minute, 24*time.Hour, nil, nil) }) It("should calculate correct URL mainline", func() { rawURL := u.calculateURL("theguid", "atype", "testVer", true, calc.StatsUpdate{ NumHostEndpoints: 123, NumWorkloadEndpoints: 234, NumHosts: 10, }) url, err := url.Parse(rawURL) Expect(err).NotTo(HaveOccurred()) q := url.Query() Expect(q).To(HaveLen(expectedNumberOfURLParams), "unexpected number of URL parameters") Expect(q.Get("guid")).To(Equal("theguid")) Expect(q.Get("type")).To(Equal("atype")) Expect(q.Get("cal_ver")).To(Equal("testVer")) Expect(q.Get("alp")).To(Equal("true")) Expect(q.Get("size")).To(Equal("10")) Expect(q.Get("weps")).To(Equal("234")) Expect(q.Get("heps")).To(Equal("123")) Expect(q.Get("version")).To(Equal(buildinfo.GitVersion)) Expect(q.Get("rev")).To(Equal(buildinfo.GitRevision)) Expect(url.Host).To(Equal("usage.projectcalico.org")) Expect(url.Scheme).To(Equal("https")) Expect(url.Path).To(Equal("/UsageCheck/calicoVersionCheck")) }) It("should default cluster type, GUID, and Calico Version", func() { rawURL := u.calculateURL("", "", "", false, calc.StatsUpdate{ NumHostEndpoints: 123, NumWorkloadEndpoints: 234, NumHosts: 10, }) url, err := url.Parse(rawURL) Expect(err).NotTo(HaveOccurred()) q := url.Query() Expect(q).To(HaveLen(expectedNumberOfURLParams), "unexpected number of URL parameters") Expect(q.Get("guid")).To(Equal("baddecaf")) Expect(q.Get("type")).To(Equal("unknown")) Expect(q.Get("cal_ver")).To(Equal("unknown")) Expect(q.Get("alp")).To(Equal("false")) }) It("should delay at least 5 minutes", func() { Expect(u.calculateInitialDelay(0)).To(BeNumerically(">=", 5*time.Minute)) Expect(u.calculateInitialDelay(1)).To(BeNumerically(">=", 5*time.Minute)) Expect(u.calculateInitialDelay(1000)).To(BeNumerically(">=", 5*time.Minute)) }) It("should delay at most 10000 seconds", func() { Expect(u.calculateInitialDelay(10000)).To(BeNumerically("<=", 5*time.Minute+10000*time.Second)) Expect(u.calculateInitialDelay(100000)).To(BeNumerically("<=", 5*time.Minute+10000*time.Second)) Expect(u.calculateInitialDelay(1000000)).To(BeNumerically("<=", 5*time.Minute+10000*time.Second)) Expect(u.calculateInitialDelay(10000000)).To(BeNumerically("<=", 5*time.Minute+10000*time.Second)) }) 
It("should have a random component", func() { firstDelay := u.calculateInitialDelay(1000) for i := 0; i < 10; i++ { if u.calculateInitialDelay(1000) != firstDelay { return // Success } } Fail("Generated 10 delays but they were all the same") }) It("should have an average close to expected value", func() { var total time.Duration // Give it a high but bounded number of iterations to converge. for i := int64(0); i < 100000; i++ { total += u.calculateInitialDelay(60) if i > 100 { average := time.Duration(int64(total) / (i + 1)) // Delay should an average of 0.5s per host so the average should // be close to 5min30s. if average > (5*time.Minute+20*time.Second) && average < (5*time.Minute+40*time.Second) { // Pass! return } } } Fail("Average of initial delay failed to converge after many iterations") }) })
1
17213
You can start by logging the error
projectcalico-felix
go
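The patch above swaps a bare http.Serve call for "_ = http.Serve(...)" plus a TODO, and the reviewer suggests logging the error as a first step. A minimal sketch of that suggestion as a drop-in replacement for the goroutine in BeforeEach (add "log" to the imports; the standard library logger is used here for illustration, the project may prefer its own). Note that a non-nil error is expected at teardown: AfterEach closes tcpListener, which makes Serve's accept loop fail, and that is the likely source of the error the TODO asks about.

go func() {
	defer GinkgoRecover()
	// Per the review comment: surface the error instead of discarding it.
	// An error is normal once AfterEach closes tcpListener, so log it
	// rather than asserting on it.
	if err := http.Serve(tcpListener, httpHandler); err != nil {
		log.Printf("http.Serve exited: %v", err)
	}
}()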
@@ -31,7 +31,7 @@ namespace Nethermind.JsonRpc.Modules.Eth.GasPrice { private readonly IBlockFinder _blockFinder; private readonly UInt256 _minGasPrice; - public UInt256 FallbackGasPrice => LastGasPrice ?? _minGasPrice; + public UInt256 FallbackGasPrice(in UInt256? baseFeePerGas = null) => LastGasPrice ?? GetMinimumGasPrice(baseFeePerGas ?? UInt256.Zero); private ISpecProvider SpecProvider { get; } public UInt256? LastGasPrice { get; set; } public Block? LastHeadBlock { get; set; }
1
// Copyright (c) 2021 Demerzel Solutions Limited // This file is part of the Nethermind library. // // The Nethermind library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The Nethermind library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the Nethermind. If not, see <http://www.gnu.org/licenses/>. // using System; using System.Collections.Generic; using System.Linq; using Nethermind.Blockchain.Find; using Nethermind.Consensus; using Nethermind.Core; using Nethermind.Core.Extensions; using Nethermind.Core.Specs; using Nethermind.Int256; namespace Nethermind.JsonRpc.Modules.Eth.GasPrice { public class GasPriceOracle : IGasPriceOracle { private readonly IBlockFinder _blockFinder; private readonly UInt256 _minGasPrice; public UInt256 FallbackGasPrice => LastGasPrice ?? _minGasPrice; private ISpecProvider SpecProvider { get; } public UInt256? LastGasPrice { get; set; } public Block? LastHeadBlock { get; set; } public UInt256 IgnoreUnder { get; set; } = EthGasPriceConstants.DefaultIgnoreUnder; public int BlockLimit { get; set; } = EthGasPriceConstants.DefaultBlocksLimit; private int SoftTxThreshold => BlockLimit * 2; private readonly UInt256 _defaultMinGasPriceMultiplier = 110; public GasPriceOracle( IBlockFinder blockFinder, ISpecProvider specProvider, UInt256? minGasPrice = null) { _blockFinder = blockFinder; _minGasPrice = _defaultMinGasPriceMultiplier * (minGasPrice ?? new MiningConfig().MinGasPrice) / 100; SpecProvider = specProvider; } public UInt256 GetGasPriceEstimate() { Block? headBlock = _blockFinder.Head; if (headBlock is null) { return FallbackGasPrice; } if (LastGasPrice is not null && LastHeadBlock!.Hash == headBlock!.Hash) { return LastGasPrice.Value; } LastHeadBlock = headBlock; IEnumerable<UInt256> txGasPrices = GetSortedGasPricesFromRecentBlocks(headBlock.Number); UInt256? 
gasPriceEstimate = GetGasPriceAtPercentile(txGasPrices.ToList()); gasPriceEstimate = UInt256.Min((UInt256) gasPriceEstimate!, EthGasPriceConstants.MaxGasPrice); LastGasPrice = gasPriceEstimate; return (UInt256) gasPriceEstimate!; } private IEnumerable<UInt256> GetSortedGasPricesFromRecentBlocks(long blockNumber) => GetGasPricesFromRecentBlocks(blockNumber).OrderBy(gasPrice => gasPrice); public IEnumerable<UInt256> GetGasPricesFromRecentBlocks(long blockNumber) { IEnumerable<Block> GetBlocks(long currentBlockNumber) { while (currentBlockNumber >= 0) { yield return _blockFinder.FindBlock(currentBlockNumber)!; currentBlockNumber--; } } return GetGasPricesFromRecentBlocks(GetBlocks(blockNumber), BlockLimit); } private IEnumerable<UInt256> GetGasPricesFromRecentBlocks(IEnumerable<Block> blocks, int blocksToGoBack) { int txCount = 0; foreach (Block currentBlock in blocks) { Transaction[] currentBlockTransactions = currentBlock.Transactions; int txFromCurrentBlock = 0; bool eip1559Enabled = SpecProvider.GetSpec(currentBlock.Number).IsEip1559Enabled; UInt256 baseFee = currentBlock.BaseFeePerGas; IEnumerable<UInt256> effectiveGasPrices = currentBlockTransactions.Where(tx => tx.SenderAddress != currentBlock.Beneficiary) .Select(tx => tx.CalculateEffectiveGasPrice(eip1559Enabled, baseFee)) .Where(g => g >= IgnoreUnder) .OrderBy(g => g); foreach (UInt256 gasPrice in effectiveGasPrices) { yield return gasPrice; txFromCurrentBlock++; txCount++; if (txFromCurrentBlock >= EthGasPriceConstants.TxLimitFromABlock) { break; } } if (txFromCurrentBlock == 0) { yield return FallbackGasPrice; } if (txFromCurrentBlock > 1 || txCount + blocksToGoBack >= SoftTxThreshold) { blocksToGoBack--; } if (blocksToGoBack < 1) { break; } } } private UInt256 GetGasPriceAtPercentile(List<UInt256> txGasPriceList) { int roundedIndex = GetRoundedIndexAtPercentile(txGasPriceList.Count); return roundedIndex < 0 ? _minGasPrice : txGasPriceList[roundedIndex]; } private static int GetRoundedIndexAtPercentile(int count) { int lastIndex = count - 1; float percentileOfLastIndex = lastIndex * ((float)EthGasPriceConstants.PercentileOfSortedTxs / 100); int roundedIndex = (int) Math.Round(percentileOfLastIndex); return roundedIndex; } } }
1
26,373
In general, what is the reason for the fallback price to be public? If it is public, why does it take baseFeePerGas instead of a block header?
NethermindEth-nethermind
.cs
@@ -80,7 +80,7 @@ import java.util.function.*; * @param <V> Value type * @author Daniel Dietrich, Ruslan Sennov */ -public interface Map<K, V> extends Traversable<Tuple2<K, V>>, Function1<K, V>, PartialFunction<K, V>, Serializable { +public interface Map<K, V> extends Traversable<Tuple2<K, V>>, PartialFunction<K, V>, Serializable { long serialVersionUID = 1L;
1
/* __ __ __ __ __ ___ * \ \ / / \ \ / / __/ * \ \/ / /\ \ \/ / / * \____/__/ \__\____/__/.ɪᴏ * ᶜᵒᵖʸʳᶦᵍʰᵗ ᵇʸ ᵛᵃᵛʳ ⁻ ˡᶦᶜᵉⁿˢᵉᵈ ᵘⁿᵈᵉʳ ᵗʰᵉ ᵃᵖᵃᶜʰᵉ ˡᶦᶜᵉⁿˢᵉ ᵛᵉʳˢᶦᵒⁿ ᵗʷᵒ ᵈᵒᵗ ᶻᵉʳᵒ */ package io.vavr.collection; import io.vavr.*; import io.vavr.control.Option; import java.io.Serializable; import java.util.*; import java.util.function.*; /** * An immutable {@code Map} interface. * * <p> * Basic operations: * * <ul> * <li>{@link #containsKey(Object)}</li> * <li>{@link #containsValue(Object)}</li> * <li>{@link #get(Object)}</li> * <li>{@link #keySet()}</li> * <li>{@link #merge(Map)}</li> * <li>{@link #merge(Map, BiFunction)}</li> * <li>{@link #put(Object, Object)}</li> * <li>{@link #put(Tuple2)}</li> * <li>{@link #put(Object, Object, BiFunction)}</li> * <li>{@link #put(Tuple2, BiFunction)}</li> * <li>{@link #values()}</li> * </ul> * * Conversion: * * <ul> * <li>{@link #toJavaMap()}</li> * </ul> * * Filtering: * * <ul> * <li>{@link #filter(BiPredicate)}</li> * <li>{@link #filterKeys(Predicate)}</li> * <li>{@link #filterValues(Predicate)}</li> * <li>{@link #remove(Object)}</li> * <li>{@link #removeAll(BiPredicate)}</li> * <li>{@link #removeAll(Iterable)}</li> * <li>{@link #removeKeys(Predicate)}</li> * <li>{@link #removeValues(Predicate)}</li> * </ul> * * Iteration: * * <ul> * <li>{@link #forEach(BiConsumer)}</li> * <li>{@link #iterator(BiFunction)}</li> * </ul> * * Transformation: * * <ul> * <li>{@link #bimap(Function, Function)}</li> * <li>{@link #flatMap(BiFunction)}</li> * <li>{@link #lift()}</li> * <li>{@link #map(BiFunction)}</li> * <li>{@link #mapKeys(Function)}</li> * <li>{@link #mapKeys(Function, BiFunction)}</li> * <li>{@link #mapValues(Function)}</li> * <li>{@link #transform(Function)}</li> * <li>{@link #unzip(BiFunction)}</li> * <li>{@link #unzip3(BiFunction)}</li> * <li>{@link #withDefault(Function)}</li> * <li>{@link #withDefaultValue(Object)}</li> * </ul> * * @param <K> Key type * @param <V> Value type * @author Daniel Dietrich, Ruslan Sennov */ public interface Map<K, V> extends Traversable<Tuple2<K, V>>, Function1<K, V>, PartialFunction<K, V>, Serializable { long serialVersionUID = 1L; /** * Narrows a widened {@code Map<? extends K, ? extends V>} to {@code Map<K, V>} * by performing a type-safe cast. This is eligible because immutable/read-only * collections are covariant. * * @param map A {@code Map}. * @param <K> Key type * @param <V> Value type * @return the given {@code map} instance as narrowed type {@code Map<K, V>}. */ @SuppressWarnings("unchecked") static <K, V> Map<K, V> narrow(Map<? extends K, ? extends V> map) { return (Map<K, V>) map; } /** * Convenience factory method to create a key/value pair. * <p> * If imported statically, this method allows to create a {@link Map} with arbitrary entries in a readable and * type-safe way, e.g.: * <pre> * {@code * * HashMap.ofEntries( * entry(k1, v1), * entry(k2, v2), * entry(k3, v3) * ); * * } * </pre> * * @param key the entry's key * @param value the entry's value * @param <K> Key type * @param <V> Value type * @return a key/value pair */ static <K, V> Tuple2<K, V> entry(K key, V value) { return Tuple.of(key, value); } @Override default V apply(K key) { return get(key).getOrElseThrow(() -> new NoSuchElementException(String.valueOf(key))); } @Override default <R> Seq<R> collect(PartialFunction<? super Tuple2<K, V>, ? 
extends R> partialFunction) { return io.vavr.collection.Vector.ofAll(iterator().<R> collect(partialFunction)); } /** * Maps this {@code Map} to a new {@code Map} with different component type by applying a function to its elements. * * @param <K2> key's component type of the map result * @param <V2> value's component type of the map result * @param keyMapper a {@code Function} that maps the keys of type {@code K} to keys of type {@code K2} * @param valueMapper a {@code Function} that the values of type {@code V} to values of type {@code V2} * @return a new {@code Map} * @throws NullPointerException if {@code keyMapper} or {@code valueMapper} is null */ <K2, V2> Map<K2, V2> bimap(Function<? super K, ? extends K2> keyMapper, Function<? super V, ? extends V2> valueMapper); @Override default boolean contains(Tuple2<K, V> element) { return get(element._1).map(v -> Objects.equals(v, element._2)).getOrElse(false); } /** * If the specified key is not already associated with a value, * attempts to compute its value using the given mapping * function and enters it into this map. * * @param key key whose presence in this map is to be tested * @param mappingFunction mapping function * @return the {@link Tuple2} of current or modified map and existing or computed value associated with the specified key */ Tuple2<V, ? extends Map<K, V>> computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction); /** * If the value for the specified key is present, attempts to * compute a new mapping given the key and its current mapped value. * * @param key key whose presence in this map is to be tested * @param remappingFunction remapping function * @return the {@link Tuple2} of current or modified map and the {@code Some} of the value associated * with the specified key, or {@code None} if none */ Tuple2<Option<V>, ? extends Map<K, V>> computeIfPresent(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction); /** * Returns <code>true</code> if this map contains a mapping for the specified key. * * @param key key whose presence in this map is to be tested * @return <code>true</code> if this map contains a mapping for the specified key */ boolean containsKey(K key); /** * Returns <code>true</code> if this map maps one or more keys to the * specified value. This operation will require time linear in the map size. * * @param value value whose presence in this map is to be tested * @return <code>true</code> if this map maps one or more keys to the * specified value */ default boolean containsValue(V value) { return iterator().map(Tuple2::_2).contains(value); } /** * Returns a new Map consisting of all elements which satisfy the given predicate. * * @param predicate the predicate used to test elements * @return a new Map * @throws NullPointerException if {@code predicate} is null */ Map<K, V> filter(BiPredicate<? super K, ? super V> predicate); /** * Returns a new Map consisting of all elements with keys which satisfy the given predicate. * * @param predicate the predicate used to test keys of elements * @return a new Map * @throws NullPointerException if {@code predicate} is null */ Map<K, V> filterKeys(Predicate<? super K> predicate); /** * Returns a new Map consisting of all elements with values which satisfy the given predicate. * * @param predicate the predicate used to test values of elements * @return a new Map * @throws NullPointerException if {@code predicate} is null */ Map<K, V> filterValues(Predicate<? 
super V> predicate); /** * FlatMaps this {@code Map} to a new {@code Map} with different component type. * * @param mapper A mapper * @param <K2> key's component type of the mapped {@code Map} * @param <V2> value's component type of the mapped {@code Map} * @return A new {@code Map}. * @throws NullPointerException if {@code mapper} is null */ <K2, V2> Map<K2, V2> flatMap(BiFunction<? super K, ? super V, ? extends Iterable<Tuple2<K2, V2>>> mapper); /** * Flat-maps this entries to a sequence of values. * <p> * Please use {@link #flatMap(BiFunction)} if the result should be a {@code Map} * * @param mapper A mapper * @param <U> Component type * @return A sequence of flat-mapped values. */ @SuppressWarnings("unchecked") @Override default <U> Seq<U> flatMap(Function<? super Tuple2<K, V>, ? extends Iterable<? extends U>> mapper) { Objects.requireNonNull(mapper, "mapper is null"); // don't remove cast, doesn't compile in Eclipse without it return (Seq<U>) iterator().flatMap(mapper).toStream(); } @Override default <U> U foldRight(U zero, BiFunction<? super Tuple2<K, V>, ? super U, ? extends U> f) { Objects.requireNonNull(f, "f is null"); return iterator().foldRight(zero, f); } /** * Performs an action on key, value pair. * * @param action A {@code BiConsumer} * @throws NullPointerException if {@code action} is null */ default void forEach(BiConsumer<K, V> action) { Objects.requireNonNull(action, "action is null"); for (Tuple2<K, V> t : this) { action.accept(t._1, t._2); } } /** * Returns the {@code Some} of value to which the specified key * is mapped, or {@code None} if this map contains no mapping for the key. * * @param key the key whose associated value is to be returned * @return the {@code Some} of value to which the specified key * is mapped, or {@code None} if this map contains no mapping * for the key */ Option<V> get(K key); /** * Returns the value associated with a key, or a default value if the key is not contained in the map. * * @param key the key * @param defaultValue a default value * @return the value associated with key if it exists, otherwise the default value. */ V getOrElse(K key, V defaultValue); @Override default boolean hasDefiniteSize() { return true; } @Override default boolean isTraversableAgain() { return true; } @Override Iterator<Tuple2<K, V>> iterator(); /** * Iterates this Map sequentially, mapping the (key, value) pairs to elements. * * @param mapper A function that maps (key, value) pairs to elements of type U * @param <U> The type of the resulting elements * @return An iterator through the mapped elements. */ default <U> Iterator<U> iterator(BiFunction<K, V, ? extends U> mapper) { Objects.requireNonNull(mapper, "mapper is null"); return iterator().map(t -> mapper.apply(t._1, t._2)); } /** * Returns the keys contained in this map. * * @return {@code Set} of the keys contained in this map. */ io.vavr.collection.Set<K> keySet(); @Override default int length() { return size(); } /** * Turns this map into a plain function returning an Option result. * * @return a function that takes a key k and returns its value in a Some if found, otherwise a None. */ default Function1<K, Option<V>> lift() { return this::get; } /** * Maps the {@code Map} entries to a sequence of values. * <p> * Please use {@link #map(BiFunction)} if the result has to be of type {@code Map}. * * @param mapper A mapper * @param <U> Component type * @return A sequence of mapped values. */ @SuppressWarnings("unchecked") @Override default <U> Seq<U> map(Function<? super Tuple2<K, V>, ? 
extends U> mapper) { Objects.requireNonNull(mapper, "mapper is null"); // don't remove cast, doesn't compile in Eclipse without it return (Seq<U>) iterator().map(mapper).toStream(); } /** * Maps the entries of this {@code Map} to form a new {@code Map}. * * @param <K2> key's component type of the map result * @param <V2> value's component type of the map result * @param mapper a {@code Function} that maps entries of type {@code (K, V)} to entries of type {@code (K2, V2)} * @return a new {@code Map} * @throws NullPointerException if {@code mapper} is null */ <K2, V2> Map<K2, V2> map(BiFunction<? super K, ? super V, Tuple2<K2, V2>> mapper); /** * Maps the keys of this {@code Map} while preserving the corresponding values. * <p> * The size of the result map may be smaller if {@code keyMapper} maps two or more distinct keys to the same new key. * In this case the value at the {@code latest} of the original keys is retained. * Order of keys is predictable in {@code TreeMap} (by comparator) and {@code LinkedHashMap} (insertion-order) and not predictable in {@code HashMap}. * * @param <K2> the new key type * @param keyMapper a {@code Function} that maps keys of type {@code V} to keys of type {@code V2} * @return a new {@code Map} * @throws NullPointerException if {@code keyMapper} is null */ <K2> Map<K2, V> mapKeys(Function<? super K, ? extends K2> keyMapper); /** * Maps the keys of this {@code Map} while preserving the corresponding values and applying a value merge function on collisions. * <p> * The size of the result map may be smaller if {@code keyMapper} maps two or more distinct keys to the same new key. * In this case the associated values will be combined using {@code valueMerge}. * * @param <K2> the new key type * @param keyMapper a {@code Function} that maps keys of type {@code V} to keys of type {@code V2} * @param valueMerge a {@code BiFunction} that merges values * @return a new {@code Map} * @throws NullPointerException if {@code keyMapper} is null */ <K2> Map<K2, V> mapKeys(Function<? super K, ? extends K2> keyMapper, BiFunction<? super V, ? super V, ? extends V> valueMerge); /** * Maps the values of this {@code Map} while preserving the corresponding keys. * * @param <V2> the new value type * @param valueMapper a {@code Function} that maps values of type {@code V} to values of type {@code V2} * @return a new {@code Map} * @throws NullPointerException if {@code valueMapper} is null */ <V2> Map<K, V2> mapValues(Function<? super V, ? extends V2> valueMapper); /** * Creates a new map which by merging the entries of {@code this} map and {@code that} map. * <p> * If collisions occur, the value of {@code this} map is taken. * * @param that the other map * @return A merged map * @throws NullPointerException if that map is null */ Map<K, V> merge(Map<? extends K, ? extends V> that); /** * Creates a new map which by merging the entries of {@code this} map and {@code that} map. * <p> * Uses the specified collision resolution function if two keys are the same. * The collision resolution function will always take the first argument from <code>this</code> map * and the second from <code>that</code> map. * * @param <U> value type of that Map * @param that the other map * @param collisionResolution the collision resolution function * @return A merged map * @throws NullPointerException if that map or the given collision resolution function is null */ <U extends V> Map<K, V> merge(Map<? extends K, U> that, BiFunction<? super V, ? super U, ? 
extends V> collisionResolution); /** * Associates the specified value with the specified key in this map. * If the map previously contained a mapping for the key, the old value is * replaced by the specified value. * * @param key key with which the specified value is to be associated * @param value value to be associated with the specified key * @return A new Map containing these elements and that entry. */ Map<K, V> put(K key, V value); /** * Convenience method for {@code put(entry._1, entry._2)}. * * @param entry A Tuple2 containing the key and value * @return A new Map containing these elements and that entry. */ Map<K, V> put(Tuple2<? extends K, ? extends V> entry); /** * Associates the specified value with the specified key in this map. * If the map previously contained a mapping for the key, the merge * function is used to combine the previous value to the value to * be inserted, and the result of that call is inserted in the map. * * @param <U> the value type * @param key key with which the specified value is to be associated * @param value value to be associated with the specified key * @param merge function taking the old and new values and merging them. * @return A new Map containing these elements and that entry. */ <U extends V> Map<K, V> put(K key, U value, BiFunction<? super V, ? super U, ? extends V> merge); /** * Convenience method for {@code put(entry._1, entry._2, merge)}. * * @param <U> the value type * @param entry A Tuple2 containing the key and value * @param merge function taking the old and new values and merging them. * @return A new Map containing these elements and that entry. */ <U extends V> Map<K, V> put(Tuple2<? extends K, U> entry, BiFunction<? super V, ? super U, ? extends V> merge); /** * Removes the mapping for a key from this map if it is present. * * @param key key whose mapping is to be removed from the map * @return A new Map containing these elements without the entry * specified by that key. */ Map<K, V> remove(K key); /** * Returns a new Map consisting of all elements which do not satisfy the given predicate. * * @param predicate the predicate used to test elements * @return a new Map * @throws NullPointerException if {@code predicate} is null */ Map<K, V> removeAll(BiPredicate<? super K, ? super V> predicate); /** * Removes the mapping for a key from this map if it is present. * * @param keys keys are to be removed from the map * @return A new Map containing these elements without the entries * specified by that keys. */ Map<K, V> removeAll(Iterable<? extends K> keys); /** * Returns a new Map consisting of all elements with keys which do not satisfy the given predicate. * * @param predicate the predicate used to test keys of elements * @return a new Map * @throws NullPointerException if {@code predicate} is null */ Map<K, V> removeKeys(Predicate<? super K> predicate); /** * Returns a new Map consisting of all elements with values which do not satisfy the given predicate. * * @param predicate the predicate used to test values of elements * @return a new Map * @throws NullPointerException if {@code predicate} is null */ Map<K, V> removeValues(Predicate<? super V> predicate); @Override default <U> Seq<U> scanLeft(U zero, BiFunction<? super U, ? super Tuple2<K, V>, ? extends U> operation) { return io.vavr.collection.Collections.scanLeft(this, zero, operation, io.vavr.collection.Iterator::toVector); } @Override default <U> Seq<U> scanRight(U zero, BiFunction<? super Tuple2<K, V>, ? super U, ? 
extends U> operation) { return io.vavr.collection.Collections.scanRight(this, zero, operation, io.vavr.collection.Iterator::toVector); } @Override int size(); /** * Converts this Vavr {@code Map} to a {@code java.util.Map} while preserving characteristics * like insertion order ({@code LinkedHashMap}) and sort order ({@code SortedMap}). * * @return a new {@code java.util.Map} instance */ java.util.Map<K, V> toJavaMap(); /** * Transforms this {@code Map}. * * @param f A transformation * @param <U> Type of transformation result * @return An instance of type {@code U} * @throws NullPointerException if {@code f} is null */ default <U> U transform(Function<? super Map<K, V>, ? extends U> f) { Objects.requireNonNull(f, "f is null"); return f.apply(this); } default Tuple2<Seq<K>, Seq<V>> unzip() { return unzip(Function.identity()); } default <T1, T2> Tuple2<Seq<T1>, Seq<T2>> unzip(BiFunction<? super K, ? super V, Tuple2<? extends T1, ? extends T2>> unzipper) { Objects.requireNonNull(unzipper, "unzipper is null"); return unzip(entry -> unzipper.apply(entry._1, entry._2)); } @Override default <T1, T2> Tuple2<Seq<T1>, Seq<T2>> unzip(Function<? super Tuple2<K, V>, Tuple2<? extends T1, ? extends T2>> unzipper) { Objects.requireNonNull(unzipper, "unzipper is null"); return iterator().unzip(unzipper).map(Stream::ofAll, Stream::ofAll); } default <T1, T2, T3> Tuple3<Seq<T1>, Seq<T2>, Seq<T3>> unzip3(BiFunction<? super K, ? super V, Tuple3<? extends T1, ? extends T2, ? extends T3>> unzipper) { Objects.requireNonNull(unzipper, "unzipper is null"); return unzip3(entry -> unzipper.apply(entry._1, entry._2)); } @Override default <T1, T2, T3> Tuple3<Seq<T1>, Seq<T2>, Seq<T3>> unzip3( Function<? super Tuple2<K, V>, Tuple3<? extends T1, ? extends T2, ? extends T3>> unzipper) { Objects.requireNonNull(unzipper, "unzipper is null"); return iterator().unzip3(unzipper).map(Stream::ofAll, Stream::ofAll, Stream::ofAll); } Seq<V> values(); /** * Turns this map from a partial function into a total function that * returns a value computed by defaultFunction for all keys * absent from the map. * * @param defaultFunction function to evaluate for all keys not present in the map * @return a total function from K to T */ default Function1<K, V> withDefault(Function<? super K, ? extends V> defaultFunction) { return k -> get(k).getOrElse(() -> defaultFunction.apply(k)); } /** * Turns this map from a partial function into a total function that * returns defaultValue for all keys absent from the map. * * @param defaultValue default value to return for all keys not present in the map * @return a total function from K to T */ default Function1<K, V> withDefaultValue(V defaultValue) { return k -> get(k).getOrElse(defaultValue); } @Override default <U> Seq<Tuple2<Tuple2<K, V>, U>> zip(Iterable<? extends U> that) { return zipWith(that, Tuple::of); } @Override default <U, R> Seq<R> zipWith(Iterable<? extends U> that, BiFunction<? super Tuple2<K, V>, ? super U, ? extends R> mapper) { Objects.requireNonNull(that, "that is null"); Objects.requireNonNull(mapper, "mapper is null"); return Stream.ofAll(iterator().zipWith(that, mapper)); } @Override default <U> Seq<Tuple2<Tuple2<K, V>, U>> zipAll(Iterable<? extends U> that, Tuple2<K, V> thisElem, U thatElem) { Objects.requireNonNull(that, "that is null"); return Stream.ofAll(iterator().zipAll(that, thisElem, thatElem)); } @Override default Seq<Tuple2<Tuple2<K, V>, Integer>> zipWithIndex() { return zipWithIndex(Tuple::of); } @Override default <U> Seq<U> zipWithIndex(BiFunction<? 
super Tuple2<K, V>, ? super Integer, ? extends U> mapper) { Objects.requireNonNull(mapper, "mapper is null"); return Stream.ofAll(iterator().zipWithIndex(mapper)); } // -- Adjusted return types of Traversable methods @Override Map<K, V> distinct(); @Override Map<K, V> distinctBy(Comparator<? super Tuple2<K, V>> comparator); @Override <U> Map<K, V> distinctBy(Function<? super Tuple2<K, V>, ? extends U> keyExtractor); @Override Map<K, V> drop(int n); @Override Map<K, V> dropRight(int n); @Override Map<K, V> dropUntil(Predicate<? super Tuple2<K, V>> predicate); @Override Map<K, V> dropWhile(Predicate<? super Tuple2<K, V>> predicate); @Override Map<K, V> filter(Predicate<? super Tuple2<K, V>> predicate); @Override <C> Map<C, ? extends Map<K, V>> groupBy(Function<? super Tuple2<K, V>, ? extends C> classifier); @Override io.vavr.collection.Iterator<? extends Map<K, V>> grouped(int size); @Override default boolean isDefinedAt(K key) { return containsKey(key); } @Override default boolean isDistinct() { return true; } @Override Map<K, V> init(); @Override Option<? extends Map<K, V>> initOption(); @Override Map<K, V> orElse(Iterable<? extends Tuple2<K, V>> other); @Override Map<K, V> orElse(Supplier<? extends Iterable<? extends Tuple2<K, V>>> supplier); @Override Tuple2<? extends Map<K, V>, ? extends Map<K, V>> partition(Predicate<? super Tuple2<K, V>> predicate); @Override Map<K, V> peek(Consumer<? super Tuple2<K, V>> action); @Override Map<K, V> replace(Tuple2<K, V> currentElement, Tuple2<K, V> newElement); /** * Replaces the entry for the specified key only if it is currently mapped to some value. * * @param key the key of the element to be substituted. * @param value the new value to be associated with the key * @return a new map containing key mapped to value if key was contained before. The old map otherwise. */ Map<K, V> replaceValue(K key, V value); /** * Replaces the entry for the specified key only if currently mapped to the specified value. * * @param key the key of the element to be substituted. * @param oldValue the expected current value that the key is currently mapped to * @param newValue the new value to be associated with the key * @return a new map containing key mapped to newValue if key was contained before and oldValue matched. The old map otherwise. */ Map<K, V> replace(K key, V oldValue, V newValue); /** * Replaces each entry's value with the result of invoking the given function on that entry until all entries have been processed or the function throws an exception. * * @param function function transforming key and current value to a new value * @return a new map with the same keySet but transformed values. */ Map<K, V> replaceAll(BiFunction<? super K, ? super V, ? extends V> function); @Override Map<K, V> replaceAll(Tuple2<K, V> currentElement, Tuple2<K, V> newElement); @Override Map<K, V> retainAll(Iterable<? extends Tuple2<K, V>> elements); @Override Map<K, V> scan(Tuple2<K, V> zero, BiFunction<? super Tuple2<K, V>, ? super Tuple2<K, V>, ? extends Tuple2<K, V>> operation); @Override io.vavr.collection.Iterator<? extends Map<K, V>> slideBy(Function<? super Tuple2<K, V>, ?> classifier); @Override io.vavr.collection.Iterator<? extends Map<K, V>> sliding(int size); @Override io.vavr.collection.Iterator<? extends Map<K, V>> sliding(int size, int step); @Override Tuple2<? extends Map<K, V>, ? extends Map<K, V>> span(Predicate<? super Tuple2<K, V>> predicate); @Override Map<K, V> tail(); @Override Option<? 
extends Map<K, V>> tailOption(); @Override Map<K, V> take(int n); @Override Map<K, V> takeRight(int n); @Override Map<K, V> takeUntil(Predicate<? super Tuple2<K, V>> predicate); @Override Map<K, V> takeWhile(Predicate<? super Tuple2<K, V>> predicate); }
1
12,354
Follow-up to #2002
vavr-io-vavr
java
@@ -162,7 +162,7 @@ public class TiSession implements AutoCloseable { if (tableScanThreadPool == null) { tableScanThreadPool = Executors.newFixedThreadPool( - conf.getTableScanConcurrency(), + conf.getTableScanConcurrency() / conf.getPartitionPerSplit(), new ThreadFactoryBuilder().setDaemon(true).build()); } res = tableScanThreadPool;
1
/* * Copyright 2017 PingCAP, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * See the License for the specific language governing permissions and * limitations under the License. */ package com.pingcap.tikv; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.pingcap.tikv.catalog.Catalog; import com.pingcap.tikv.event.CacheInvalidateEvent; import com.pingcap.tikv.meta.TiTimestamp; import com.pingcap.tikv.region.RegionManager; import com.pingcap.tikv.region.RegionStoreClient; import com.pingcap.tikv.txn.TxnKVClient; import com.pingcap.tikv.util.ChannelFactory; import com.pingcap.tikv.util.ConcreteBackOffer; import java.util.HashMap; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.function.Function; public class TiSession implements AutoCloseable { private final TiConfiguration conf; private final ChannelFactory channelFactory; private Function<CacheInvalidateEvent, Void> cacheInvalidateCallback; // below object creation is either heavy or making connection (pd), pending for lazy loading private volatile PDClient client; private volatile Catalog catalog; private volatile ExecutorService indexScanThreadPool; private volatile ExecutorService tableScanThreadPool; private volatile RegionManager regionManager; private volatile RegionStoreClient.RegionStoreClientBuilder clientBuilder; private static final Map<String, TiSession> sessionCachedMap = new HashMap<>(); public static TiSession getInstance(TiConfiguration conf) { synchronized (sessionCachedMap) { String key = conf.getPdAddrsString(); if (sessionCachedMap.containsKey(key)) { return sessionCachedMap.get(key); } TiSession newSession = new TiSession(conf); sessionCachedMap.put(key, newSession); return newSession; } } private TiSession(TiConfiguration conf) { this.conf = conf; this.channelFactory = new ChannelFactory(conf.getMaxFrameSize()); this.regionManager = null; this.clientBuilder = null; } public TxnKVClient createTxnClient() { return new TxnKVClient(conf, this.getRegionStoreClientBuilder(), this.getPDClient()); } public RegionStoreClient.RegionStoreClientBuilder getRegionStoreClientBuilder() { RegionStoreClient.RegionStoreClientBuilder res = clientBuilder; if (res == null) { synchronized (this) { if (clientBuilder == null) { clientBuilder = new RegionStoreClient.RegionStoreClientBuilder( conf, this.channelFactory, this.getRegionManager()); } res = clientBuilder; } } return res; } public TiConfiguration getConf() { return conf; } public TiTimestamp getTimestamp() { return getPDClient().getTimestamp(ConcreteBackOffer.newTsoBackOff()); } public Snapshot createSnapshot() { return new Snapshot(getTimestamp(), this.conf); } public Snapshot createSnapshot(TiTimestamp ts) { return new Snapshot(ts, conf); } public PDClient getPDClient() { PDClient res = client; if (res == null) { synchronized (this) { if (client == null) { client = PDClient.createRaw(this.getConf(), channelFactory); } res = client; } } return res; } public Catalog getCatalog() { Catalog res = catalog; if (res == null) { synchronized (this) { if (catalog == null) { catalog = new Catalog(this::createSnapshot, conf.ifShowRowId(), conf.getDBPrefix()); } res = catalog; } 
} return res; } public synchronized RegionManager getRegionManager() { RegionManager res = regionManager; if (res == null) { synchronized (this) { if (regionManager == null) { regionManager = new RegionManager(getPDClient(), this.cacheInvalidateCallback); } res = regionManager; } } return res; } public ExecutorService getThreadPoolForIndexScan() { ExecutorService res = indexScanThreadPool; if (res == null) { synchronized (this) { if (indexScanThreadPool == null) { indexScanThreadPool = Executors.newFixedThreadPool( conf.getIndexScanConcurrency(), new ThreadFactoryBuilder().setDaemon(true).build()); } res = indexScanThreadPool; } } return res; } public ExecutorService getThreadPoolForTableScan() { ExecutorService res = tableScanThreadPool; if (res == null) { synchronized (this) { if (tableScanThreadPool == null) { tableScanThreadPool = Executors.newFixedThreadPool( conf.getTableScanConcurrency(), new ThreadFactoryBuilder().setDaemon(true).build()); } res = tableScanThreadPool; } } return res; } /** * This is used for setting call back function to invalidate cache information * * @param callBackFunc callback function */ public void injectCallBackFunc(Function<CacheInvalidateEvent, Void> callBackFunc) { this.cacheInvalidateCallback = callBackFunc; } @Override public synchronized void close() throws Exception { synchronized (sessionCachedMap) { sessionCachedMap.remove(conf.getPdAddrsString()); } if (tableScanThreadPool != null) { tableScanThreadPool.shutdownNow(); } if (indexScanThreadPool != null) { indexScanThreadPool.shutdownNow(); } if (client != null) { getPDClient().close(); } if (catalog != null) { getCatalog().close(); } } }
1
12,081
Could you explain what problem you want to solve with this change, and how/why it works?
pingcap-tispark
java
@@ -2,16 +2,18 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; +using Xunit; using Xunit.Abstractions; using Xunit.Sdk; namespace Xunit.NetCore.Extensions { /// <summary>Wraps another test case that should be skipped.</summary> - internal sealed class SkippedTestCase : IXunitTestCase + internal sealed class SkippedTestCase : LongLivedMarshalByRefObject, IXunitTestCase { private readonly IXunitTestCase _testCase; private readonly string _skippedReason;
1
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; using Xunit.Abstractions; using Xunit.Sdk; namespace Xunit.NetCore.Extensions { /// <summary>Wraps another test case that should be skipped.</summary> internal sealed class SkippedTestCase : IXunitTestCase { private readonly IXunitTestCase _testCase; private readonly string _skippedReason; internal SkippedTestCase(IXunitTestCase testCase, string skippedReason) { _testCase = testCase; _skippedReason = skippedReason; } public string DisplayName { get { return _testCase.DisplayName; } } public IMethodInfo Method { get { return _testCase.Method; } } public string SkipReason { get { return _skippedReason; } } public ISourceInformation SourceInformation { get { return _testCase.SourceInformation; } set { _testCase.SourceInformation = value; } } public ITestMethod TestMethod { get { return _testCase.TestMethod; } } public object[] TestMethodArguments { get { return _testCase.TestMethodArguments; } } public Dictionary<string, List<string>> Traits { get { return _testCase.Traits; } } public string UniqueID { get { return _testCase.UniqueID; } } public void Deserialize(IXunitSerializationInfo info) { _testCase.Deserialize(info); } public Task<RunSummary> RunAsync( IMessageSink diagnosticMessageSink, IMessageBus messageBus, object[] constructorArguments, ExceptionAggregator aggregator, CancellationTokenSource cancellationTokenSource) { return new XunitTestCaseRunner(this, DisplayName, _skippedReason, constructorArguments, TestMethodArguments, messageBus, aggregator, cancellationTokenSource).RunAsync(); } public void Serialize(IXunitSerializationInfo info) { _testCase.Serialize(info); } } }
1
12,779
Does LongLivedMarshalByRefObject exist in both the netstandard and netfx versions of xunit?
dotnet-buildtools
.cs
@@ -47,6 +47,12 @@ module Selenium @bridge.send_command(cmd: cmd, params: params) end + def print_page(**options) + options[:page_ranges] &&= Array(options[:page_ranges]) + + bridge.print_page(options) + end + private def debugger_address
1
# frozen_string_literal: true # Licensed to the Software Freedom Conservancy (SFC) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The SFC licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. module Selenium module WebDriver module Chrome # # Driver implementation for Chrome. # @api private # class Driver < WebDriver::Driver include DriverExtensions::HasNetworkConditions include DriverExtensions::HasWebStorage include DriverExtensions::HasLocation include DriverExtensions::DownloadsFiles include DriverExtensions::HasDevTools include DriverExtensions::HasAuthentication include DriverExtensions::HasLogEvents def browser :chrome end def bridge_class Bridge end def execute_cdp(cmd, **params) @bridge.send_command(cmd: cmd, params: params) end private def debugger_address capabilities['goog:chromeOptions']['debuggerAddress'] end end # Driver end # Chrome end # WebDriver end # Selenium
1
18,367
The bridge isn't defined as an accessor/reader here, to try to mask it better, so you need to call the ivar `@bridge` directly.
SeleniumHQ-selenium
rb
@@ -85,12 +85,11 @@ func Run(ctx context.Context, cfg *config.Node) error { } logrus.Infof("Running containerd %s", config.ArgString(args[1:])) - cmd := exec.Command(args[0], args[1:]...) + cmd := exec.CommandContext(ctx, args[0], args[1:]...) cmd.Stdout = stdOut cmd.Stderr = stdErr cmd.Env = env - addDeathSig(cmd) if err := cmd.Run(); err != nil { fmt.Fprintf(os.Stderr, "containerd: %s\n", err) }
1
package containerd import ( "bufio" "compress/bzip2" "compress/gzip" "context" "fmt" "io" "io/ioutil" "os" "os/exec" "path/filepath" "strings" "time" "github.com/containerd/containerd" "github.com/containerd/containerd/errdefs" "github.com/containerd/containerd/images" "github.com/containerd/containerd/leases" "github.com/containerd/containerd/namespaces" "github.com/containerd/containerd/reference/docker" "github.com/klauspost/compress/zstd" "github.com/natefinch/lumberjack" "github.com/pierrec/lz4" "github.com/pkg/errors" util2 "github.com/rancher/k3s/pkg/agent/util" "github.com/rancher/k3s/pkg/daemons/config" "github.com/rancher/k3s/pkg/untar" "github.com/rancher/k3s/pkg/version" "github.com/rancher/wrangler/pkg/merr" "github.com/sirupsen/logrus" "google.golang.org/grpc" runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1alpha2" ) const ( maxMsgSize = 1024 * 1024 * 16 ) // Run configures and starts containerd as a child process. Once it is up, images are preloaded // or pulled from files found in the agent images directory. func Run(ctx context.Context, cfg *config.Node) error { args := getContainerdArgs(cfg) if err := setupContainerdConfig(ctx, cfg); err != nil { return err } stdOut := io.Writer(os.Stdout) stdErr := io.Writer(os.Stderr) if cfg.Containerd.Log != "" { logrus.Infof("Logging containerd to %s", cfg.Containerd.Log) stdOut = &lumberjack.Logger{ Filename: cfg.Containerd.Log, MaxSize: 50, MaxBackups: 3, MaxAge: 28, Compress: true, } stdErr = stdOut } go func() { env := []string{} for _, e := range os.Environ() { pair := strings.SplitN(e, "=", 2) switch { case pair[0] == "NOTIFY_SOCKET": // elide NOTIFY_SOCKET to prevent spurious notifications to systemd case pair[0] == "CONTAINERD_LOG_LEVEL": // Turn CONTAINERD_LOG_LEVEL variable into log-level flag args = append(args, "--log-level", pair[1]) case strings.HasPrefix(pair[0], "CONTAINERD_"): // Strip variables with CONTAINERD_ prefix before passing through // This allows doing things like setting a proxy for image pulls by setting // CONTAINERD_https_proxy=http://proxy.example.com:8080 pair[0] = strings.TrimPrefix(pair[0], "CONTAINERD_") fallthrough default: env = append(env, strings.Join(pair, "=")) } } logrus.Infof("Running containerd %s", config.ArgString(args[1:])) cmd := exec.Command(args[0], args[1:]...) cmd.Stdout = stdOut cmd.Stderr = stdErr cmd.Env = env addDeathSig(cmd) if err := cmd.Run(); err != nil { fmt.Fprintf(os.Stderr, "containerd: %s\n", err) } os.Exit(1) }() first := true for { conn, err := CriConnection(ctx, cfg.Containerd.Address) if err == nil { conn.Close() break } if first { first = false } else { logrus.Infof("Waiting for containerd startup: %v", err) } select { case <-ctx.Done(): return ctx.Err() case <-time.After(time.Second): } } logrus.Info("Containerd is now running") return preloadImages(ctx, cfg) } // preloadImages reads the contents of the agent images directory, and attempts to // import into containerd any files found there. Supported compressed types are decompressed, and // any .txt files are processed as a list of images that should be pre-pulled from remote registries. // If configured, imported images are retagged as being pulled from additional registries. 
func preloadImages(ctx context.Context, cfg *config.Node) error { fileInfo, err := os.Stat(cfg.Images) if os.IsNotExist(err) { return nil } else if err != nil { logrus.Errorf("Unable to find images in %s: %v", cfg.Images, err) return nil } if !fileInfo.IsDir() { return nil } fileInfos, err := ioutil.ReadDir(cfg.Images) if err != nil { logrus.Errorf("Unable to read images in %s: %v", cfg.Images, err) return nil } client, err := containerd.New(cfg.Containerd.Address) if err != nil { return err } defer client.Close() criConn, err := CriConnection(ctx, cfg.Containerd.Address) if err != nil { return err } defer criConn.Close() // Ensure that nothing else can modify the image store while we're importing, // and that our images are imported into the k8s.io namespace ctx = namespaces.WithNamespace(ctx, "k8s.io") // At startup all leases from k3s are cleared ls := client.LeasesService() existingLeases, err := ls.List(ctx) if err != nil { return err } for _, lease := range existingLeases { if lease.ID == version.Program { logrus.Debugf("Deleting existing lease: %v", lease) ls.Delete(ctx, lease) } } // Any images found on import are given a lease that never expires _, err = ls.Create(ctx, leases.WithID(version.Program)) if err != nil { return err } for _, fileInfo := range fileInfos { if fileInfo.IsDir() { continue } start := time.Now() filePath := filepath.Join(cfg.Images, fileInfo.Name()) if err := preloadFile(ctx, cfg, client, criConn, filePath); err != nil { logrus.Errorf("Error encountered while importing %s: %v", filePath, err) continue } logrus.Debugf("Imported images from %s in %s", filePath, time.Since(start)) } return nil } // preloadFile handles loading images from a single tarball or pre-pull image list. // This is in its own function so that we can ensure that the various readers are properly closed, as some // decompressing readers need to be explicitly closed and others do not. func preloadFile(ctx context.Context, cfg *config.Node, client *containerd.Client, criConn *grpc.ClientConn, filePath string) error { file, err := os.Open(filePath) if err != nil { return err } defer file.Close() var imageReader io.Reader switch { case util2.HasSuffixI(filePath, ".txt"): return prePullImages(ctx, criConn, file) case util2.HasSuffixI(filePath, ".tar"): imageReader = file case util2.HasSuffixI(filePath, ".tar.lz4"): imageReader = lz4.NewReader(file) case util2.HasSuffixI(filePath, ".tar.bz2", ".tbz"): imageReader = bzip2.NewReader(file) case util2.HasSuffixI(filePath, ".tar.gz", ".tgz"): zr, err := gzip.NewReader(file) if err != nil { return err } defer zr.Close() imageReader = zr case util2.HasSuffixI(filePath, "tar.zst", ".tzst"): zr, err := zstd.NewReader(file, zstd.WithDecoderMaxMemory(untar.MaxDecoderMemory)) if err != nil { return err } defer zr.Close() imageReader = zr default: return errors.New("unhandled file type") } logrus.Infof("Importing images from %s", filePath) images, err := client.Import(ctx, imageReader, containerd.WithAllPlatforms(true)) if err != nil { return err } return retagImages(ctx, client, images, cfg.AgentConfig.AirgapExtraRegistry) } // retagImages retags all listed images as having been pulled from the given remote registries. // If duplicate images exist, they are overwritten. This is most useful when using a private registry // for all images, as can be configured by the RKE2/Rancher system-default-registry setting. 
func retagImages(ctx context.Context, client *containerd.Client, images []images.Image, registries []string) error { var errs []error imageService := client.ImageService() for _, image := range images { name, err := parseNamedTagged(image.Name) if err != nil { errs = append(errs, errors.Wrap(err, "failed to parse image name")) continue } logrus.Infof("Imported %s", image.Name) for _, registry := range registries { image.Name = fmt.Sprintf("%s/%s:%s", registry, docker.Path(name), name.Tag()) if _, err = imageService.Create(ctx, image); err != nil { if errdefs.IsAlreadyExists(err) { if err = imageService.Delete(ctx, image.Name); err != nil { errs = append(errs, errors.Wrap(err, "failed to delete existing image")) continue } if _, err = imageService.Create(ctx, image); err != nil { errs = append(errs, errors.Wrap(err, "failed to tag after deleting existing image")) continue } } else { errs = append(errs, errors.Wrap(err, "failed to tag image")) continue } } logrus.Infof("Tagged %s", image.Name) } } return merr.NewErrors(errs...) } // parseNamedTagged parses and normalizes an image name, and converts the resulting reference // to a type that exposes the tag. func parseNamedTagged(name string) (docker.NamedTagged, error) { ref, err := docker.ParseNormalizedNamed(name) if err != nil { return nil, err } tagged, ok := ref.(docker.NamedTagged) if !ok { return nil, fmt.Errorf("can't cast %T to NamedTagged", ref) } return tagged, nil } // prePullImages asks containerd to pull images in a given list, so that they // are ready when the containers attempt to start later. func prePullImages(ctx context.Context, conn *grpc.ClientConn, images io.Reader) error { imageClient := runtimeapi.NewImageServiceClient(conn) scanner := bufio.NewScanner(images) for scanner.Scan() { line := strings.TrimSpace(scanner.Text()) resp, err := imageClient.ImageStatus(ctx, &runtimeapi.ImageStatusRequest{ Image: &runtimeapi.ImageSpec{ Image: line, }, }) if err == nil && resp.Image != nil { continue } logrus.Infof("Pulling image %s...", line) _, err = imageClient.PullImage(ctx, &runtimeapi.PullImageRequest{ Image: &runtimeapi.ImageSpec{ Image: line, }, }) if err != nil { logrus.Errorf("Failed to pull %s: %v", line, err) } } return nil }
1
9,961
Testing on Linux with this change, containerd no longer stops when k3s stops.
k3s-io-k3s
go
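The regression described in the review follows from how the two mechanisms differ: exec.CommandContext kills the child only when the context is cancelled, whereas the removed addDeathSig helper asked the kernel to signal the child whenever the parent dies, cancelled context or not. A minimal sketch of the Linux-only pattern such a helper typically applies — an assumed shape, not k3s's exact helper; real code guards it with a build tag because SysProcAttr.Pdeathsig is Linux-specific:

package procutil

import (
	"context"
	"os/exec"
	"syscall"
)

// commandWithDeathSig builds a command that is killed both when ctx is
// cancelled (via CommandContext) and when the parent thread dies (via
// Pdeathsig), so the child cannot outlive an abruptly terminated parent.
func commandWithDeathSig(ctx context.Context, name string, args ...string) *exec.Cmd {
	cmd := exec.CommandContext(ctx, name, args...)
	cmd.SysProcAttr = &syscall.SysProcAttr{Pdeathsig: syscall.SIGKILL}
	return cmd
}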
@@ -266,6 +266,11 @@ type Config struct { // MaxConnectionReceiveWindow is the connection-level flow control window for receiving data. // If this value is zero, it will default to 15 MB. MaxConnectionReceiveWindow uint64 + // AllowConnectionWindowIncrease is called every time the connection flow controller attempts + // to increase the connection flow control window. + // If set, the caller can prevent an increase of the window. Typically, it would do so to + // limit the memory usage. + AllowConnectionWindowIncrease func(sess Session, delta int) bool // MaxIncomingStreams is the maximum number of concurrent bidirectional streams that a peer is allowed to open. // Values above 2^60 are invalid. // If not set, it will default to 100.
1
package quic import ( "context" "errors" "io" "net" "time" "github.com/lucas-clemente/quic-go/internal/handshake" "github.com/lucas-clemente/quic-go/internal/protocol" "github.com/lucas-clemente/quic-go/logging" ) // The StreamID is the ID of a QUIC stream. type StreamID = protocol.StreamID // A VersionNumber is a QUIC version number. type VersionNumber = protocol.VersionNumber const ( // VersionDraft29 is IETF QUIC draft-29 VersionDraft29 = protocol.VersionDraft29 // Version1 is RFC 9000 Version1 = protocol.Version1 ) // A Token can be used to verify the ownership of the client address. type Token struct { // IsRetryToken encodes how the client received the token. There are two ways: // * In a Retry packet sent when trying to establish a new connection. // * In a NEW_TOKEN frame on a previous connection. IsRetryToken bool RemoteAddr string SentTime time.Time } // A ClientToken is a token received by the client. // It can be used to skip address validation on future connection attempts. type ClientToken struct { data []byte } type TokenStore interface { // Pop searches for a ClientToken associated with the given key. // Since tokens are not supposed to be reused, it must remove the token from the cache. // It returns nil when no token is found. Pop(key string) (token *ClientToken) // Put adds a token to the cache with the given key. It might get called // multiple times in a connection. Put(key string, token *ClientToken) } // Err0RTTRejected is the returned from: // * Open{Uni}Stream{Sync} // * Accept{Uni}Stream // * Stream.Read and Stream.Write // when the server rejects a 0-RTT connection attempt. var Err0RTTRejected = errors.New("0-RTT rejected") // SessionTracingKey can be used to associate a ConnectionTracer with a Session. // It is set on the Session.Context() context, // as well as on the context passed to logging.Tracer.NewConnectionTracer. var SessionTracingKey = sessionTracingCtxKey{} type sessionTracingCtxKey struct{} // Stream is the interface implemented by QUIC streams // In addition to the errors listed on the Session, // calls to stream functions can return a StreamError if the stream is canceled. type Stream interface { ReceiveStream SendStream // SetDeadline sets the read and write deadlines associated // with the connection. It is equivalent to calling both // SetReadDeadline and SetWriteDeadline. SetDeadline(t time.Time) error } // A ReceiveStream is a unidirectional Receive Stream. type ReceiveStream interface { // StreamID returns the stream ID. StreamID() StreamID // Read reads data from the stream. // Read can be made to time out and return a net.Error with Timeout() == true // after a fixed time limit; see SetDeadline and SetReadDeadline. // If the stream was canceled by the peer, the error implements the StreamError // interface, and Canceled() == true. // If the session was closed due to a timeout, the error satisfies // the net.Error interface, and Timeout() will be true. io.Reader // CancelRead aborts receiving on this stream. // It will ask the peer to stop transmitting stream data. // Read will unblock immediately, and future Read calls will fail. // When called multiple times or after reading the io.EOF it is a no-op. CancelRead(StreamErrorCode) // SetReadDeadline sets the deadline for future Read calls and // any currently-blocked Read call. // A zero value for t means Read will not time out. SetReadDeadline(t time.Time) error } // A SendStream is a unidirectional Send Stream. type SendStream interface { // StreamID returns the stream ID. 
StreamID() StreamID // Write writes data to the stream. // Write can be made to time out and return a net.Error with Timeout() == true // after a fixed time limit; see SetDeadline and SetWriteDeadline. // If the stream was canceled by the peer, the error implements the StreamError // interface, and Canceled() == true. // If the session was closed due to a timeout, the error satisfies // the net.Error interface, and Timeout() will be true. io.Writer // Close closes the write-direction of the stream. // Future calls to Write are not permitted after calling Close. // It must not be called concurrently with Write. // It must not be called after calling CancelWrite. io.Closer // CancelWrite aborts sending on this stream. // Data already written, but not yet delivered to the peer is not guaranteed to be delivered reliably. // Write will unblock immediately, and future calls to Write will fail. // When called multiple times or after closing the stream it is a no-op. CancelWrite(StreamErrorCode) // The Context is canceled as soon as the write-side of the stream is closed. // This happens when Close() or CancelWrite() is called, or when the peer // cancels the read-side of their stream. // Warning: This API should not be considered stable and might change soon. Context() context.Context // SetWriteDeadline sets the deadline for future Write calls // and any currently-blocked Write call. // Even if write times out, it may return n > 0, indicating that // some data was successfully written. // A zero value for t means Write will not time out. SetWriteDeadline(t time.Time) error } // A Session is a QUIC connection between two peers. // Calls to the session (and to streams) can return the following types of errors: // * ApplicationError: for errors triggered by the application running on top of QUIC // * TransportError: for errors triggered by the QUIC transport (in many cases a misbehaving peer) // * IdleTimeoutError: when the peer goes away unexpectedly (this is a net.Error timeout error) // * HandshakeTimeoutError: when the cryptographic handshake takes too long (this is a net.Error timeout error) // * StatelessResetError: when we receive a stateless reset (this is a net.Error temporary error) // * VersionNegotiationError: returned by the client, when there's no version overlap between the peers type Session interface { // AcceptStream returns the next stream opened by the peer, blocking until one is available. // If the session was closed due to a timeout, the error satisfies // the net.Error interface, and Timeout() will be true. AcceptStream(context.Context) (Stream, error) // AcceptUniStream returns the next unidirectional stream opened by the peer, blocking until one is available. // If the session was closed due to a timeout, the error satisfies // the net.Error interface, and Timeout() will be true. AcceptUniStream(context.Context) (ReceiveStream, error) // OpenStream opens a new bidirectional QUIC stream. // There is no signaling to the peer about new streams: // The peer can only accept the stream after data has been sent on the stream. // If the error is non-nil, it satisfies the net.Error interface. // When reaching the peer's stream limit, err.Temporary() will be true. // If the session was closed due to a timeout, Timeout() will be true. OpenStream() (Stream, error) // OpenStreamSync opens a new bidirectional QUIC stream. // It blocks until a new stream can be opened. // If the error is non-nil, it satisfies the net.Error interface. 
// If the session was closed due to a timeout, Timeout() will be true. OpenStreamSync(context.Context) (Stream, error) // OpenUniStream opens a new outgoing unidirectional QUIC stream. // If the error is non-nil, it satisfies the net.Error interface. // When reaching the peer's stream limit, Temporary() will be true. // If the session was closed due to a timeout, Timeout() will be true. OpenUniStream() (SendStream, error) // OpenUniStreamSync opens a new outgoing unidirectional QUIC stream. // It blocks until a new stream can be opened. // If the error is non-nil, it satisfies the net.Error interface. // If the session was closed due to a timeout, Timeout() will be true. OpenUniStreamSync(context.Context) (SendStream, error) // LocalAddr returns the local address. LocalAddr() net.Addr // RemoteAddr returns the address of the peer. RemoteAddr() net.Addr // CloseWithError closes the connection with an error. // The error string will be sent to the peer. CloseWithError(ApplicationErrorCode, string) error // The context is cancelled when the session is closed. // Warning: This API should not be considered stable and might change soon. Context() context.Context // ConnectionState returns basic details about the QUIC connection. // It blocks until the handshake completes. // Warning: This API should not be considered stable and might change soon. ConnectionState() ConnectionState // SendMessage sends a message as a datagram. // See https://datatracker.ietf.org/doc/draft-pauly-quic-datagram/. SendMessage([]byte) error // ReceiveMessage gets a message received in a datagram. // See https://datatracker.ietf.org/doc/draft-pauly-quic-datagram/. ReceiveMessage() ([]byte, error) } // An EarlySession is a session that is handshaking. // Data sent during the handshake is encrypted using the forward secure keys. // When using client certificates, the client's identity is only verified // after completion of the handshake. type EarlySession interface { Session // HandshakeComplete blocks until the handshake completes (or fails). // Data sent before completion of the handshake is encrypted with 1-RTT keys. // Note that the client's identity hasn't been verified yet. HandshakeComplete() context.Context NextSession() Session } // Config contains all configuration data needed for a QUIC server or client. type Config struct { // The QUIC versions that can be negotiated. // If not set, it uses all versions available. // Warning: This API should not be considered stable and will change soon. Versions []VersionNumber // The length of the connection ID in bytes. // It can be 0, or any value between 4 and 18. // If not set, the interpretation depends on where the Config is used: // If used for dialing an address, a 0 byte connection ID will be used. // If used for a server, or dialing on a packet conn, a 4 byte connection ID will be used. // When dialing on a packet conn, the ConnectionIDLength value must be the same for every Dial call. ConnectionIDLength int // HandshakeIdleTimeout is the idle timeout before completion of the handshake. // Specifically, if we don't receive any packet from the peer within this time, the connection attempt is aborted. // If this value is zero, the timeout is set to 5 seconds. HandshakeIdleTimeout time.Duration // MaxIdleTimeout is the maximum duration that may pass without any incoming network activity. // The actual value for the idle timeout is the minimum of this value and the peer's. // This value only applies after the handshake has completed. 
// If the timeout is exceeded, the connection is closed. // If this value is zero, the timeout is set to 30 seconds. MaxIdleTimeout time.Duration // AcceptToken determines if a Token is accepted. // It is called with token = nil if the client didn't send a token. // If not set, a default verification function is used: // * it verifies that the address matches, and // * if the token is a retry token, that it was issued within the last 5 seconds // * else, that it was issued within the last 24 hours. // This option is only valid for the server. AcceptToken func(clientAddr net.Addr, token *Token) bool // The TokenStore stores tokens received from the server. // Tokens are used to skip address validation on future connection attempts. // The key used to store tokens is the ServerName from the tls.Config, if set // otherwise the token is associated with the server's IP address. TokenStore TokenStore // InitialStreamReceiveWindow is the initial size of the stream-level flow control window for receiving data. // If the application is consuming data quickly enough, the flow control auto-tuning algorithm // will increase the window up to MaxStreamReceiveWindow. // If this value is zero, it will default to 512 KB. InitialStreamReceiveWindow uint64 // MaxStreamReceiveWindow is the maximum stream-level flow control window for receiving data. // If this value is zero, it will default to 6 MB. MaxStreamReceiveWindow uint64 // InitialConnectionReceiveWindow is the initial size of the stream-level flow control window for receiving data. // If the application is consuming data quickly enough, the flow control auto-tuning algorithm // will increase the window up to MaxConnectionReceiveWindow. // If this value is zero, it will default to 512 KB. InitialConnectionReceiveWindow uint64 // MaxConnectionReceiveWindow is the connection-level flow control window for receiving data. // If this value is zero, it will default to 15 MB. MaxConnectionReceiveWindow uint64 // MaxIncomingStreams is the maximum number of concurrent bidirectional streams that a peer is allowed to open. // Values above 2^60 are invalid. // If not set, it will default to 100. // If set to a negative value, it doesn't allow any bidirectional streams. MaxIncomingStreams int64 // MaxIncomingUniStreams is the maximum number of concurrent unidirectional streams that a peer is allowed to open. // Values above 2^60 are invalid. // If not set, it will default to 100. // If set to a negative value, it doesn't allow any unidirectional streams. MaxIncomingUniStreams int64 // The StatelessResetKey is used to generate stateless reset tokens. // If no key is configured, sending of stateless resets is disabled. StatelessResetKey []byte // KeepAlive defines whether this peer will periodically send a packet to keep the connection alive. KeepAlive bool // DisablePathMTUDiscovery disables Path MTU Discovery (RFC 8899). // Packets will then be at most 1252 (IPv4) / 1232 (IPv6) bytes in size. // Note that Path MTU discovery is always disabled on Windows, see https://github.com/lucas-clemente/quic-go/issues/3273. DisablePathMTUDiscovery bool // DisableVersionNegotiationPackets disables the sending of Version Negotiation packets. // This can be useful if version information is exchanged out-of-band. // It has no effect for a client. DisableVersionNegotiationPackets bool // See https://datatracker.ietf.org/doc/draft-ietf-quic-datagram/. // Datagrams will only be available when both peers enable datagram support. 
EnableDatagrams bool Tracer logging.Tracer } // ConnectionState records basic details about a QUIC connection type ConnectionState struct { TLS handshake.ConnectionState SupportsDatagrams bool } // A Listener for incoming QUIC connections type Listener interface { // Close the server. All active sessions will be closed. Close() error // Addr returns the local network addr that the server is listening on. Addr() net.Addr // Accept returns new sessions. It should be called in a loop. Accept(context.Context) (Session, error) } // An EarlyListener listens for incoming QUIC connections, // and returns them before the handshake completes. type EarlyListener interface { // Close the server. All active sessions will be closed. Close() error // Addr returns the local network addr that the server is listening on. Addr() net.Addr // Accept returns new early sessions. It should be called in a loop. Accept(context.Context) (EarlySession, error) }
1
10,098
Maybe [u]int64 to be consistent with other byte-valued vars in this struct?
lucas-clemente-quic-go
go
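The review above concerns type consistency inside quic-go's Config struct: every byte-valued limit visible in the file (InitialStreamReceiveWindow, MaxStreamReceiveWindow, InitialConnectionReceiveWindow, MaxConnectionReceiveWindow) is declared uint64. A minimal sketch of that convention follows; SomeByteLimit is a hypothetical stand-in for whichever field the patch under review introduced, not a real quic-go field.

package main

// Sketch only: byte-valued sizes in quic.Config are uint64, matching the
// existing flow-control window fields, so a new byte-valued field would
// normally follow suit rather than use int.
type Config struct {
	MaxStreamReceiveWindow     uint64 // bytes, as declared in the real struct
	MaxConnectionReceiveWindow uint64 // bytes, as declared in the real struct
	SomeByteLimit              uint64 // hypothetical field under review
}

func main() {
	cfg := Config{SomeByteLimit: 1 << 20} // 1 MiB; uint64 leaves headroom for large limits
	_ = cfg
}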
@@ -25,7 +25,9 @@ <%- if signed_in? %> <div class="mycart-link"> <ul class="header-nav"> - <li><%= link_to("My Requests", carts_path) %> <%= image_tag "img-cart.png", width: '18px', height:'16px' %></li> + <li><%= link_to "My Requests", carts_path %> <%= image_tag "img-cart.png", width: '18px', height:'16px' %></li> + <%# TODO only show to NCR people %> + <li><%= link_to "New Request", new_ncr_proposal_path %></li> </ul> </div> <%- end %>
1
<div id='communicart_header'> <div class='container'> <div id='header-identity'> <div id="communicart_logo">Communicart</div> <h1>Approval Portal</h1> </div> <ul class='header-nav'> <%- unless signed_in? %> <li> <%= link_to 'Sign in with MyUSA', "/auth/myusa" %> </li> <%- else %> <li> <%= session['user']['email'] %> <%= image_tag 'icon-user.png', alt: ' ' %> </li> <li> <%= link_to 'Logout', "/logout", method: :post, class: 'login-link' %> <%= image_tag 'icon-logout.png', alt: 'Logout' %> </li> <%- end %> </ul> <%- if signed_in? %> <div class="mycart-link"> <ul class="header-nav"> <li><%= link_to("My Requests", carts_path) %> <%= image_tag "img-cart.png", width: '18px', height:'16px' %></li> </ul> </div> <%- end %> </div> </div> <%- unless excluded_portal_link %> <ul id='breadcrumb-nav'> <li><%= link_to "Back to main portal", carts_path %></li> </ul> <%- end %>
1
12,517
This brings up the question... do we need to start assigning users to the different use cases in some way? We don't want this link to show up for Ric, for example.
18F-C2
rb
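The patch's TODO and the review both point at the same gap: the "New Request" link should only render for users tied to the NCR use case. A sketch of that gating, assuming a hypothetical ncr_user? helper (no such method exists in the file above):

<%# Hypothetical gating per the TODO; ncr_user? is an assumed helper. %>
<%- if signed_in? && ncr_user? %>
  <li><%= link_to "New Request", new_ncr_proposal_path %></li>
<%- end %>

Answering the reviewer's question more generally would mean associating users with use cases (for example a role or client field on the user record), which is beyond what this template change can express on its own.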
@@ -36,9 +36,12 @@ template <typename Dtype> BasePrefetchingDataLayer<Dtype>::BasePrefetchingDataLayer( const LayerParameter& param) : BaseDataLayer<Dtype>(param), - prefetch_free_(), prefetch_full_() { - for (int i = 0; i < PREFETCH_COUNT; ++i) { - prefetch_free_.push(&prefetch_[i]); + prefetch_(param.has_data_param() ? + param.data_param().prefetch() : PREFETCH_COUNT), + prefetch_free_(), prefetch_full_(), prefetch_current_() { + for (int i = 0; i < prefetch_.size(); ++i) { + prefetch_[i].reset(new Batch<Dtype>()); + prefetch_free_.push(prefetch_[i].get()); } }
1
#include <boost/thread.hpp> #include <vector> #include "caffe/blob.hpp" #include "caffe/data_transformer.hpp" #include "caffe/internal_thread.hpp" #include "caffe/layer.hpp" #include "caffe/layers/base_data_layer.hpp" #include "caffe/proto/caffe.pb.h" #include "caffe/util/blocking_queue.hpp" namespace caffe { template <typename Dtype> BaseDataLayer<Dtype>::BaseDataLayer(const LayerParameter& param) : Layer<Dtype>(param), transform_param_(param.transform_param()) { } template <typename Dtype> void BaseDataLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) { if (top.size() == 1) { output_labels_ = false; } else { output_labels_ = true; } data_transformer_.reset( new DataTransformer<Dtype>(transform_param_, this->phase_)); data_transformer_->InitRand(); // The subclasses should setup the size of bottom and top DataLayerSetUp(bottom, top); } template <typename Dtype> BasePrefetchingDataLayer<Dtype>::BasePrefetchingDataLayer( const LayerParameter& param) : BaseDataLayer<Dtype>(param), prefetch_free_(), prefetch_full_() { for (int i = 0; i < PREFETCH_COUNT; ++i) { prefetch_free_.push(&prefetch_[i]); } } template <typename Dtype> void BasePrefetchingDataLayer<Dtype>::LayerSetUp( const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) { BaseDataLayer<Dtype>::LayerSetUp(bottom, top); // Before starting the prefetch thread, we make cpu_data and gpu_data // calls so that the prefetch thread does not accidentally make simultaneous // cudaMalloc calls when the main thread is running. In some GPUs this // seems to cause failures if we do not so. for (int i = 0; i < PREFETCH_COUNT; ++i) { prefetch_[i].data_.mutable_cpu_data(); if (this->output_labels_) { prefetch_[i].label_.mutable_cpu_data(); } } #ifndef CPU_ONLY if (Caffe::mode() == Caffe::GPU) { for (int i = 0; i < PREFETCH_COUNT; ++i) { prefetch_[i].data_.mutable_gpu_data(); if (this->output_labels_) { prefetch_[i].label_.mutable_gpu_data(); } } } #endif DLOG(INFO) << "Initializing prefetch"; this->data_transformer_->InitRand(); StartInternalThread(); DLOG(INFO) << "Prefetch initialized."; } template <typename Dtype> void BasePrefetchingDataLayer<Dtype>::InternalThreadEntry() { #ifndef CPU_ONLY cudaStream_t stream; if (Caffe::mode() == Caffe::GPU) { CUDA_CHECK(cudaStreamCreateWithFlags(&stream, cudaStreamNonBlocking)); } #endif try { while (!must_stop()) { Batch<Dtype>* batch = prefetch_free_.pop(); load_batch(batch); #ifndef CPU_ONLY if (Caffe::mode() == Caffe::GPU) { batch->data_.data().get()->async_gpu_push(stream); CUDA_CHECK(cudaStreamSynchronize(stream)); } #endif prefetch_full_.push(batch); } } catch (boost::thread_interrupted&) { // Interrupted exception is expected on shutdown } #ifndef CPU_ONLY if (Caffe::mode() == Caffe::GPU) { CUDA_CHECK(cudaStreamDestroy(stream)); } #endif } template <typename Dtype> void BasePrefetchingDataLayer<Dtype>::Forward_cpu( const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) { Batch<Dtype>* batch = prefetch_full_.pop("Data layer prefetch queue empty"); // Reshape to loaded data. top[0]->ReshapeLike(batch->data_); // Copy the data caffe_copy(batch->data_.count(), batch->data_.cpu_data(), top[0]->mutable_cpu_data()); DLOG(INFO) << "Prefetch copied"; if (this->output_labels_) { // Reshape to loaded labels. top[1]->ReshapeLike(batch->label_); // Copy the labels. 
caffe_copy(batch->label_.count(), batch->label_.cpu_data(), top[1]->mutable_cpu_data()); } prefetch_free_.push(batch); } #ifdef CPU_ONLY STUB_GPU_FORWARD(BasePrefetchingDataLayer, Forward); #endif INSTANTIATE_CLASS(BaseDataLayer); INSTANTIATE_CLASS(BasePrefetchingDataLayer); } // namespace caffe
1
37,568
Unless I'm misunderstanding something, this should just be `prefetch_(param.data_param().prefetch())`, no? That should just use the default value if there's no explicit `data_param` or `data_param.prefetch` set, removing the need to also duplicate and hardcode `PREFETCH_COUNT`.
BVLC-caffe
cpp
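The review's reasoning: protobuf getters return the field's declared default when the field (or its parent message) is unset, so if caffe.proto gives DataParameter.prefetch a default value, param.data_param().prefetch() already yields that default without the has_data_param() guard. A sketch of the simplified constructor the reviewer suggests, assuming such a proto default exists; it drops into the same file as the patch:

// Simplified per the review: rely on the proto default for prefetch
// instead of duplicating PREFETCH_COUNT in C++.
template <typename Dtype>
BasePrefetchingDataLayer<Dtype>::BasePrefetchingDataLayer(
    const LayerParameter& param)
    : BaseDataLayer<Dtype>(param),
      prefetch_(param.data_param().prefetch()),  // proto default applies if unset
      prefetch_free_(), prefetch_full_(), prefetch_current_() {
  for (int i = 0; i < prefetch_.size(); ++i) {
    prefetch_[i].reset(new Batch<Dtype>());
    prefetch_free_.push(prefetch_[i].get());
  }
}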
@@ -55,6 +55,16 @@ func nodeStatus(node *node.AlgorandFullNode) (res v1.NodeStatus, err error) { }, nil } +func txEncode(tx transactions.Transaction, ad transactions.ApplyData) (res v1.Transaction, err error) { + switch tx.Type { + case protocol.PaymentTx: + return paymentTxEncode(tx, ad), nil + case protocol.KeyRegistrationTx: + return keyregTxEncode(tx, ad), nil + } + return v1.Transaction{}, errors.New(errUnknownTransactionType) +} + func paymentTxEncode(tx transactions.Transaction, ad transactions.ApplyData) v1.Transaction { payment := v1.PaymentTransactionType{ To: tx.Receiver.String(),
1
// Copyright (C) 2019 Algorand, Inc. // This file is part of go-algorand // // go-algorand is free software: you can redistribute it and/or modify // it under the terms of the GNU Affero General Public License as // published by the Free Software Foundation, either version 3 of the // License, or (at your option) any later version. // // go-algorand is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public License // along with go-algorand. If not, see <https://www.gnu.org/licenses/>. package handlers import ( "errors" "fmt" "net/http" "strconv" "time" "github.com/gorilla/mux" "github.com/algorand/go-algorand/agreement" "github.com/algorand/go-algorand/config" "github.com/algorand/go-algorand/crypto" "github.com/algorand/go-algorand/daemon/algod/api/server/lib" "github.com/algorand/go-algorand/daemon/algod/api/spec/v1" "github.com/algorand/go-algorand/data/basics" "github.com/algorand/go-algorand/data/bookkeeping" "github.com/algorand/go-algorand/data/transactions" "github.com/algorand/go-algorand/ledger" "github.com/algorand/go-algorand/node" "github.com/algorand/go-algorand/protocol" ) func nodeStatus(node *node.AlgorandFullNode) (res v1.NodeStatus, err error) { stat, err := node.Status() if err != nil { return v1.NodeStatus{}, err } return v1.NodeStatus{ LastRound: uint64(stat.LastRound), LastVersion: string(stat.LastVersion), NextVersion: string(stat.NextVersion), NextVersionRound: uint64(stat.NextVersionRound), NextVersionSupported: stat.NextVersionSupported, TimeSinceLastRound: stat.TimeSinceLastRound().Nanoseconds(), CatchupTime: stat.CatchupTime.Nanoseconds(), }, nil } func paymentTxEncode(tx transactions.Transaction, ad transactions.ApplyData) v1.Transaction { payment := v1.PaymentTransactionType{ To: tx.Receiver.String(), Amount: tx.TxAmount().Raw, ToRewards: ad.ReceiverRewards.Raw, CloseRewards: ad.CloseRewards.Raw, } if tx.CloseRemainderTo != (basics.Address{}) { payment.CloseRemainderTo = tx.CloseRemainderTo.String() payment.CloseAmount = ad.ClosingAmount.Raw } return v1.Transaction{ Type: string(tx.Type), TxID: tx.ID().String(), From: tx.Src().String(), Fee: tx.TxFee().Raw, FirstRound: uint64(tx.First()), LastRound: uint64(tx.Last()), Note: tx.Aux(), Payment: &payment, FromRewards: ad.SenderRewards.Raw, GenesisID: tx.GenesisID, GenesisHash: tx.GenesisHash[:], } } func txWithStatusEncode(tr node.TxnWithStatus) v1.Transaction { s := paymentTxEncode(tr.Txn.Txn, tr.ApplyData) s.ConfirmedRound = uint64(tr.ConfirmedRound) s.PoolError = tr.PoolError return s } func blockEncode(b bookkeeping.Block, c agreement.Certificate) (v1.Block, error) { block := v1.Block{ Hash: crypto.Digest(b.Hash()).String(), PreviousBlockHash: crypto.Digest(b.Branch).String(), Seed: crypto.Digest(b.Seed()).String(), Proposer: c.Proposal.OriginalProposer.String(), Round: uint64(b.Round()), TransactionsRoot: b.TxnRoot.String(), RewardsRate: b.RewardsRate, RewardsLevel: b.RewardsLevel, RewardsResidue: b.RewardsResidue, Timestamp: b.TimeStamp, UpgradeState: v1.UpgradeState{ CurrentProtocol: string(b.CurrentProtocol), NextProtocol: string(b.NextProtocol), NextProtocolApprovals: b.NextProtocolApprovals, NextProtocolVoteBefore: uint64(b.NextProtocolVoteBefore), NextProtocolSwitchOn: uint64(b.NextProtocolSwitchOn), }, UpgradeVote: v1.UpgradeVote{ UpgradePropose: 
string(b.UpgradePropose), UpgradeApprove: b.UpgradeApprove, }, } // Transactions var txns []v1.Transaction payset, err := b.DecodePaysetWithAD() if err != nil { return v1.Block{}, err } for _, txn := range payset { tx := node.TxnWithStatus{ Txn: txn.SignedTxn, ConfirmedRound: b.Round(), ApplyData: txn.ApplyData, } txns = append(txns, txWithStatusEncode(tx)) } block.Transactions = v1.TransactionList{Transactions: txns} return block, nil } // Status is an httpHandler for route GET /v1/status func Status(ctx lib.ReqContext, w http.ResponseWriter, r *http.Request) { // swagger:operation GET /v1/status GetStatus //--- // Summary: Gets the current node status. // Produces: // - application/json // Schemes: // - http // Responses: // 200: // "$ref": '#/responses/StatusResponse' // 500: // description: Internal Error // schema: {type: string} // 401: { description: Invalid API Token } // default: { description: Unknown Error } nodeStatus, err := nodeStatus(ctx.Node) if err != nil { lib.ErrorResponse(w, http.StatusInternalServerError, err, errFailedRetrievingNodeStatus, ctx.Log) return } response := StatusResponse{&nodeStatus} SendJSON(response, w, ctx.Log) } // WaitForBlock is an httpHandler for route GET /v1/status/wait-for-block-after/{round:[0-9]+} func WaitForBlock(ctx lib.ReqContext, w http.ResponseWriter, r *http.Request) { // swagger:operation GET /v1/status/wait-for-block-after/{round}/ WaitForBlock // --- // Summary: Gets the node status after waiting for the given round. // Description: Waits for a block to appear after round {round} and returns the node's status at the time. // Produces: // - application/json // Schemes: // - http // Parameters: // - name: round // in: path // type: integer // format: int64 // minimum: 0 // required: true // description: The round to wait until returning status // Responses: // 200: // "$ref": '#/responses/StatusResponse' // 400: // description: Bad Request // schema: {type: string} // 500: // description: Internal Error // schema: {type: string} // 401: { description: Invalid API Token } // default: { description: Unknown Error } queryRound, err := strconv.ParseUint(mux.Vars(r)["round"], 10, 64) if err != nil { lib.ErrorResponse(w, http.StatusBadRequest, err, errFailedParsingRoundNumber, ctx.Log) return } select { case <-time.After(1 * time.Minute): case <-ctx.Node.Ledger().Wait(basics.Round(queryRound + 1)): } nodeStatus, err := nodeStatus(ctx.Node) if err != nil { lib.ErrorResponse(w, http.StatusInternalServerError, err, errFailedRetrievingNodeStatus, ctx.Log) return } response := StatusResponse{&nodeStatus} SendJSON(response, w, ctx.Log) } // RawTransaction is an httpHandler for route POST /v1/transactions func RawTransaction(ctx lib.ReqContext, w http.ResponseWriter, r *http.Request) { // swagger:operation POST /v1/transactions RawTransaction // --- // Summary: Broadcasts a raw transaction to the network. 
// Produces: // - application/json // Consumes: // - application/x-binary // Schemes: // - http // Parameters: // - name: rawtxn // in: body // schema: // type: string // format: binary // required: true // description: The byte encoded signed transaction to broadcast to network // Responses: // 200: // "$ref": "#/responses/TransactionIDResponse" // 400: // description: Bad Request // schema: {type: string} // 500: // description: Internal Error // schema: {type: string} // 401: { description: Invalid API Token } // default: { description: Unknown Error } var st transactions.SignedTxn err := protocol.NewDecoder(r.Body).Decode(&st) if err != nil { lib.ErrorResponse(w, http.StatusBadRequest, err, err.Error(), ctx.Log) return } txid, err := ctx.Node.BroadcastSignedTxn(st) if err != nil { lib.ErrorResponse(w, http.StatusBadRequest, err, err.Error(), ctx.Log) return } SendJSON(TransactionIDResponse{&v1.TransactionID{TxID: txid.String()}}, w, ctx.Log) } // AccountInformation is an httpHandler for route GET /v1/account/{addr:[A-Z0-9]{KeyLength}} func AccountInformation(ctx lib.ReqContext, w http.ResponseWriter, r *http.Request) { // swagger:operation GET /v1/account/{address} AccountInformation // --- // Summary: Get account information. // Description: Given a specific account public key, this call returns the accounts status, balance and spendable amounts // Produces: // - application/json // Schemes: // - http // Parameters: // - name: address // in: path // type: string // pattern: "[A-Z0-9]{58}" // required: true // description: An account public key // Responses: // 200: // "$ref": '#/responses/AccountInformationResponse' // 400: // description: Bad Request // schema: {type: string} // 500: // description: Internal Error // schema: {type: string} // 401: { description: Invalid API Token } // default: { description: Unknown Error } queryAddr := mux.Vars(r)["addr"] if queryAddr == "" { lib.ErrorResponse(w, http.StatusBadRequest, fmt.Errorf(errNoAccountSpecified), errNoAccountSpecified, ctx.Log) return } addr, err := basics.UnmarshalChecksumAddress(queryAddr) if err != nil { lib.ErrorResponse(w, http.StatusBadRequest, err, errFailedToParseAddress, ctx.Log) return } myLedger := ctx.Node.Ledger() lastRound := myLedger.Latest() record, err := myLedger.Lookup(lastRound, basics.Address(addr)) if err != nil { lib.ErrorResponse(w, http.StatusInternalServerError, err, errFailedLookingUpLedger, ctx.Log) return } recordWithoutPendingRewards, err := myLedger.LookupWithoutRewards(lastRound, basics.Address(addr)) if err != nil { lib.ErrorResponse(w, http.StatusInternalServerError, err, errFailedLookingUpLedger, ctx.Log) return } amount := record.MicroAlgos amountWithoutPendingRewards := recordWithoutPendingRewards.MicroAlgos pendingRewards, overflowed := basics.OSubA(amount, amountWithoutPendingRewards) if overflowed { err = fmt.Errorf("overflowed pending rewards: %v - %v", amount, amountWithoutPendingRewards) lib.ErrorResponse(w, http.StatusInternalServerError, err, errInternalFailure, ctx.Log) return } accountInfo := v1.Account{ Round: uint64(lastRound), Address: addr.String(), Amount: amount.Raw, PendingRewards: pendingRewards.Raw, AmountWithoutPendingRewards: amountWithoutPendingRewards.Raw, Rewards: record.RewardedMicroAlgos.Raw, Status: record.Status.String(), } SendJSON(AccountInformationResponse{&accountInfo}, w, ctx.Log) } // TransactionInformation is an httpHandler for route GET /v1/account/{addr:[A-Z0-9]{KeyLength}}/transaction/{txid:[A-Z0-9]+} func TransactionInformation(ctx lib.ReqContext, w 
http.ResponseWriter, r *http.Request) { // swagger:operation GET /v1/account/{address}/transaction/{txid} TransactionInformation // --- // Summary: Get a specific confirmed transaction. // Description: > // Given a wallet address and a transaction id, it returns the confirmed transaction // information. This call scans up to <CurrentProtocol>.MaxTxnLife blocks in the past. // Produces: // - application/json // Schemes: // - http // Parameters: // - name: address // in: path // type: string // pattern: "[A-Z0-9]{58}" // required: true // description: An account public key // - name: txid // in: path // type: string // pattern: "[A-Z0-9]+" // required: true // description: A transaction id // Responses: // 200: // "$ref": '#/responses/TransactionResponse' // 400: // description: Bad Request // schema: {type: string} // 404: // description: Transaction Not Found // schema: {type: string} // 401: { description: Invalid API Token } // default: { description: Unknown Error } queryTxID := mux.Vars(r)["txid"] if queryTxID == "" { lib.ErrorResponse(w, http.StatusBadRequest, fmt.Errorf(errNoTxnSpecified), errNoTxnSpecified, ctx.Log) return } txID := transactions.Txid{} if txID.UnmarshalText([]byte(queryTxID)) != nil { lib.ErrorResponse(w, http.StatusBadRequest, fmt.Errorf(errNoTxnSpecified), errNoTxnSpecified, ctx.Log) return } queryAddr := mux.Vars(r)["addr"] if queryAddr == "" { lib.ErrorResponse(w, http.StatusBadRequest, fmt.Errorf(errNoAccountSpecified), errNoAccountSpecified, ctx.Log) return } addr, err := basics.UnmarshalChecksumAddress(queryAddr) if err != nil { lib.ErrorResponse(w, http.StatusBadRequest, fmt.Errorf(errFailedToParseAddress), errFailedToParseAddress, ctx.Log) return } latestRound := ctx.Node.Ledger().Latest() stat, err := ctx.Node.Status() if err != nil { lib.ErrorResponse(w, http.StatusInternalServerError, err, errFailedRetrievingNodeStatus, ctx.Log) return } proto := config.Consensus[stat.LastVersion] // non-Archival nodes keep proto.MaxTxnLife blocks around, // so without the + 1 in the below calculation, // Node.GetTransaction will query 1 round more than is kept around start := latestRound - basics.Round(proto.MaxTxnLife) + 1 if latestRound < basics.Round(proto.MaxTxnLife) { start = 0 } if txn, ok := ctx.Node.GetTransaction(addr, txID, start, latestRound); ok { var responseTxs v1.Transaction responseTxs = txWithStatusEncode(txn) response := TransactionResponse{ Body: &responseTxs, } SendJSON(response, w, ctx.Log) return } // We didn't find it, return a failure lib.ErrorResponse(w, http.StatusNotFound, err, errTransactionNotFound, ctx.Log) return } // PendingTransactionInformation is an httpHandler for route GET /v1/transactions/pending/{txid:[A-Z0-9]+} func PendingTransactionInformation(ctx lib.ReqContext, w http.ResponseWriter, r *http.Request) { // swagger:operation GET /v1/transactions/pending/{txid} PendingTransactionInformation // --- // Summary: Get a specific pending transaction. // Description: > // Given a transaction id of a recently submitted transaction, it returns information // about it. There are several cases when this might succeed: // // - transaction committed (committed round > 0) // - transaction still in the pool (committed round = 0, pool error = "") // - transaction removed from pool due to error (committed round = 0, pool error != "") // // Or the transaction may have happened sufficiently long ago that the // node no longer remembers it, and this will return an error. 
// Produces: // - application/json // Schemes: // - http // Parameters: // - name: txid // in: path // type: string // pattern: "[A-Z0-9]+" // required: true // description: A transaction id // Responses: // 200: // "$ref": '#/responses/TransactionResponse' // 400: // description: Bad Request // schema: {type: string} // 404: // description: Transaction Not Found // schema: {type: string} // 401: { description: Invalid API Token } // default: { description: Unknown Error } queryTxID := mux.Vars(r)["txid"] if queryTxID == "" { lib.ErrorResponse(w, http.StatusBadRequest, fmt.Errorf(errNoTxnSpecified), errNoTxnSpecified, ctx.Log) return } txID := transactions.Txid{} if txID.UnmarshalText([]byte(queryTxID)) != nil { lib.ErrorResponse(w, http.StatusBadRequest, fmt.Errorf(errNoTxnSpecified), errNoTxnSpecified, ctx.Log) return } if txn, ok := ctx.Node.GetPendingTransaction(txID); ok { var responseTxs v1.Transaction responseTxs = txWithStatusEncode(txn) response := TransactionResponse{ Body: &responseTxs, } SendJSON(response, w, ctx.Log) return } // We didn't find it, return a failure lib.ErrorResponse(w, http.StatusNotFound, errors.New(errTransactionNotFound), errTransactionNotFound, ctx.Log) return } // GetPendingTransactions is an httpHandler for route GET /v1/transactions/pending. func GetPendingTransactions(ctx lib.ReqContext, w http.ResponseWriter, r *http.Request) { // swagger:operation GET /v1/transactions/pending GetPendingTransactions // --- // Summary: Get a list of unconfirmed transactions currently in the transaction pool. // Description: > // Get the list of pending transactions, sorted by priority, // in decreasing order, truncated at the end at MAX. If MAX = 0, // returns all pending transactions. // Produces: // - application/json // Schemes: // - http // Parameters: // - name: max // in: query // type: integer // format: int64 // minimum: 0 // required: false // description: Truncated number of transactions to display. If max=0, returns all pending txns. // Responses: // "200": // "$ref": '#/responses/PendingTransactionsResponse' // 500: // description: Internal Error // schema: {type: string} // 401: { description: Invalid API Token } // default: { description: Unknown Error } max, err := strconv.ParseUint(r.FormValue("max"), 10, 64) if err != nil { max = 0 } txs, err := ctx.Node.GetPendingTxnsFromPool() if err != nil { lib.ErrorResponse(w, http.StatusInternalServerError, err, errFailedLookingUpTransactionPool, ctx.Log) return } totalTxns := uint64(len(txs)) if max > 0 && totalTxns > max { // we expose this truncating mechanism for the client only, for the flexibility // to avoid dumping the whole pool over REST or in a cli. There is no need to optimize // fetching a smaller transaction set at a lower level. txs = txs[:max] } responseTxs := make([]v1.Transaction, len(txs)) for i, twr := range txs { responseTxs[i] = paymentTxEncode(twr.Txn, transactions.ApplyData{}) } response := PendingTransactionsResponse{ Body: &v1.PendingTransactions{ TruncatedTxns: v1.TransactionList{ Transactions: responseTxs, }, TotalTxns: totalTxns, }, } SendJSON(response, w, ctx.Log) } // SuggestedFee is an httpHandler for route GET /v1/transactions/fee func SuggestedFee(ctx lib.ReqContext, w http.ResponseWriter, r *http.Request) { // swagger:operation GET /v1/transactions/fee SuggestedFee // --- // Summary: Get the suggested fee // Description: > // Suggested Fee is returned in units of micro-Algos per byte. 
// Suggested Fee may fall to zero but submitted transactions // must still have a fee of at least MinTxnFee for the current // network protocol. // Produces: // - application/json // Schemes: // - http // Responses: // "200": // "$ref": '#/responses/TransactionFeeResponse' // 401: { description: Invalid API Token } // default: { description: Unknown Error } fee := v1.TransactionFee{Fee: ctx.Node.SuggestedFee().Raw} SendJSON(TransactionFeeResponse{&fee}, w, ctx.Log) } // SuggestedParams is an httpHandler for route GET /v1/transactions/params func SuggestedParams(ctx lib.ReqContext, w http.ResponseWriter, r *http.Request) { // swagger:operation GET /v1/transactions/params TransactionParams // --- // Summary: Get parameters for constructing a new transaction // Produces: // - application/json // Schemes: // - http // Responses: // "200": // "$ref": '#/responses/TransactionParamsResponse' // 401: { description: Invalid API Token } // default: { description: Unknown Error } stat, err := ctx.Node.Status() if err != nil { lib.ErrorResponse(w, http.StatusInternalServerError, err, errFailedRetrievingNodeStatus, ctx.Log) return } gh := ctx.Node.GenesisHash() var params v1.TransactionParams params.Fee = ctx.Node.SuggestedFee().Raw params.GenesisID = ctx.Node.GenesisID() params.GenesisHash = gh[:] params.LastRound = uint64(stat.LastRound) params.ConsensusVersion = string(stat.LastVersion) proto := config.Consensus[stat.LastVersion] params.MinTxnFee = proto.MinTxnFee SendJSON(TransactionParamsResponse{&params}, w, ctx.Log) } // GetBlock is an httpHandler for route GET /v1/block/{round} func GetBlock(ctx lib.ReqContext, w http.ResponseWriter, r *http.Request) { // swagger:operation GET /v1/block/{round} GetBlock // --- // Summary: Get the block for the given round. // Produces: // - application/json // Schemes: // - http // Parameters: // - name: round // in: path // type: integer // format: int64 // minimum: 0 // required: true // description: The round from which to fetch block information. // Responses: // 200: // "$ref": '#/responses/BlockResponse' // 400: // description: Bad Request // schema: {type: string} // 500: // description: Internal Error // schema: {type: string} // 401: { description: Invalid API Token } // default: { description: Unknown Error } queryRound, err := strconv.ParseUint(mux.Vars(r)["round"], 10, 64) if err != nil { lib.ErrorResponse(w, http.StatusBadRequest, err, errFailedParsingRoundNumber, ctx.Log) return } b, c, err := ctx.Node.Ledger().BlockCert(basics.Round(queryRound)) if err != nil { lib.ErrorResponse(w, http.StatusInternalServerError, err, errFailedLookingUpLedger, ctx.Log) return } block, err := blockEncode(b, c) if err != nil { lib.ErrorResponse(w, http.StatusInternalServerError, err, errInternalFailure, ctx.Log) return } SendJSON(BlockResponse{&block}, w, ctx.Log) } // GetSupply is an httpHandler for route GET /v1/ledger/supply func GetSupply(ctx lib.ReqContext, w http.ResponseWriter, r *http.Request) { // swagger:operation GET /v1/ledger/supply GetSupply //--- // Summary: Get the current supply reported by the ledger. 
// Produces: // - application/json // Schemes: // - http // Responses: // 200: // "$ref": '#/responses/SupplyResponse' // 401: { description: Invalid API Token } // default: { description: Unknown Error } latest := ctx.Node.Ledger().Latest() totals, err := ctx.Node.Ledger().Totals(latest) if err != nil { err = fmt.Errorf("GetSupply(): round %d failed: %v", latest, err) lib.ErrorResponse(w, http.StatusInternalServerError, err, errInternalFailure, ctx.Log) return } supply := v1.Supply{ Round: uint64(latest), TotalMoney: totals.Participating().Raw, OnlineMoney: totals.Online.Money.Raw, } SendJSON(SupplyResponse{&supply}, w, ctx.Log) } func parseTime(t string) (res time.Time, err error) { // check for just date res, err = time.Parse("2006-01-02", t) if err == nil { return } // check for date and time res, err = time.Parse(time.RFC3339, t) if err == nil { return } return } // Transactions is an httpHandler for route GET /v1/account/{addr:[A-Z0-9]+}/transactions func Transactions(ctx lib.ReqContext, w http.ResponseWriter, r *http.Request) { // swagger:operation GET /v1/account/{address}/transactions Transactions // --- // Summary: Get a list of confirmed transactions. // Description: Returns the list of confirmed transactions between within a date range. This call is available only when the indexer is running. // Produces: // - application/json // Schemes: // - http // Parameters: // - name: address // in: path // type: string // pattern: "[A-Z0-9]{58}" // required: true // description: An account public key // - name: firstRound // in: query // type: integer // format: int64 // minimum: 0 // required: false // description: Do not fetch any transactions before this round. // - name: lastRound // in: query // type: integer // format: int64 // minimum: 0 // required: false // description: Do not fetch any transactions after this round. // - name: fromDate // in: query // type: string // format: date // required: false // description: Do not fetch any transactions before this date. (enabled only with indexer) // - name: toDate // in: query // type: string // format: date // required: false // description: Do not fetch any transactions after this date. (enabled only with indexer) // - name: max // in: query // type: integer // format: int64 // required: false // description: maximum transactions to show (default to 100) // Responses: // 200: // "$ref": '#/responses/TransactionsResponse' // 400: // description: Bad Request // schema: {type: string} // 500: // description: Internal Error // schema: {type: string} // 401: { description: Invalid API Token } // default: { description: Unknown Error } queryAddr := mux.Vars(r)["addr"] addr, err := basics.UnmarshalChecksumAddress(queryAddr) if err != nil { lib.ErrorResponse(w, http.StatusBadRequest, err, errFailedToParseAddress, ctx.Log) return } max, err := strconv.ParseUint(r.FormValue("max"), 10, 64) if err != nil { max = 100 } // Get different params firstRound := r.FormValue("firstRound") lastRound := r.FormValue("lastRound") fromDate := r.FormValue("fromDate") toDate := r.FormValue("toDate") var rounds []uint64 var txs []node.TxnWithStatus // Were rounds provided? if firstRound != "" && lastRound != "" { // Are they valid? 
fR, err := strconv.ParseUint(firstRound, 10, 64) if err != nil { lib.ErrorResponse(w, http.StatusBadRequest, err, errFailedParsingRoundNumber, ctx.Log) return } lR, err := strconv.ParseUint(lastRound, 10, 64) if err != nil { lib.ErrorResponse(w, http.StatusBadRequest, err, errFailedParsingRoundNumber, ctx.Log) return } txs, err = ctx.Node.ListTxns(addr, basics.Round(fR), basics.Round(lR)) if err != nil { switch err.(type) { case ledger.ErrNoEntry: if !ctx.Node.IsArchival() { lib.ErrorResponse(w, http.StatusInternalServerError, err, errBlockHashBeenDeletedArchival, ctx.Log) return } } lib.ErrorResponse(w, http.StatusInternalServerError, err, err.Error(), ctx.Log) return } } else { // is indexer on? indexer, err := ctx.Node.Indexer() if err != nil { lib.ErrorResponse(w, http.StatusBadRequest, err, errNoRoundsSpecified, ctx.Log) return } // Were dates provided? if fromDate != "" && toDate != "" { fd, err := parseTime(fromDate) if err != nil { lib.ErrorResponse(w, http.StatusBadRequest, err, err.Error(), ctx.Log) return } td, err := parseTime(toDate) if err != nil { lib.ErrorResponse(w, http.StatusBadRequest, err, err.Error(), ctx.Log) return } rounds, err = indexer.GetRoundsByAddressAndDate(addr.String(), max, fd.Unix(), td.Unix()) if err != nil { lib.ErrorResponse(w, http.StatusInternalServerError, err, err.Error(), ctx.Log) return } } else { // return last [max] transactions rounds, err = indexer.GetRoundsByAddress(addr.String(), max) if err != nil { lib.ErrorResponse(w, http.StatusInternalServerError, err, errFailedGettingInformationFromIndexer, ctx.Log) return } } } if len(rounds) > 0 { for _, rnd := range rounds { txns, _ := ctx.Node.ListTxns(addr, basics.Round(rnd), basics.Round(rnd)) txs = append(txs, txns...) // They may be more txns in the round than requested, break. if uint64(len(txs)) > max { break } } } // clip length to [max] if uint64(len(txs)) > max { txs = txs[:max] } responseTxs := make([]v1.Transaction, len(txs)) for i, twr := range txs { responseTxs[i] = txWithStatusEncode(twr) } response := TransactionsResponse{ &v1.TransactionList{ Transactions: responseTxs, }, } SendJSON(response, w, ctx.Log) } // GetTransactionByID is an httpHandler for route GET /v1/transaction/{txid} func GetTransactionByID(ctx lib.ReqContext, w http.ResponseWriter, r *http.Request) { // swagger:operation GET /v1/transaction/{txid} Transaction // --- // Summary: Get an information of a single transaction. // Description: Returns the transaction information of the given txid. Works only if the indexer is enabled. 
// Produces: // - application/json // Schemes: // - http // Parameters: // - name: txid // in: path // type: string // pattern: "[A-Z0-9]+" // required: true // description: A transaction id // Responses: // 200: // "$ref": '#/responses/TransactionResponse' // 400: // description: Bad Request // schema: {type: string} // 404: // description: Transaction Not Found // schema: {type: string} // 401: { description: Invalid API Token } // default: { description: Unknown Error } indexer, err := ctx.Node.Indexer() if err != nil { lib.ErrorResponse(w, http.StatusInternalServerError, err, errIndexerNotRunning, ctx.Log) return } queryTxID := mux.Vars(r)["txid"] if queryTxID == "" { lib.ErrorResponse(w, http.StatusBadRequest, fmt.Errorf(errNoTxnSpecified), errNoTxnSpecified, ctx.Log) return } var txID transactions.Txid if err := txID.UnmarshalText([]byte(queryTxID)); err != nil { lib.ErrorResponse(w, http.StatusBadRequest, err, err.Error(), ctx.Log) return } rnd, err := indexer.GetRoundByTXID(queryTxID) if err != nil { lib.ErrorResponse(w, http.StatusInternalServerError, err, errFailedGettingInformationFromIndexer, ctx.Log) return } if txn, err := ctx.Node.GetTransactionByID(txID, basics.Round(rnd)); err == nil { var responseTxs v1.Transaction responseTxs = txWithStatusEncode(txn) response := TransactionResponse{ Body: &responseTxs, } SendJSON(response, w, ctx.Log) return } // We didn't find it, return a failure lib.ErrorResponse(w, http.StatusNotFound, errors.New(errTransactionNotFound), errTransactionNotFound, ctx.Log) return }
1
35,877
nit: move this into a default clause in the previous switch statement.
algorand-go-algorand
go
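The nit is mechanical: the trailing return duplicates the switch's fall-through path, so Go style folds it into a default clause. A sketch of the reshaped function (same package context and imports as the patch):

// txEncode with the error moved into a default clause, per the review.
func txEncode(tx transactions.Transaction, ad transactions.ApplyData) (v1.Transaction, error) {
	switch tx.Type {
	case protocol.PaymentTx:
		return paymentTxEncode(tx, ad), nil
	case protocol.KeyRegistrationTx:
		return keyregTxEncode(tx, ad), nil
	default:
		return v1.Transaction{}, errors.New(errUnknownTransactionType)
	}
}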
@@ -27,14 +27,12 @@ var formatter = this; string = function(value) { if (value != null) { value = value.replace(/\\/g, '\\\\'); - value = value.replace(/\"/g, '\\"'); + value = value.replace(/\'/g, '\\\''); value = value.replace(/\r/g, '\\r'); value = value.replace(/\n/g, '\\n'); - value = value.replace(/@/g, '\\@'); - value = value.replace(/\$/g, '\\$'); - return '"' + value + '"'; + return "'" + value + "'"; } else { - return '""'; + return "'"; } }
1
/* * Format for Selenium Remote Control Perl client. */ var subScriptLoader = Components.classes["@mozilla.org/moz/jssubscript-loader;1"].getService(Components.interfaces.mozIJSSubScriptLoader); subScriptLoader.loadSubScript('chrome://selenium-ide/content/formats/remoteControl.js', this); this.name = "perl-rc"; // method name will not be used in this format function testMethodName(testName) { return testName; } var originalFormatCommands = formatCommands; formatCommands = function(commands) { this.tests = 0; var lines = originalFormatCommands(commands); if (this.tests == 0) { lines += addIndent("pass;\n"); } return lines; } var formatter = this; string = function(value) { if (value != null) { value = value.replace(/\\/g, '\\\\'); value = value.replace(/\"/g, '\\"'); value = value.replace(/\r/g, '\\r'); value = value.replace(/\n/g, '\\n'); value = value.replace(/@/g, '\\@'); value = value.replace(/\$/g, '\\$'); return '"' + value + '"'; } else { return '""'; } } variableName = function(value) { return "$" + value; } concatString = function(array) { return array.join(" . "); } function assertTrue(expression) { if (formatter.assertOrVerifyFailureOnNext) { return expression.toString() + " or die;"; } else { formatter.tests++; if (expression.assertable) { expression.suffix = "_ok"; return expression.toString() + ";"; } else { return "ok(" + expression.toString() + ");"; } } } function assertFalse(expression) { if (formatter.assertOrVerifyFailureOnNext) { return expression.toString() + " and die;"; } else { formatter.tests++; return "ok(not " + expression.toString() + ");"; } } var verifyTrue = assertTrue; var verifyFalse = assertFalse; function joinExpression(expression) { return "join(',', " + expression.toString() + ")"; } function assignToVariable(type, variable, expression) { if (type == 'String[]') { return "my @" + variable + " = " + expression.toString(); } else { return "my $" + variable + " = " + expression.toString(); } } function waitFor(expression) { return "WAIT: {\n" + indents(1) + "for (1..60) {\n" + indents(2) + "if (eval { " + expression.toString() + " }) { pass; last WAIT }\n" + indents(2) + "sleep(1);\n" + indents(1) + "}\n" + indents(1) + 'fail("timeout");\n' + "}"; } function assertOrVerifyFailure(line, isAssert) { return 'dies_ok { ' + line + ' };'; } Equals.prototype.toString = function() { return this.e1.toString() + " eq " + this.e2.toString(); } NotEquals.prototype.toString = function() { return this.e1.toString() + " ne " + this.e2.toString(); } Equals.prototype.assert = function() { if (formatter.assertOrVerifyFailureOnNext) { return assertTrue(this); } else { formatter.tests++; if (!this.e2.args) { return "is(" + this.e1 + ", " + this.e2 + ");"; } else { var expression = this.e2; expression.suffix = "_is"; expression.noGet = true; expression.args.push(this.e1); return expression.toString() + ";"; } } } Equals.prototype.verify = Equals.prototype.assert; NotEquals.prototype.assert = function() { if (formatter.assertOrVerifyFailureOnNext) { return assertTrue(this); } else { if (!this.e2.args) { return "isnt(" + this.e1 + ", " + this.e2 + ");"; } else { formatter.tests++; var expression = this.e2; expression.suffix = "_isnt"; expression.noGet = true; expression.args.push(this.e1); return expression.toString() + ";"; } } } NotEquals.prototype.verify = NotEquals.prototype.assert; RegexpMatch.prototype.toString = function() { return this.expression + " =~ /" + this.pattern.replace(/\//g, "\\/") + "/"; } RegexpNotMatch.prototype.toString = function() { return notOperator() 
+ "(" + RegexpMatch.prototype.toString.call(this) + ")"; } function ifCondition(expression, callback) { return "if (" + expression.toString() + ") {\n" + callback() + "}"; } function pause(milliseconds) { return "sleep(" + (parseInt(milliseconds) / 1000) + ");"; } function echo(message) { return "print(" + xlateArgument(message) + ' . "\\n");' } function statement(expression) { if (!formatter.assertOrVerifyFailureOnNext) { formatter.tests++; expression.suffix = "_ok"; } return expression.toString() + ";"; } function array(value) { var str = '('; for (var i = 0; i < value.length; i++) { str += string(value[i]); if (i < value.length - 1) str += ", "; } str += ')'; return str; } function nonBreakingSpace() { return "\"\\x{00A0}\""; } CallSelenium.prototype.assertable = true; CallSelenium.prototype.toString = function() { var result = ''; if (this.negative) { result += '!'; } if (options.receiver) { result += options.receiver + '->'; } var command = underscore(this.message); if (this.noGet) { command = command.replace(/^get_/, ''); } result += command; if (this.suffix) { result += this.suffix; } result += '('; for (var i = 0; i < this.args.length; i++) { result += this.args[i]; if (i < this.args.length - 1) { result += ', '; } } result += ')'; return result; } function formatComment(comment) { return comment.comment.replace(/.+/mg, function(str) { return "# " + str; }); } this.options = { receiver: "$sel", rcHost: "localhost", rcPort: "4444", environment: "*chrome", header: 'use strict;\n' + 'use warnings;\n' + 'use Time::HiRes qw(sleep);\n' + 'use Test::WWW::Selenium;\n' + 'use Test::More "no_plan";\n' + 'use Test::Exception;\n' + '\n' + 'my ${receiver} = Test::WWW::Selenium->new( host => "${rcHost}", \n' + ' port => ${rcPort}, \n' + ' browser => "${environment}", \n' + ' browser_url => "${baseURL}" );\n' + '\n', footer: "", indent: "4", initialIndents: '0' }; this.configForm = '<description>Variable for Selenium instance</description>' + '<textbox id="options_receiver" />' + '<description>Selenium RC host</description>' + '<textbox id="options_rcHost" />' + '<description>Selenium RC port</description>' + '<textbox id="options_rcPort" />' + '<description>Environment</description>' + '<textbox id="options_environment" />' + '<description>Header</description>' + '<textbox id="options_header" multiline="true" flex="1" rows="4"/>' + '<description>Footer</description>' + '<textbox id="options_footer" multiline="true" flex="1" rows="4"/>' + '<description>Indent</description>' + '<menulist id="options_indent"><menupopup>' + '<menuitem label="Tab" value="tab"/>' + '<menuitem label="1 space" value="1"/>' + '<menuitem label="2 spaces" value="2"/>' + '<menuitem label="3 spaces" value="3"/>' + '<menuitem label="4 spaces" value="4"/>' + '<menuitem label="5 spaces" value="5"/>' + '<menuitem label="6 spaces" value="6"/>' + '<menuitem label="7 spaces" value="7"/>' + '<menuitem label="8 spaces" value="8"/>' + '</menupopup></menulist>';
1
10,826
It should be `return "''";`. I will fix it.
SeleniumHQ-selenium
js
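The reviewer's fix closes the bug in the patch's else branch: a Perl empty string literal is two single quotes, not one. Here is how the corrected helper would look, matching the patch otherwise; note the @ and $ escapes stay dropped because single-quoted Perl strings do not interpolate:

string = function(value) {
  if (value != null) {
    value = value.replace(/\\/g, '\\\\');
    value = value.replace(/'/g, "\\'");
    value = value.replace(/\r/g, '\\r');
    value = value.replace(/\n/g, '\\n');
    return "'" + value + "'";
  } else {
    return "''"; // empty single-quoted Perl string, per the review
  }
}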
@@ -43,14 +43,14 @@ type ClusterEvent interface{} // Specific type of cluster event emitted when new member joins cluster type NewMemberEvent struct { endpointsInfo - Id string + ID string } // Specific type of cluster event emitted when leadership is transferred. // It is not always loss or acquire of leadership of this specific node type LeadershipEvent struct { Held bool - LeaderId string + LeaderID string LeaderEndpoints []string }
1
package hub import ( "bytes" "context" "crypto/tls" "encoding/json" "fmt" "net" "reflect" "strings" "sync" "time" "github.com/docker/leadership" "github.com/docker/libkv" "github.com/docker/libkv/store" "github.com/docker/libkv/store/boltdb" "github.com/docker/libkv/store/consul" log "github.com/noxiouz/zapctx/ctxlog" "github.com/pkg/errors" "github.com/satori/uuid" pb "github.com/sonm-io/core/proto" "github.com/sonm-io/core/util" "github.com/sonm-io/core/util/xgrpc" "go.uber.org/zap" "golang.org/x/sync/errgroup" "google.golang.org/grpc" "google.golang.org/grpc/credentials" ) // ClusterEvent describes an event that can produce the cluster. // // Possible types are: // - `NewMemberEvent` when new member joins cluster // - `LeadershipEvent` when leadership is transferred // - `T` types for other registered synchronizable entities. // - `error` on any unrecoverable error, after that channel is closed // and the user should call Run once more to enable synchronization type ClusterEvent interface{} // Specific type of cluster event emitted when new member joins cluster type NewMemberEvent struct { endpointsInfo Id string } // Specific type of cluster event emitted when leadership is transferred. // It is not always loss or acquire of leadership of this specific node type LeadershipEvent struct { Held bool LeaderId string LeaderEndpoints []string } type Cluster interface { // Starts synchronization process. Can be called multiple times after error is received in EventChannel Run() error Close() // IsLeader returns true if this cluster is a leader, i.e. we rule the // synchronization process. IsLeader() bool LeaderClient() (pb.HubClient, error) RegisterAndLoadEntity(name string, prototype interface{}) error Synchronize(entity interface{}) error // Fetch current cluster members Members() ([]NewMemberEvent, error) } // Returns a cluster writer interface if this node is a master, event channel // otherwise. // Should be recalled when a cluster's master/slave state changes. // The channel is closed when the specified context is canceled. 
func NewCluster(ctx context.Context, cfg *ClusterConfig, workerEndpoint string, creds credentials.TransportCredentials) (Cluster, <-chan ClusterEvent, error) { clusterStore, err := makeStore(ctx, cfg) if err != nil { return nil, nil, err } err = clusterStore.Put(cfg.SynchronizableEntitiesPrefix, []byte{}, &store.WriteOptions{IsDir: true}) if err != nil { return nil, nil, err } clientEndpoints, workerEndpoints, err := getEndpoints(cfg, workerEndpoint) if err != nil { return nil, nil, err } c := cluster{ parentCtx: ctx, cfg: cfg, registeredEntities: make(map[string]reflect.Type), entityNames: make(map[reflect.Type]string), store: clusterStore, isLeader: true, id: uuid.NewV1().String(), endpoints: &endpointsInfo{ Client: clientEndpoints, Worker: workerEndpoints, }, clients: make(map[string]*client), clusterEndpoints: make(map[string]*endpointsInfo), eventChannel: make(chan ClusterEvent, 100), creds: creds, } if cfg.Failover { c.isLeader = false } c.ctx, c.cancel = context.WithCancel(c.parentCtx) c.registerMember(c.id, clientEndpoints, workerEndpoints) return &c, c.eventChannel, nil } type client struct { client pb.HubClient conn *grpc.ClientConn } type endpointsInfo struct { Client []string Worker []string } type cluster struct { parentCtx context.Context ctx context.Context cancel context.CancelFunc cfg *ClusterConfig registeredEntitiesMu sync.RWMutex registeredEntities map[string]reflect.Type entityNames map[reflect.Type]string store store.Store // self info isLeader bool id string endpoints *endpointsInfo leaderLock sync.RWMutex clients map[string]*client clusterEndpoints map[string]*endpointsInfo leaderId string eventChannel chan ClusterEvent creds credentials.TransportCredentials } func (c *cluster) Close() { if c.cancel != nil { c.cancel() } } func (c *cluster) Run() error { c.Close() w := errgroup.Group{} c.ctx, c.cancel = context.WithCancel(c.parentCtx) if c.cfg.Failover { c.isLeader = false w.Go(c.election) w.Go(c.announce) w.Go(c.hubWatch) w.Go(c.leaderWatch) w.Go(c.hubGC) } else { log.G(c.ctx).Info("running in dev single-server mode") } w.Go(c.watchEvents) return w.Wait() } func (c *cluster) IsLeader() bool { return c.isLeader } // Get GRPC hub client to current leader func (c *cluster) LeaderClient() (pb.HubClient, error) { log.G(c.ctx).Debug("fetching leader client") c.leaderLock.RLock() defer c.leaderLock.RUnlock() endpts, ok := c.clusterEndpoints[c.leaderId] if !ok || len(endpts.Client) == 0 { log.G(c.ctx).Warn("can not determine leader") return nil, errors.New("can not determine leader") } client, ok := c.clients[c.leaderId] if !ok || client == nil { log.G(c.ctx).Warn("not connected to leader") return nil, errors.New("not connected to leader") } return client.client, nil } func (c *cluster) RegisterAndLoadEntity(name string, prototype interface{}) error { c.registeredEntitiesMu.Lock() defer c.registeredEntitiesMu.Unlock() t := reflect.Indirect(reflect.ValueOf(prototype)).Type() c.registeredEntities[name] = t c.entityNames[t] = name keyName := c.cfg.SynchronizableEntitiesPrefix + "/" + name exists, err := c.store.Exists(keyName) if err != nil { return errors.Wrap(err, fmt.Sprintf("could not check entity %s for existance in storage", name)) } if !exists { return nil } kvPair, err := c.store.Get(keyName) if err != nil { return errors.Wrap(err, fmt.Sprintf("could not fetch entity %s initial value from storage", name)) } err = json.Unmarshal(kvPair.Value, prototype) if err != nil { return errors.Wrap(err, fmt.Sprintf("could not unmarshal entity %s from storage data", name)) } 
return nil } func (c *cluster) Synchronize(entity interface{}) error { if !c.isLeader { log.G(c.ctx).Warn("failed to synchronize entity - not a leader") return errors.New("not a leader") } name, err := c.nameByEntity(entity) if err != nil { log.G(c.ctx).Warn("unknown synchronizable entity", zap.Any("entity", entity)) return err } data, err := json.Marshal(entity) if err != nil { log.G(c.ctx).Warn("could not marshal entity", zap.Error(err)) return err } log.G(c.ctx).Debug("synchronizing entity", zap.Any("entity", entity), zap.ByteString("marshalled", data)) c.store.Put(c.cfg.SynchronizableEntitiesPrefix+"/"+name, data, &store.WriteOptions{}) return nil } func (c *cluster) Members() ([]NewMemberEvent, error) { result := make([]NewMemberEvent, 0) c.leaderLock.RLock() defer c.leaderLock.RUnlock() for id, endpts := range c.clusterEndpoints { result = append(result, NewMemberEvent{endpointsInfo: *endpts, Id: id}) } return result, nil } func (c *cluster) election() error { candidate := leadership.NewCandidate(c.store, c.cfg.LeaderKey, c.id, c.cfg.LeaderTTL) electedCh, errCh := candidate.RunForElection() log.G(c.ctx).Info("starting leader election goroutine") for { select { case c.isLeader = <-electedCh: log.G(c.ctx).Debug("election event", zap.Bool("isLeader", c.isLeader)) // Do not possibly block on event channel to prevent stale leadership data go c.emitLeadershipEvent() case err := <-errCh: log.G(c.ctx).Error("election failure", zap.Error(err)) c.close(errors.WithStack(err)) return err case <-c.ctx.Done(): candidate.Stop() return nil } } } // Blocks in endless cycle watching for leadership. // When the leadership is changed stores new leader id in cluster func (c *cluster) leaderWatch() error { log.G(c.ctx).Info("starting leader watch goroutine") follower := leadership.NewFollower(c.store, c.cfg.LeaderKey) leaderCh, errCh := follower.FollowElection() for { select { case <-c.ctx.Done(): follower.Stop() return nil case err := <-errCh: log.G(c.ctx).Error("leader watch failure", zap.Error(err)) c.close(errors.WithStack(err)) return err case leaderId := <-leaderCh: c.leaderLock.Lock() c.leaderId = leaderId c.leaderLock.Unlock() c.emitLeadershipEvent() } } } func (c *cluster) announce() error { log.G(c.ctx).Info("starting announce goroutine", zap.Any("endpointsInfo", c.endpoints), zap.String("ID", c.id)) endpointsData, _ := json.Marshal(c.endpoints) ticker := time.NewTicker(c.cfg.AnnounceTTL) defer ticker.Stop() for { select { case <-ticker.C: err := c.store.Put(c.cfg.MemberListKey+"/"+c.id, endpointsData, &store.WriteOptions{TTL: c.cfg.AnnounceTTL}) if err != nil { log.G(c.ctx).Error("could not update announce", zap.Error(err)) c.close(errors.WithStack(err)) return err } case <-c.ctx.Done(): return nil } } } func (c *cluster) hubWatch() error { log.G(c.ctx).Info("starting member watch goroutine") stopCh := make(chan struct{}) listener, err := c.store.WatchTree(c.cfg.MemberListKey, stopCh) if err != nil { c.close(err) } for { select { case members, ok := <-listener: if !ok { err := errors.WithStack(errors.New("hub watcher closed")) c.close(err) return err } else { for _, member := range members { if member.Value == nil { log.G(c.ctx).Debug("received cluster member with nil Value, skipping (this can happen due to consul peculiarities)", zap.Any("member", member)) continue } log.G(c.ctx).Debug("received cluster member, registering", zap.Any("member", member)) err := c.registerMemberFromKV(member) if err != nil { log.G(c.ctx).Warn("trash data in cluster members folder: ", zap.Any("kvPair", member), 
zap.Error(err)) } } } case <-c.ctx.Done(): close(stopCh) return nil } } } func (c *cluster) checkHub(id string) error { if id == c.id { return nil } exists, err := c.store.Exists(c.cfg.MemberListKey + "/" + id) if err != nil { return err } if !exists { log.G(c.ctx).Info("hub is offline, removing", zap.String("hubId", id)) c.leaderLock.Lock() defer c.leaderLock.Unlock() if cli, ok := c.clients[id]; ok { cli.conn.Close() delete(c.clients, id) delete(c.clusterEndpoints, id) } } return nil } func (c *cluster) hubGC() error { log.G(c.ctx).Info("starting hub GC goroutine") t := time.NewTicker(c.cfg.MemberGCPeriod) defer t.Stop() for { select { case <-t.C: c.leaderLock.RLock() idsToCheck := make([]string, 0) for id := range c.clients { idsToCheck = append(idsToCheck, id) } c.leaderLock.RUnlock() for _, id := range idsToCheck { err := c.checkHub(id) if err != nil { log.G(c.ctx).Warn("failed to check hub", zap.String("hubId", id), zap.Error(err)) } else { log.G(c.ctx).Info("checked hub", zap.String("hubId", id)) } } case <-c.ctx.Done(): return nil } } } //TODO: extract this to some kind of store wrapper over boltdb func (c *cluster) watchEventsTree(stopCh <-chan struct{}) (<-chan []*store.KVPair, error) { if c.cfg.Failover { return c.store.WatchTree(c.cfg.SynchronizableEntitiesPrefix, stopCh) } opts := store.WriteOptions{ IsDir: true, } empty := make([]byte, 0) c.store.Put(c.cfg.SynchronizableEntitiesPrefix, empty, &opts) ch := make(chan []*store.KVPair, 1) data := make(map[string]*store.KVPair) updater := func() error { changed := false pairs, err := c.store.List(c.cfg.SynchronizableEntitiesPrefix) if err != nil { return err } filteredPairs := make([]*store.KVPair, 0) for _, pair := range pairs { if pair.Key == c.cfg.SynchronizableEntitiesPrefix { continue } filteredPairs = append(filteredPairs, pair) cur, ok := data[pair.Key] if !ok || !bytes.Equal(cur.Value, pair.Value) { changed = true data[pair.Key] = pair } } if changed { ch <- filteredPairs } return nil } if err := updater(); err != nil { return nil, err } go func() { t := time.NewTicker(time.Second * 1) defer t.Stop() for { select { case <-c.ctx.Done(): return case <-t.C: err := updater() if err != nil { c.close(err) } } } }() return ch, nil } func (c *cluster) watchEvents() error { log.G(c.ctx).Info("subscribing on sync folder") watchStopChannel := make(chan struct{}) ch, err := c.watchEventsTree(watchStopChannel) if err != nil { c.close(err) return err } for { select { case <-c.ctx.Done(): close(watchStopChannel) return nil case kvList, ok := <-ch: if !ok { err := errors.WithStack(errors.New("watch channel is closed")) c.close(err) return err } for _, kv := range kvList { name := fetchNameFromPath(kv.Key) t, err := c.typeByName(name) if err != nil { log.G(c.ctx).Warn("unknown synchronizable entity", zap.String("entity", name)) continue } value := reflect.New(t) err = json.Unmarshal(kv.Value, value.Interface()) if err != nil { log.G(c.ctx).Warn("can not unmarshal entity", zap.Error(err)) } else { log.G(c.ctx).Debug("received cluster event", zap.String("name", name), zap.Any("value", value.Interface())) c.eventChannel <- reflect.Indirect(value).Interface() } } } } } func (c *cluster) nameByEntity(entity interface{}) (string, error) { c.registeredEntitiesMu.RLock() defer c.registeredEntitiesMu.RUnlock() t := reflect.Indirect(reflect.ValueOf(entity)).Type() name, ok := c.entityNames[t] if !ok { return "", errors.New("entity " + t.String() + " is not registered") } return name, nil } func (c *cluster) typeByName(name string) (reflect.Type, 
error) { c.registeredEntitiesMu.RLock() defer c.registeredEntitiesMu.RUnlock() t, ok := c.registeredEntities[name] if !ok { return nil, errors.New("entity " + name + " is not registered") } return t, nil } func makeStore(ctx context.Context, cfg *ClusterConfig) (store.Store, error) { consul.Register() boltdb.Register() log.G(ctx).Info("creating store", zap.Any("store", cfg)) var ( endpts = []string{cfg.Store.Endpoint} backend = store.Backend(cfg.Store.Type) tlsConf *tls.Config ) if len(cfg.Store.CertFile) != 0 && len(cfg.Store.KeyFile) != 0 { cer, err := tls.LoadX509KeyPair(cfg.Store.CertFile, cfg.Store.KeyFile) if err != nil { return nil, err } tlsConf = &tls.Config{ Certificates: []tls.Certificate{cer}, } } config := store.Config{ TLS: tlsConf, } config.Bucket = cfg.Store.Bucket return libkv.NewStore(backend, endpts, &config) } func (c *cluster) close(err error) { log.G(c.ctx).Error("cluster failure", zap.Error(err)) c.leaderLock.Lock() c.leaderId = "" c.isLeader = false c.leaderLock.Unlock() c.Close() } func (c *cluster) emitLeadershipEvent() { c.leaderLock.Lock() defer c.leaderLock.Unlock() endpoints, ok := c.clusterEndpoints[c.leaderId] if !ok { log.G(c.ctx).Error("leader endpoint not found", zap.String("leader_id", c.leaderId)) return } c.eventChannel <- LeadershipEvent{ Held: c.isLeader, LeaderId: c.leaderId, LeaderEndpoints: endpoints.Client, } } func (c *cluster) memberExists(id string) bool { c.leaderLock.RLock() defer c.leaderLock.RUnlock() _, ok := c.clients[id] return ok } func (c *cluster) registerMemberFromKV(member *store.KVPair) error { id := fetchNameFromPath(member.Key) if id == c.id { return nil } if c.memberExists(id) { return nil } endpts := &endpointsInfo{} err := json.Unmarshal(member.Value, &endpts) if err != nil { return err } return c.registerMember(id, endpts.Client, endpts.Worker) } func (c *cluster) registerMember(id string, clientEndpoints, workerEndpoints []string) error { log.G(c.ctx).Info("fetched endpointsInfo of new member", zap.Any("client_endpoints", clientEndpoints), zap.Any("worker_endpoints", workerEndpoints)) c.leaderLock.Lock() c.clusterEndpoints[id] = &endpointsInfo{Client: clientEndpoints, Worker: workerEndpoints} c.eventChannel <- NewMemberEvent{endpointsInfo: *c.clusterEndpoints[id], Id: id} c.leaderLock.Unlock() if id == c.id { return nil } for _, ep := range clientEndpoints { conn, err := xgrpc.NewClient(c.ctx, ep, c.creds, grpc.WithBlock(), grpc.WithTimeout(time.Second*5)) if err != nil { log.G(c.ctx).Warn("could not connect to hub", zap.String("endpoint", ep), zap.Error(err)) continue } else { log.G(c.ctx).Info("successfully connected to cluster member") c.leaderLock.Lock() _, ok := c.clients[id] if ok { log.G(c.ctx).Info("duplicated connection - dropping") conn.Close() c.leaderLock.Unlock() return nil } c.clients[id] = &client{pb.NewHubClient(conn), conn} c.leaderLock.Unlock() return nil } } return errors.New("could not connect to any provided member endpoint") } func fetchNameFromPath(key string) string { parts := strings.Split(key, "/") return parts[len(parts)-1] } func getEndpoints(config *ClusterConfig, workerEndpoint string) (clientEndpoints, workerEndpoints []string, err error) { clientEndpoint := config.AnnounceEndpoint if len(clientEndpoint) == 0 { clientEndpoint = config.Endpoint } clientEndpoints, err = parseEndpoints(clientEndpoint) if err != nil { return nil, nil, errors.Wrap(err, "failed to get client endpointsInfo") } workerEndpoints, err = parseEndpoints(workerEndpoint) if err != nil { return nil, nil, errors.Wrap(err, 
"failed to get worker endpointsInfo") } return } func parseEndpoints(endpoint string) (endpts []string, err error) { ipAddr, port, err := net.SplitHostPort(endpoint) if err != nil { return nil, err } if len(ipAddr) != 0 { ip := net.ParseIP(ipAddr) if ip == nil { return nil, fmt.Errorf( "client endpoint %s must be a valid IP", ipAddr) } if !ip.IsUnspecified() { endpts = append(endpts, endpoint) return endpts, nil } } systemIPs, err := util.GetAvailableIPs() if err != nil { return nil, err } for _, ip := range systemIPs { endpts = append(endpts, net.JoinHostPort(ip.String(), port)) } return endpts, nil }
1
6,298
The Go standard linter hates `Id`s and likes `ID`s; please do not resist.
sonm-io-core
go
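For illustration, a minimal sketch of the rename this review asks for, following the Go initialism convention that golint/staticcheck enforce. The field names echo the cluster code above, but the struct bodies are trimmed-down stand-ins, not the real types:

package main

import "fmt"

// Go's initialism convention prefers ID over Id in exported identifiers.
type NewMemberEvent struct {
	ID string // was "Id"
}

type LeadershipEvent struct {
	LeaderID string // was "LeaderId"
}

func main() {
	e := NewMemberEvent{ID: "hub-1"}
	l := LeadershipEvent{LeaderID: e.ID}
	fmt.Println(e.ID, l.LeaderID)
}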
@@ -71,11 +71,11 @@ module Travis end def fetch_ref - cmd "git fetch origin +#{data.ref}: ", assert: true, timeout: :git_fetch_ref, fold: "git.#{next_git_fold_number}" + cmd "git fetch origin +#{data.ref}: ", assert: true, timeout: :git_fetch_ref, fold: "git.#{next_git_fold_number}", retry: true end def git_checkout - cmd "git checkout -qf #{data.pull_request ? 'FETCH_HEAD' : data.commit}", assert: true, fold: "git.#{next_git_fold_number}" + cmd "git checkout -qf #{data.pull_request ? 'FETCH_HEAD' : data.commit}", assert: true, fold: "git.#{next_git_fold_number}", retry: true end def submodules?
1
require 'shellwords' module Travis module Build class Script module Git DEFAULTS = { git: { depth: 50, submodules: true, strategy: 'clone' } } def checkout install_source_key if tarball_clone? download_tarball else git_clone ch_dir fetch_ref if fetch_ref? git_checkout submodules if submodules? end rm_key sh.to_s end private def install_source_key return unless config[:source_key] echo "\nInstalling an SSH key\n" cmd "echo '#{config[:source_key]}' | base64 --decode > ~/.ssh/id_rsa", echo: false, log: false cmd 'chmod 600 ~/.ssh/id_rsa', echo: false, log: false cmd 'eval `ssh-agent` > /dev/null 2>&1', echo: false, log: false cmd 'ssh-add ~/.ssh/id_rsa > /dev/null 2>&1', echo: false, log: false # BatchMode - If set to 'yes', passphrase/password querying will be disabled. # TODO ... how to solve StrictHostKeyChecking correctly? deploy a knownhosts file? cmd %(echo -e "Host #{data.source_host}\n\tBatchMode yes\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config), echo: false, log: false end def download_tarball cmd "mkdir -p #{dir}", assert: true curl_cmd = "curl -o #{sanitized_slug}.tar.gz #{oauth_token}-L #{tarball_url}" cmd curl_cmd, echo: curl_cmd.gsub(data.token || /\Za/, '[SECURE]'), assert: true, retry: true, fold: "tarball.#{next_git_fold_number}" cmd "tar xfz #{sanitized_slug}.tar.gz", assert: true cmd "mv #{sanitized_slug}-#{data.commit[0..6]}/* #{dir}", assert: true ch_dir end def git_clone set 'GIT_ASKPASS', 'echo', :echo => false # this makes git interactive auth fail self.if "! -d #{dir}/.git" do cmd "git clone #{clone_args} #{data.source_url} #{dir}", assert: true, timeout: :git_clone, fold: "git.#{next_git_fold_number}", retry: true end self.else do cmd "git fetch origin", assert: true, timeout: :git_clone, fold: "git.#{next_git_fold_number}", retry: true end end def ch_dir cmd "cd #{dir}", assert: true, timeout: false end def rm_key raw 'rm -f ~/.ssh/source_rsa' end def fetch_ref? !!data.ref end def fetch_ref cmd "git fetch origin +#{data.ref}: ", assert: true, timeout: :git_fetch_ref, fold: "git.#{next_git_fold_number}" end def git_checkout cmd "git checkout -qf #{data.pull_request ? 'FETCH_HEAD' : data.commit}", assert: true, fold: "git.#{next_git_fold_number}" end def submodules? config[:git][:submodules] end def submodules self.if '-f .gitmodules' do cmd 'echo -e "Host github.com\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config', echo: false cmd 'git submodule init', fold: "git.#{next_git_fold_number}" cmd 'git submodule update', assert: true, timeout: :git_submodules, fold: "git.#{next_git_fold_number}" end end def clone_args args = "--depth=#{config[:git][:depth]}" args << " --branch=#{data.branch.shellescape}" unless data.ref args end def tarball_clone? config[:git][:strategy] == 'tarball' end def dir data.slug end def tarball_url "#{data.api_url}/tarball/#{data.commit}" end def oauth_token data.token ? "-H \"Authorization: token #{data.token}\" " : nil end def sanitized_slug data.slug.gsub('/', '-') end def next_git_fold_number @git_fold_number ||= 0 @git_fold_number += 1 end end end end end
1
11,378
I don't think retrying here is as useful, since `git-checkout` doesn't access the network AFAIK, and if it fails once it's probably going to fail all three times.
travis-ci-travis-build
rb
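A hedged sketch of the distinction this reviewer draws: only network-bound git commands benefit from `retry: true`, while a local command like `git checkout` that fails once will almost certainly fail on every retry. The constant and helper below are hypothetical, not part of travis-build:

# Hypothetical helper: mark only network-touching git subcommands retryable.
NETWORK_GIT_COMMANDS = %w(clone fetch).freeze

def retryable?(git_subcommand)
  NETWORK_GIT_COMMANDS.include?(git_subcommand)
end

puts retryable?('fetch')    # => true
puts retryable?('checkout') # => false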
@@ -172,7 +172,7 @@ func SetCertificateNotBefore(p metav1.Time) CertificateModifier { } } -func SetCertificateRenewalTIme(p metav1.Time) CertificateModifier { +func SetCertificateRenewalTime(p metav1.Time) CertificateModifier { return func(crt *v1.Certificate) { crt.Status.RenewalTime = &p }
1
/* Copyright 2020 The cert-manager Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package gen import ( "time" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" v1 "github.com/jetstack/cert-manager/pkg/apis/certmanager/v1" cmmeta "github.com/jetstack/cert-manager/pkg/apis/meta/v1" ) type CertificateModifier func(*v1.Certificate) func Certificate(name string, mods ...CertificateModifier) *v1.Certificate { c := &v1.Certificate{ ObjectMeta: ObjectMeta(name), Spec: v1.CertificateSpec{ PrivateKey: &v1.CertificatePrivateKey{}, }, } for _, mod := range mods { mod(c) } return c } func CertificateFrom(crt *v1.Certificate, mods ...CertificateModifier) *v1.Certificate { crt = crt.DeepCopy() for _, mod := range mods { mod(crt) } return crt } // SetIssuer sets the Certificate.spec.issuerRef field func SetCertificateIssuer(o cmmeta.ObjectReference) CertificateModifier { return func(c *v1.Certificate) { c.Spec.IssuerRef = o } } func SetCertificateDNSNames(dnsNames ...string) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.DNSNames = dnsNames } } func SetCertificateCommonName(commonName string) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.CommonName = commonName } } func SetCertificateIPs(ips ...string) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.IPAddresses = ips } } func SetCertificateURIs(uris ...string) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.URIs = uris } } func SetCertificateIsCA(isCA bool) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.IsCA = isCA } } func SetCertificateKeyAlgorithm(keyAlgorithm v1.PrivateKeyAlgorithm) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.PrivateKey.Algorithm = keyAlgorithm } } func SetCertificateKeySize(keySize int) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.PrivateKey.Size = keySize } } func SetCertificateKeyEncoding(keyEncoding v1.PrivateKeyEncoding) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.PrivateKey.Encoding = keyEncoding } } func SetCertificateSecretName(secretName string) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.SecretName = secretName } } // SetCertificateSecretTemplate sets annotations and labels to be attached to the secret metadata. 
func SetCertificateSecretTemplate(annotations, labels map[string]string) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.SecretTemplate = &v1.CertificateSecretTemplate{ Annotations: annotations, Labels: labels, } } } func SetCertificateDuration(duration time.Duration) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.Duration = &metav1.Duration{Duration: duration} } } func SetCertificateRenewBefore(renewBefore time.Duration) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.RenewBefore = &metav1.Duration{Duration: renewBefore} } } func SetCertificateNextPrivateKeySecretName(name string) CertificateModifier { return func(crt *v1.Certificate) { crt.Status.NextPrivateKeySecretName = &name } } func SetCertificateStatusCondition(c v1.CertificateCondition) CertificateModifier { return func(crt *v1.Certificate) { if len(crt.Status.Conditions) == 0 { crt.Status.Conditions = []v1.CertificateCondition{c} return } for i, existingC := range crt.Status.Conditions { if existingC.Type == c.Type { crt.Status.Conditions[i] = c return } } crt.Status.Conditions = append(crt.Status.Conditions, c) } } func SetCertificateLastFailureTime(p metav1.Time) CertificateModifier { return func(crt *v1.Certificate) { crt.Status.LastFailureTime = &p } } func SetCertificateNotAfter(p metav1.Time) CertificateModifier { return func(crt *v1.Certificate) { crt.Status.NotAfter = &p } } func SetCertificateNotBefore(p metav1.Time) CertificateModifier { return func(crt *v1.Certificate) { crt.Status.NotBefore = &p } } func SetCertificateRenewalTIme(p metav1.Time) CertificateModifier { return func(crt *v1.Certificate) { crt.Status.RenewalTime = &p } } func SetCertificateOrganization(orgs ...string) CertificateModifier { return func(ch *v1.Certificate) { ch.Spec.Subject.Organizations = orgs } } func SetCertificateNamespace(namespace string) CertificateModifier { return func(crt *v1.Certificate) { crt.ObjectMeta.Namespace = namespace } } func SetCertificateKeyUsages(usages ...v1.KeyUsage) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.Usages = usages } } func SetCertificateRevision(revision int) CertificateModifier { return func(crt *v1.Certificate) { crt.Status.Revision = &revision } } func SetCertificateUID(uid types.UID) CertificateModifier { return func(crt *v1.Certificate) { crt.UID = uid } } func SetCertificateGeneration(gen int64) CertificateModifier { return func(crt *v1.Certificate) { crt.Generation = gen } } func AddCertificateAnnotations(annotations map[string]string) CertificateModifier { return func(crt *v1.Certificate) { if crt.Annotations == nil { crt.Annotations = make(map[string]string) } for k, v := range annotations { crt.Annotations[k] = v } } } func AddCertificateLabels(labels map[string]string) CertificateModifier { return func(crt *v1.Certificate) { if crt.Labels == nil { crt.Labels = make(map[string]string) } for k, v := range labels { crt.Labels[k] = v } } } // CertificateRef creates an owner reference for a certificate without having to // give the full certificate. Only use this function for testing purposes. // // Note that the only "important" field that must be filled in ownerReference // [1] is the UID. Most notably, the IsControlledBy function [2] only cares // about the UID. The apiVersion, kind and name are only used for information // purposes. 
// // [1]: https://github.com/kubernetes/apimachinery/blob/10b3882/pkg/apis/meta/v1/types.go#L273-L275 // [2]: https://github.com/kubernetes/apimachinery/blob/10b3882/pkg/apis/meta/v1/controller_ref.go#L29 func CertificateRef(certName, certUID string) metav1.OwnerReference { return *metav1.NewControllerRef( Certificate(certName, SetCertificateUID(types.UID(certUID)), ), v1.SchemeGroupVersion.WithKind("Certificate"), ) } func SetCertificateRevisionHistoryLimit(limit int32) CertificateModifier { return func(crt *v1.Certificate) { crt.Spec.RevisionHistoryLimit = &limit } }
1
29,449
Fixing this typo caused a build failure in `cmd/ctl/pkg/status/certificate/certificate_test.go:238`. Could you also update it there?
jetstack-cert-manager
go
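A hedged sketch of the follow-up the reviewer asks for: any test call site built on the misspelled helper must adopt the corrected name. The test body and the import path `github.com/jetstack/cert-manager/test/unit/gen` are assumptions based on the `gen` package shown above, not the actual failing test:

package certificate

import (
	"testing"
	"time"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	"github.com/jetstack/cert-manager/test/unit/gen" // assumed import path
)

func TestRenewalTimeModifier(t *testing.T) {
	now := metav1.NewTime(time.Now())
	// before the rename this read: gen.SetCertificateRenewalTIme(now)
	crt := gen.Certificate("cert-1", gen.SetCertificateRenewalTime(now))
	if crt.Status.RenewalTime == nil || !crt.Status.RenewalTime.Equal(&now) {
		t.Fatal("renewal time not set")
	}
}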
@@ -25,6 +25,8 @@ _rtp_payload_types = { 31: 'H261', 32: 'MPV', 33: 'MP2T', 34: 'H263' } +extension_condition = lambda pkt: pkt.extension + class RTP(Packet): name="RTP" fields_desc = [ BitField('version', 2, 2),
1
## This file is part of Scapy ## See http://www.secdev.org/projects/scapy for more informations ## Copyright (C) Philippe Biondi <[email protected]> ## This program is published under a GPLv2 license """ RTP (Real-time Transport Protocol). """ from scapy.packet import * from scapy.fields import * _rtp_payload_types = { # http://www.iana.org/assignments/rtp-parameters 0: 'G.711 PCMU', 3: 'GSM', 4: 'G723', 5: 'DVI4', 6: 'DVI4', 7: 'LPC', 8: 'PCMA', 9: 'G722', 10: 'L16', 11: 'L16', 12: 'QCELP', 13: 'CN', 14: 'MPA', 15: 'G728', 16: 'DVI4', 17: 'DVI4', 18: 'G729', 25: 'CelB', 26: 'JPEG', 28: 'nv', 31: 'H261', 32: 'MPV', 33: 'MP2T', 34: 'H263' } class RTP(Packet): name="RTP" fields_desc = [ BitField('version', 2, 2), BitField('padding', 0, 1), BitField('extension', 0, 1), BitFieldLenField('numsync', None, 4, count_of='sync'), BitField('marker', 0, 1), BitEnumField('payload_type', 0, 7, _rtp_payload_types), ShortField('sequence', 0), IntField('timestamp', 0), IntField('sourcesync', 0), FieldListField('sync', [], IntField("id",0), count_from=lambda pkt:pkt.numsync) ]
1
10,597
Please note that adding this will import `extension_condition` across all of Scapy (as you are located in a layer). You might want to move this inside the RTP class? That would be better, I think.
secdev-scapy
py
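A hedged sketch of the reviewer's suggestion: attach the condition to the class so it stays out of the module namespace that `from scapy.all import *` would expose. The `RTP` class below is a plain stand-in for the real `Packet` subclass, kept dependency-free so it runs on its own:

# Stand-in for the Scapy Packet subclass defined above; the point is only
# that the lambda lives on the class, not at module level.
class RTP:
    extension_condition = staticmethod(lambda pkt: pkt.extension)

    def __init__(self, extension=0):
        self.extension = extension


pkt = RTP(extension=1)
print(RTP.extension_condition(pkt))  # 1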
@@ -72,7 +72,7 @@ module RSpec::Core end parser.on('--seed SEED', Integer, 'Equivalent of --order rand:SEED.') do |seed| - options[:order] = "rand:#{seed}" + options[:seed] = seed end parser.on('--fail-fast', 'Abort the run on first failure.') do |o|
1
# http://www.ruby-doc.org/stdlib/libdoc/optparse/rdoc/classes/OptionParser.html require 'optparse' module RSpec::Core class Parser def self.parse!(args) new.parse!(args) end class << self alias_method :parse, :parse! end def parse!(args) return {} if args.empty? convert_deprecated_args(args) options = args.delete('--tty') ? {:tty => true} : {} begin parser(options).parse!(args) rescue OptionParser::InvalidOption => e abort "#{e.message}\n\nPlease use --help for a listing of valid options" end options end def convert_deprecated_args(args) args.map! { |arg| case arg when "--formatter" RSpec.deprecate("the --formatter option", :replacement => "-f or --format") "--format" when "--default_path" "--default-path" when "--line_number" "--line-number" else arg end } end alias_method :parse, :parse! def parser(options) OptionParser.new do |parser| parser.banner = "Usage: rspec [options] [files or directories]\n\n" parser.on('-I PATH', 'Specify PATH to add to $LOAD_PATH (may be used more than once).') do |dir| options[:libs] ||= [] options[:libs] << dir end parser.on('-r', '--require PATH', 'Require a file.') do |path| options[:requires] ||= [] options[:requires] << path end parser.on('-O', '--options PATH', 'Specify the path to a custom options file.') do |path| options[:custom_options_file] = path end parser.on('--order TYPE[:SEED]', 'Run examples by the specified order type.', ' [default] files are ordered based on the underlying file', ' system\'s order', ' [rand] randomize the order of files, groups and examples', ' [random] alias for rand', ' [random:SEED] e.g. --order random:123') do |o| options[:order] = o end parser.on('--seed SEED', Integer, 'Equivalent of --order rand:SEED.') do |seed| options[:order] = "rand:#{seed}" end parser.on('--fail-fast', 'Abort the run on first failure.') do |o| options[:fail_fast] = true end parser.on('--no-fail-fast', 'Do not abort the run on first failure.') do |o| options[:fail_fast] = false end parser.on('--failure-exit-code CODE', Integer, 'Override the exit code used when there are failing specs.') do |code| options[:failure_exit_code] = code end parser.on('--dry-run', 'Print the formatter output of your suite without', ' running any examples or hooks') do |o| options[:dry_run] = true end parser.on('-X', '--[no-]drb', 'Run examples via DRb.') do |o| options[:drb] = o end parser.on('--drb-port PORT', 'Port to connect to the DRb server.') do |o| options[:drb_port] = o.to_i end parser.on('--init', 'Initialize your project with RSpec.') do |cmd| require 'rspec/core/project_initializer' ProjectInitializer.new(cmd).run exit end parser.on('--configure', 'Deprecated. Use --init instead.') do |cmd| RSpec.warning "--configure is deprecated with no effect. Use --init instead.", :call_site => nil exit end parser.separator("\n **** Output ****\n\n") parser.on('-f', '--format FORMATTER', 'Choose a formatter.', ' [p]rogress (default - dots)', ' [d]ocumentation (group and example names)', ' [h]tml', ' [j]son', ' custom formatter class name') do |o| options[:formatters] ||= [] options[:formatters] << [o] end parser.on('-o', '--out FILE', 'Write output to a file instead of $stdout. This option applies', ' to the previously specified --format, or the default format', ' if no format is specified.' 
) do |o| options[:formatters] ||= [['progress']] options[:formatters].last << o end parser.on('-b', '--backtrace', 'Enable full backtrace.') do |o| options[:full_backtrace] = true end parser.on('-c', '--[no-]color', '--[no-]colour', 'Enable color in the output.') do |o| options[:color] = o end parser.on('-p', '--[no-]profile [COUNT]', 'Enable profiling of examples and list the slowest examples (default: 10).') do |argument| options[:profile_examples] = if argument.nil? true elsif argument == false false else begin Integer(argument) rescue ArgumentError RSpec.warning "Non integer specified as profile count, seperate " + "your path from options with -- e.g. " + "`rspec --profile -- #{argument}`", :call_site => nil true end end end parser.on('-w', '--warnings', 'Enable ruby warnings') do options[:warnings] = true end parser.separator <<-FILTERING **** Filtering/tags **** In addition to the following options for selecting specific files, groups, or examples, you can select a single example by appending the line number to the filename: rspec path/to/a_spec.rb:37 FILTERING parser.on('-P', '--pattern PATTERN', 'Load files matching pattern (default: "spec/**/*_spec.rb").') do |o| options[:pattern] = o end parser.on('-e', '--example STRING', "Run examples whose full nested names include STRING (may be", " used more than once)") do |o| (options[:full_description] ||= []) << Regexp.compile(Regexp.escape(o)) end parser.on('-l', '--line-number LINE', 'Specify line number of an example or group (may be', ' used more than once).') do |o| (options[:line_numbers] ||= []) << o end parser.on('-t', '--tag TAG[:VALUE]', 'Run examples with the specified tag, or exclude examples', 'by adding ~ before the tag.', ' - e.g. ~slow', ' - TAG is always converted to a symbol') do |tag| filter_type = tag =~ /^~/ ? :exclusion_filter : :inclusion_filter name,value = tag.gsub(/^(~@|~|@)/, '').split(':',2) name = name.to_sym options[filter_type] ||= {} options[filter_type][name] = case value when nil then true # The default value for tags is true when 'true' then true when 'false' then false when 'nil' then nil when /^:/ then value[1..-1].to_sym when /^\d+$/ then Integer(value) when /^\d+.\d+$/ then Float(value) else value end end parser.on('--default-path PATH', 'Set the default path where RSpec looks for examples (can', ' be a path to a file or a directory).') do |path| options[:default_path] = path end parser.separator("\n **** Utility ****\n\n") parser.on('-v', '--version', 'Display the version.') do puts RSpec::Core::Version::STRING exit end parser.on_tail('-h', '--help', "You're looking at it.") do puts parser exit end end end end end
1
10,523
This will need updating, as I'm not sure we'll want to turn on random ordering if they've specified a seed for programmatic use.
rspec-rspec-core
rb
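One hedged way the downstream resolution could look once `--seed` is stored separately: the CLI keeps its documented "seed implies random order" behavior, while programmatic callers who only set a seed are not forced into random ordering unless the resolver says so. `effective_order` is hypothetical, not part of RSpec:

# Hypothetical resolver: an explicit order wins; a bare CLI seed still maps
# to rand:SEED; tweak the fallback if programmatic semantics should differ.
def effective_order(options)
  return options[:order] if options[:order]
  options[:seed] ? "rand:#{options[:seed]}" : 'defined'
end

puts effective_order(order: 'rand:123') # => rand:123
puts effective_order(seed: 42)          # => rand:42
puts effective_order({})                # => defined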
@@ -112,6 +112,9 @@ loader_init(void) char name_copy[MAXIMUM_PATH]; mod = privload_insert(NULL, privmod_static[i].base, privmod_static[i].size, privmod_static[i].name, privmod_static[i].path); +#ifdef CLIENT_INTERFACE + mod->is_client = true; +#endif LOG(GLOBAL, LOG_LOADER, 1, "%s: processing imports for %s\n", __FUNCTION__, mod->name); /* save a copy for error msg, b/c mod will be unloaded (i#643) */
1
/* ******************************************************************************* * Copyright (c) 2011-2019 Google, Inc. All rights reserved. * Copyright (c) 2010 Massachusetts Institute of Technology All rights reserved. * Copyright (c) 2009 Derek Bruening All rights reserved. * *******************************************************************************/ /* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of VMware, Inc. nor the names of its contributors may be * used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL VMWARE, INC. OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH * DAMAGE. */ /* * loader_shared.c: os independent code of DR's custom private library loader * * original case: i#157 */ #include "globals.h" #include "module_shared.h" #ifdef CLIENT_INTERFACE # include "instrument.h" /* for instrument_client_lib_unloaded */ #endif /* ok to be in .data w/ no sentinel head node b/c never empties out * .ntdll always there for Windows, so no need to unprot. * XXX: Does it hold for Linux? * It seems no library is must in Linux, even the loader. * Maybe the linux-gate or we just create a fake one? */ static privmod_t *modlist; /* Recursive library load could happen: * Linux: when load dependent library * Windows: redirect_* can be invoked from private libray * entry points. */ DECLARE_CXTSWPROT_VAR(recursive_lock_t privload_lock, INIT_RECURSIVE_LOCK(privload_lock)); /* Protected by privload_lock */ #ifdef DEBUG DECLARE_NEVERPROT_VAR(static uint privload_recurse_cnt, 0); #endif /* These are only written during init so ok to be in .data */ static privmod_t privmod_static[PRIVMOD_STATIC_NUM]; /* this marks end of privmod_static[] */ uint privmod_static_idx; /* We store client paths here to use for locating libraries later * Unfortunately we can't use dynamic storage, and the paths are clobbered * immediately by instrument_load_client_libs, so we have max space here. 
*/ char search_paths[SEARCH_PATHS_NUM][MAXIMUM_PATH]; /* this marks end of search_paths[] */ uint search_paths_idx; /* Used for in_private_library() */ vm_area_vector_t *modlist_areas; /* forward decls */ static bool privload_load_finalize(privmod_t *privmod); static bool privload_has_thread_entry(void); static bool privload_modlist_initialized(void); /***************************************************************************/ void loader_init(void) { uint i; privmod_t *mod; acquire_recursive_lock(&privload_lock); VMVECTOR_ALLOC_VECTOR(modlist_areas, GLOBAL_DCONTEXT, VECTOR_SHARED | VECTOR_NEVER_MERGE /* protected by privload_lock */ | VECTOR_NO_LOCK, modlist_areas); /* os specific loader initialization prologue before finalize the load */ os_loader_init_prologue(); /* Process client libs we loaded early but did not finalize */ for (i = 0; i < privmod_static_idx; i++) { /* Transfer to real list so we can do normal processing */ char name_copy[MAXIMUM_PATH]; mod = privload_insert(NULL, privmod_static[i].base, privmod_static[i].size, privmod_static[i].name, privmod_static[i].path); LOG(GLOBAL, LOG_LOADER, 1, "%s: processing imports for %s\n", __FUNCTION__, mod->name); /* save a copy for error msg, b/c mod will be unloaded (i#643) */ snprintf(name_copy, BUFFER_SIZE_ELEMENTS(name_copy), "%s", mod->name); NULL_TERMINATE_BUFFER(name_copy); if (!privload_load_finalize(mod)) { mod = NULL; /* it's been unloaded! */ #ifdef CLIENT_INTERFACE SYSLOG(SYSLOG_ERROR, CLIENT_LIBRARY_UNLOADABLE, 5, get_application_name(), get_application_pid(), name_copy, "\n\tUnable to locate imports of client library"); #endif os_terminate(NULL, TERMINATE_PROCESS); ASSERT_NOT_REACHED(); } } /* os specific loader initialization epilogue after finalize the load */ os_loader_init_epilogue(); release_recursive_lock(&privload_lock); } void loader_exit(void) { /* We must unload for detach so can't leave them loaded */ acquire_recursive_lock(&privload_lock); /* The list is kept in reverse-dependent order so we can unload from the * front without breaking dependencies. */ while (modlist != NULL) privload_unload(modlist); /* os related loader finalization */ os_loader_exit(); vmvector_delete_vector(GLOBAL_DCONTEXT, modlist_areas); release_recursive_lock(&privload_lock); DELETE_RECURSIVE_LOCK(privload_lock); } void loader_thread_init(dcontext_t *dcontext) { privmod_t *mod; if (modlist == NULL) { #ifdef WINDOWS /* FIXME i#338: once restore order this will become nop */ /* os specific thread initilization prologue for loader with no lock */ os_loader_thread_init_prologue(dcontext); /* os specific thread initilization epilogue for loader with no lock */ os_loader_thread_init_epilogue(dcontext); #endif /* WINDOWS */ } else { /* os specific thread initilization prologue for loader with no lock */ os_loader_thread_init_prologue(dcontext); if (privload_has_thread_entry()) { /* We rely on lock isolation to prevent deadlock while we're here * holding privload_lock and the priv lib * DllMain may acquire the same lock that another thread acquired * in its app code before requesting a synchall (flush, exit). * FIXME i#875: we do not have ntdll!RtlpFlsLock isolated. * Living w/ it for now. It should be unlikely for the app to * hold RtlpFlsLock and then acquire privload_lock: privload_lock * is used for import redirection but those don't apply within * ntdll. */ ASSERT_OWN_NO_LOCKS(); acquire_recursive_lock(&privload_lock); /* Walk forward and call independent libs last. * We do notify priv libs of client threads. 
*/ for (mod = modlist; mod != NULL; mod = mod->next) { if (!mod->externally_loaded) privload_call_entry(mod, DLL_THREAD_INIT); } release_recursive_lock(&privload_lock); } /* os specific thread initilization epilogue for loader with no lock */ os_loader_thread_init_epilogue(dcontext); } } void loader_thread_exit(dcontext_t *dcontext) { privmod_t *mod; /* assuming context swap have happened when entered DR */ if (privload_has_thread_entry() && /* Only call if we're cleaning up the currently executing thread, as * that's what the entry routine is going to do! Calling on other * threads results in problems like double frees (i#969). Exiting * another thread should only happen on process exit or forced thread * termination. The former can technically continue (app could call * NtTerminateProcess(0) but then keep going) but we have never seen * that; and the latter doesn't do full native cleanups anyway. Thus * we're not worried about leaks from not calling DLL_THREAD_EXIT. * (We can't check get_thread_private_dcontext() b/c it's already cleared.) */ dcontext->owning_thread == d_r_get_thread_id()) { acquire_recursive_lock(&privload_lock); /* Walk forward and call independent libs last */ for (mod = modlist; mod != NULL; mod = mod->next) { if (!mod->externally_loaded) privload_call_entry(mod, DLL_THREAD_EXIT); } release_recursive_lock(&privload_lock); } /* os specific thread exit for loader, holding no lock */ os_loader_thread_exit(dcontext); } /* Given a path-less name, locates and loads a private library for DR's client. * Will also accept a full path. */ app_pc locate_and_load_private_library(const char *name, bool reachable) { DODEBUG(privload_recurse_cnt = 0;); return privload_load_private_library(name, reachable); } /* Load private library for DR's client. Must be passed a full path. */ app_pc load_private_library(const char *filename, bool reachable) { app_pc res = NULL; privmod_t *privmod; /* Simpler to lock up front than to unmap on race. All helper routines * assume the lock is held. */ acquire_recursive_lock(&privload_lock); privmod = privload_lookup(filename); /* XXX: If the private lib has been loaded, shall we increase the counter * or report error? 
*/ if (privmod == NULL) { DODEBUG(privload_recurse_cnt = 0;); privmod = privload_load(filename, NULL, reachable); } if (privmod != NULL) res = privmod->base; release_recursive_lock(&privload_lock); return res; } bool unload_private_library(app_pc modbase) { privmod_t *mod; bool res = false; acquire_recursive_lock(&privload_lock); mod = privload_lookup_by_base(modbase); if (mod != NULL) { res = true; /* don't care if refcount hit 0 or not */ privload_unload(mod); } release_recursive_lock(&privload_lock); return res; } bool in_private_library(app_pc pc) { return vmvector_overlap(modlist_areas, pc, pc + 1); } /* Caseless and "separator agnostic" (i#1869) */ static int pathcmp(const char *left, const char *right) { size_t i; for (i = 0; left[i] != '\0' || right[i] != '\0'; i++) { int l = tolower(left[i]); int r = tolower(right[i]); if (l == '/') l = '\\'; if (r == '/') r = '\\'; if (l < r) return -1; if (l > r) return 1; } return 0; } /* Lookup the private loaded library either by basename or by path */ privmod_t * privload_lookup(const char *name) { privmod_t *mod; bool by_path; ASSERT_OWN_RECURSIVE_LOCK(true, &privload_lock); if (name == NULL || name[0] == '\0') return NULL; by_path = IF_WINDOWS_ELSE(double_strrchr(name, DIRSEP, ALT_DIRSEP), strrchr(name, DIRSEP)) != NULL; if (!privload_modlist_initialized()) { uint i; for (i = 0; i < privmod_static_idx; i++) { mod = &privmod_static[i]; if ((by_path && pathcmp(name, mod->path) == 0) || (!by_path && strcasecmp(name, mod->name) == 0)) return mod; } } else { for (mod = modlist; mod != NULL; mod = mod->next) { if ((by_path && pathcmp(name, mod->path) == 0) || (!by_path && strcasecmp(name, mod->name) == 0)) return mod; } } return NULL; } /* Lookup the private loaded library by base */ privmod_t * privload_lookup_by_base(app_pc modbase) { privmod_t *mod; ASSERT_OWN_RECURSIVE_LOCK(true, &privload_lock); if (!privload_modlist_initialized()) { uint i; for (i = 0; i < privmod_static_idx; i++) { if (privmod_static[i].base == modbase) return &privmod_static[i]; } } else { for (mod = modlist; mod != NULL; mod = mod->next) { if (modbase == mod->base) return mod; } } return NULL; } /* Lookup the private loaded library by base */ privmod_t * privload_lookup_by_pc(app_pc pc) { privmod_t *mod; ASSERT_OWN_RECURSIVE_LOCK(true, &privload_lock); if (!privload_modlist_initialized()) { uint i; for (i = 0; i < privmod_static_idx; i++) { if (pc >= privmod_static[i].base && pc < privmod_static[i].base + privmod_static[i].size) return &privmod_static[i]; } } else { for (mod = modlist; mod != NULL; mod = mod->next) { if (pc >= mod->base && pc < mod->base + mod->size) return mod; } } return NULL; } /* Insert privmod after *after * name is assumed to be in immutable persistent storage. * a copy of path is made. */ privmod_t * privload_insert(privmod_t *after, app_pc base, size_t size, const char *name, const char *path) { privmod_t *mod; /* We load client libs before heap is initialized so we use a * static array of initial privmod_t structs until we can fully * load and create proper list entries. 
*/ if (privload_modlist_initialized()) mod = HEAP_TYPE_ALLOC(GLOBAL_DCONTEXT, privmod_t, ACCT_OTHER, PROTECTED); else { /* temporarily use array */ if (privmod_static_idx >= PRIVMOD_STATIC_NUM) { ASSERT_NOT_REACHED(); return NULL; } mod = &privmod_static[privmod_static_idx]; ++privmod_static_idx; ++search_paths_idx; } ASSERT_OWN_RECURSIVE_LOCK(true, &privload_lock); mod->base = base; mod->size = size; mod->name = name; strncpy(mod->path, path, BUFFER_SIZE_ELEMENTS(mod->path)); mod->os_privmod_data = NULL; /* filled in later */ NULL_TERMINATE_BUFFER(mod->path); /* i#489 DT_SONAME is optional and name passed in could be NULL. * If so, we get libname from path instead. */ if (IF_UNIX_ELSE(mod->name == NULL, false)) { mod->name = double_strrchr(mod->path, DIRSEP, ALT_DIRSEP); if (mod->name == NULL) mod->name = mod->path; } mod->ref_count = 1; mod->externally_loaded = false; #ifdef CLIENT_INTERFACE mod->is_client = false; /* up to caller to set later */ #endif /* do not add non-heap struct to list: in init() we'll move array to list */ if (privload_modlist_initialized()) { if (after == NULL) { bool prot = DATASEC_PROTECTED(DATASEC_RARELY_PROT); mod->next = modlist; mod->prev = NULL; if (prot) SELF_UNPROTECT_DATASEC(DATASEC_RARELY_PROT); if (modlist != NULL) modlist->prev = mod; modlist = mod; if (prot) SELF_PROTECT_DATASEC(DATASEC_RARELY_PROT); } else { /* we insert after dependent libs so we can unload in forward order */ mod->prev = after; mod->next = after->next; if (after->next != NULL) after->next->prev = mod; after->next = mod; } } return (void *)mod; } static bool privload_search_path_exists(const char *path, size_t len) { uint i; ASSERT_OWN_RECURSIVE_LOCK(true, &privload_lock); for (i = 0; i < search_paths_idx; i++) { if (IF_UNIX_ELSE(strncmp, strncasecmp)(search_paths[i], path, len) == 0) return true; } return false; } /* i#955: we support a <basename>.drpath text file listing search paths. * XXX i#1078: should we support something like DT_RPATH's $ORIGIN for relative * entries in this file? */ static void privload_read_drpath_file(const char *libname) { char path[MAXIMUM_PATH]; char *end = strrchr(libname, '.'); if (end == NULL) return; ASSERT_OWN_RECURSIVE_LOCK(true, &privload_lock); snprintf(path, BUFFER_SIZE_ELEMENTS(path), "%.*s.%s", end - libname, libname, DR_RPATH_SUFFIX); NULL_TERMINATE_BUFFER(path); LOG(GLOBAL, LOG_LOADER, 3, "%s: looking for %s\n", __FUNCTION__, path); if (os_file_exists(path, false /*!is_dir*/)) { /* Easiest to parse by mapping. It's a newline-separated list of * paths. We support carriage returns as well. 
*/ file_t f = os_open(path, OS_OPEN_READ); char *map; size_t map_size; uint64 file_size; if (f != INVALID_FILE && os_get_file_size_by_handle(f, &file_size)) { LOG(GLOBAL, LOG_LOADER, 2, "%s: reading %s\n", __FUNCTION__, path); ASSERT_TRUNCATE(map_size, size_t, file_size); map_size = (size_t)file_size; map = (char *)os_map_file(f, &map_size, 0, NULL, MEMPROT_READ, 0); if (map != NULL && map_size >= file_size) { const char *s = (char *)map; const char *nl; while (s < map + file_size && search_paths_idx < SEARCH_PATHS_NUM) { for (nl = s; nl < map + file_size && *nl != '\r' && *nl != '\n'; nl++) { } if (nl == s) break; if (!privload_search_path_exists(s, nl - s)) { snprintf(search_paths[search_paths_idx], BUFFER_SIZE_ELEMENTS(search_paths[search_paths_idx]), "%.*s", nl - s, s); NULL_TERMINATE_BUFFER(search_paths[search_paths_idx]); LOG(GLOBAL, LOG_LOADER, 1, "%s: added search dir \"%s\"\n", __FUNCTION__, search_paths[search_paths_idx]); search_paths_idx++; } s = nl + 1; while (s < map + file_size && (*s == '\r' || *s == '\n')) s++; } os_unmap_file((byte *)map, map_size); } os_close(f); } } } privmod_t * privload_load(const char *filename, privmod_t *dependent, bool client) { app_pc map; size_t size; privmod_t *privmod; /* i#350: it would be nice to have no-dcontext try/except support: * then we could wrap the whole load process, like ntdll!Ldr does */ ASSERT_OWN_RECURSIVE_LOCK(true, &privload_lock); DOCHECK(1, { /* we have limited stack but we don't expect deep recursion */ privload_recurse_cnt++; ASSERT_CURIOSITY(privload_recurse_cnt < 20); /* win7 dbghelp gets to 12 */ }); LOG(GLOBAL, LOG_LOADER, 2, "%s: loading %s\n", __FUNCTION__, filename); map = privload_map_and_relocate(filename, &size, client ? MODLOAD_REACHABLE : 0); if (map == NULL) { LOG(GLOBAL, LOG_LOADER, 1, "%s: failed to map %s\n", __FUNCTION__, filename); return NULL; } /* i#955: support a <basename>.drpath file for search paths */ privload_read_drpath_file(filename); /* For direct client libs (not dependent libs), * keep a copy of the lib path for use in searching: we'll strdup in loader_init. * This needs to come before privload_insert which will inc search_paths_idx. * There should be very few of these (normally just 1), so we don't call * privload_search_path_exists() (which would require refactoring when the * search_paths_idx increment happens). 
*/ if (!privload_modlist_initialized()) { const char *end = double_strrchr(filename, DIRSEP, ALT_DIRSEP); ASSERT(search_paths_idx < SEARCH_PATHS_NUM); if (end != NULL && end - filename < BUFFER_SIZE_ELEMENTS(search_paths[search_paths_idx])) { snprintf(search_paths[search_paths_idx], end - filename, "%s", filename); NULL_TERMINATE_BUFFER(search_paths[search_paths_idx]); } else ASSERT_NOT_REACHED(); /* should never have client lib path so big */ } /* Add to list before processing imports in case of mutually dependent libs */ /* Since we control when unmapped, we can use orig export name string and * don't need strdup */ /* Add after its dependent to preserve forward-can-unload order */ privmod = privload_insert(dependent, map, size, get_shared_lib_name(map), filename); /* If no heap yet, we'll call finalize later in loader_init() */ if (privmod != NULL && privload_modlist_initialized()) { if (!privload_load_finalize(privmod)) return NULL; } #ifdef CLIENT_INTERFACE if (privmod->is_client) instrument_client_lib_loaded(privmod->base, privmod->base + privmod->size); #endif return privmod; } bool privload_unload(privmod_t *privmod) { ASSERT_OWN_RECURSIVE_LOCK(true, &privload_lock); ASSERT(privload_modlist_initialized()); ASSERT(privmod->ref_count > 0); privmod->ref_count--; LOG(GLOBAL, LOG_LOADER, 2, "%s: %s refcount => %d\n", __FUNCTION__, privmod->name, privmod->ref_count); if (privmod->ref_count == 0) { LOG(GLOBAL, LOG_LOADER, 1, "%s: unloading %s @ " PFX "\n", __FUNCTION__, privmod->name, privmod->base); #ifdef CLIENT_INTERFACE if (privmod->is_client) instrument_client_lib_unloaded(privmod->base, privmod->base + privmod->size); #endif if (privmod->prev == NULL) { bool prot = DATASEC_PROTECTED(DATASEC_RARELY_PROT); if (prot) SELF_UNPROTECT_DATASEC(DATASEC_RARELY_PROT); modlist = privmod->next; if (prot) SELF_PROTECT_DATASEC(DATASEC_RARELY_PROT); } else privmod->prev->next = privmod->next; if (privmod->next != NULL) privmod->next->prev = privmod->prev; if (!privmod->externally_loaded) { privload_call_entry(privmod, DLL_PROCESS_EXIT); /* this routine may modify modlist, but we're done with it */ privload_unload_imports(privmod); privload_remove_areas(privmod); /* unmap_file removes from DR areas and calls d_r_unmap_file(). * It's ok to call this for client libs: ok to remove what's not there. */ privload_unmap_file(privmod); } HEAP_TYPE_FREE(GLOBAL_DCONTEXT, privmod, privmod_t, ACCT_OTHER, PROTECTED); return true; } return false; } #ifdef X64 # define LIB_SUBDIR "lib64" #else # define LIB_SUBDIR "lib32" #endif #define EXT_SUBDIR "ext" #define DRMF_SUBDIR "drmemory/drmf" static void privload_add_subdir_path(const char *subdir) { const char *path, *mid, *end; ASSERT_OWN_RECURSIVE_LOCK(true, &privload_lock); /* We support loading from various subdirs of the DR package. We * locate these by assuming dynamorio.dll is in * <prefix>/lib{32,64}/{debug,release}/ and searching backward for * that lib{32,64} part. We assume that "subdir" is followed * by the same /lib{32,64}/{debug,release}/. * XXX: this does not work from a build dir: only using exports! 
*/ path = get_dynamorio_library_path(); mid = strstr(path, LIB_SUBDIR); if (mid != NULL && search_paths_idx < SEARCH_PATHS_NUM && (strlen(path) + strlen(subdir) + 1 /*sep*/) < BUFFER_SIZE_ELEMENTS(search_paths[search_paths_idx])) { char *s = search_paths[search_paths_idx]; snprintf(s, mid - path, "%s", path); s += (mid - path); snprintf(s, strlen(subdir) + 1 /*sep*/, "%s%c", subdir, DIRSEP); s += strlen(subdir) + 1 /*sep*/; end = double_strrchr(path, DIRSEP, ALT_DIRSEP); if (end != NULL && search_paths_idx < SEARCH_PATHS_NUM) { snprintf(s, end - mid, "%s", mid); NULL_TERMINATE_BUFFER(search_paths[search_paths_idx]); LOG(GLOBAL, LOG_LOADER, 1, "%s: added Extension search dir %s\n", __FUNCTION__, search_paths[search_paths_idx]); search_paths_idx++; } } } void privload_add_drext_path(void) { /* We support loading from the Extensions dir: * <prefix>/ext/lib{32,64}/{debug,release}/ * Xref i#277/PR 540817. */ privload_add_subdir_path(EXT_SUBDIR); /* We also support loading from a co-located DRMF package. */ privload_add_subdir_path(DRMF_SUBDIR); } /* most uses should call privload_load() instead * if it fails, unloads */ static bool privload_load_finalize(privmod_t *privmod) { ASSERT_OWN_RECURSIVE_LOCK(true, &privload_lock); ASSERT(!privmod->externally_loaded); privload_add_areas(privmod); privload_redirect_setup(privmod); if (!privload_process_imports(privmod)) { LOG(GLOBAL, LOG_LOADER, 1, "%s: failed to process imports %s\n", __FUNCTION__, privmod->name); privload_unload(privmod); return false; } privload_os_finalize(privmod); if (!privload_call_entry(privmod, DLL_PROCESS_INIT)) { LOG(GLOBAL, LOG_LOADER, 1, "%s: entry routine failed\n", __FUNCTION__); privload_unload(privmod); return false; } privload_load_finalized(privmod); LOG(GLOBAL, LOG_LOADER, 1, "%s: loaded %s @ " PFX "-" PFX " from %s\n", __FUNCTION__, privmod->name, privmod->base, privmod->base + privmod->size, privmod->path); return true; } static bool privload_has_thread_entry(void) { return IF_UNIX_ELSE(false, true); } static bool privload_modlist_initialized(void) { return dynamo_heap_initialized; } privmod_t * privload_next_module(privmod_t *mod) { ASSERT_OWN_RECURSIVE_LOCK(true, &privload_lock); return mod->next; } privmod_t * privload_first_module(void) { ASSERT_OWN_RECURSIVE_LOCK(true, &privload_lock); return modlist; } /* returns whether they all fit */ bool privload_print_modules(bool path, bool lock, char *buf, size_t bufsz, size_t *sofar) { privmod_t *mod; if (lock) acquire_recursive_lock(&privload_lock); for (mod = modlist; mod != NULL; mod = mod->next) { if (!mod->externally_loaded) { if (!print_to_buffer(buf, bufsz, sofar, "%s=" PFX "\n", path ? mod->path : mod->name, mod->base)) { if (lock) release_recursive_lock(&privload_lock); return false; } } } if (lock) release_recursive_lock(&privload_lock); return true; }
1
19,140
Are you sure these are all client libs? This seems fragile: if we later load something else early, this will break. It seems best to have some kind of sanity check: at least add an assert in the current location where this flag is set.
DynamoRIO-dynamorio
c
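A hedged sketch of the sanity check this reviewer suggests: assert that an early (pre-heap) module really is a client library before blanket-marking it, so a future early-loaded non-client lib trips the assert instead of being silently mislabeled. `is_known_client_lib` is a placeholder heuristic, not DynamoRIO code:

#include <assert.h>
#include <stdbool.h>
#include <stddef.h>
#include <string.h>

typedef struct {
    const char *name;
    bool is_client;
} privmod_t;

/* Placeholder for a real check against the paths of the client libs we
 * ourselves loaded early; any other early module should trip the assert. */
static bool
is_known_client_lib(const privmod_t *mod)
{
    return mod->name != NULL && strstr(mod->name, "client") != NULL;
}

int
main(void)
{
    privmod_t mod = { "libclient.so", false };
    assert(is_known_client_lib(&mod)); /* the sanity check the review asks for */
    mod.is_client = true;
    return 0;
}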
@@ -2167,7 +2167,9 @@ namespace pwiz.Skyline.Model.Lib } if (!string.IsNullOrEmpty(OtherKeys)) { - smallMolLines.Add(new KeyValuePair<string, string> (Resources.SmallMoleculeLibraryAttributes_KeyValuePairs_OtherIDs, OtherKeys.Replace('\t','\n'))); + // Add a separate line for each molecule accession number + var accessionNumDict = MoleculeAccessionNumbers.FormatAccessionNumbers(OtherKeys); + smallMolLines.AddRange(accessionNumDict.Select(pair => new KeyValuePair<string, string>(pair.Key, pair.Value))); } return smallMolLines; }
1
/* * Original author: Brendan MacLean <brendanx .at. u.washington.edu>, * MacCoss Lab, Department of Genome Sciences, UW * * Copyright 2009 University of Washington - Seattle, WA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Globalization; using System.IO; using System.Linq; using System.Text; using System.Xml; using System.Xml.Schema; using System.Xml.Serialization; using pwiz.Common.Chemistry; using pwiz.Common.Collections; using pwiz.Common.SystemUtil; using pwiz.Skyline.Model.AuditLog; using pwiz.Skyline.Model.Crosslinking; using pwiz.Skyline.Model.DocSettings; using pwiz.Skyline.Model.DocSettings.Extensions; using pwiz.Skyline.Model.Irt; using pwiz.Skyline.Model.Lib.ChromLib; using pwiz.Skyline.Model.Lib.Midas; using pwiz.Skyline.Model.Prosit; using pwiz.Skyline.Model.Results; using pwiz.Skyline.Model.RetentionTimes; using pwiz.Skyline.Properties; using pwiz.Skyline.Util; using pwiz.Skyline.Util.Extensions; namespace pwiz.Skyline.Model.Lib { public sealed class LibraryManager : BackgroundLoader { private readonly Dictionary<string, Library> _loadedLibraries = new Dictionary<string, Library>(); private readonly Dictionary<string, LibraryLoadLock> _loadingLibraries = new Dictionary<string, LibraryLoadLock>(); private class LibraryLoadLock { public Library Library { get; set; } public bool IsLoaded { get; set; } } public override void ClearCache() { lock(_loadedLibraries) { _loadedLibraries.Clear(); } } protected override bool StateChanged(SrmDocument document, SrmDocument previous) { return previous == null || !ReferenceEquals(document.Settings.PeptideSettings.Libraries, previous.Settings.PeptideSettings.Libraries) || !ReferenceEquals(document.Settings.MeasuredResults, previous.Settings.MeasuredResults); } protected override string IsNotLoadedExplained(SrmDocument document) { PeptideLibraries libraries = document.Settings.PeptideSettings.Libraries; if (document.Settings.MeasuredResults != null) { var missingFiles = MidasLibrary.GetMissingFiles(document, new Library[0]); if (missingFiles.Any()) { return TextUtil.LineSeparate(@"MIDAS library is missing files:", TextUtil.LineSeparate(missingFiles)); } } return !libraries.HasLibraries ? 
null : libraries.IsNotLoadedExplained; } protected override IEnumerable<IPooledStream> GetOpenStreams(SrmDocument document) { if (document == null) yield break; var libraries = document.Settings.PeptideSettings.Libraries.Libraries; foreach (var readStream in libraries.Where(library => library != null) .SelectMany(library => library.ReadStreams)) { yield return readStream; } } protected override bool IsCanceled(IDocumentContainer container, object tag) { if (tag == null) return false; PeptideLibraries libraries = container.Document.Settings.PeptideSettings.Libraries; var missingMidasFiles = tag as string[]; if (missingMidasFiles != null) { return !missingMidasFiles.SequenceEqual(MidasLibrary.GetMissingFiles(container.Document, new Library[0])); } return !libraries.LibrarySpecs.Contains((LibrarySpec)tag); } protected override bool LoadBackground(IDocumentContainer container, SrmDocument document, SrmDocument docCurrent) { var libraries = docCurrent.Settings.PeptideSettings.Libraries; var dictLibraries = new Dictionary<string, Library>(); try { foreach (LibrarySpec spec in libraries.LibrarySpecsUnloaded) { if (spec == null || dictLibraries.ContainsKey(spec.Name)) continue; var library = LoadLibrary(container, spec); if (library == null || !ReferenceEquals(document.Id, container.Document.Id)) { // Loading was cancelled or document changed EndProcessing(document); return false; } dictLibraries.Add(spec.Name, library); } var missingMidasFiles = MidasLibrary.GetMissingFiles(document, libraries.Libraries); var midasLibPath = MidasLibSpec.GetLibraryFileName(container.DocumentFilePath); var midasLibSpec = libraries.MidasLibrarySpecs.FirstOrDefault(libSpec => Equals(libSpec.FilePath, midasLibPath)); var newMidasLibSpec = missingMidasFiles.Any() && midasLibSpec == null; MidasLibrary midasLibrary = null; var failedMidasFiles = new List<MsDataFilePath>(); if (missingMidasFiles.Any()) { if (midasLibSpec == null) { // Need to add MIDAS LibSpec to document midasLibSpec = (MidasLibSpec)LibrarySpec.CreateFromPath(MidasLibSpec.GetName(container.DocumentFilePath, libraries.LibrarySpecs), midasLibPath); } MidasLibrary.AddSpectra(midasLibSpec, missingMidasFiles.Select(f => new MsDataFilePath(f)).ToArray(), docCurrent, new LoadMonitor(this, container, null), out failedMidasFiles); if (failedMidasFiles.Count < missingMidasFiles.Length) { if (!newMidasLibSpec) ReloadLibraries(container, midasLibSpec); midasLibrary = (MidasLibrary) LoadLibrary(midasLibSpec, () => new LoadMonitor(this, container, !newMidasLibSpec ? midasLibSpec : null)); if (midasLibrary != null && !dictLibraries.ContainsKey(midasLibSpec.Name)) dictLibraries.Add(midasLibSpec.Name, midasLibrary); } else { midasLibSpec = null; newMidasLibSpec = false; } } SrmDocument docNew; do { // Look for unloaded libraries in the current document that match // those loaded. docCurrent = container.Document; libraries = docCurrent.Settings.PeptideSettings.Libraries; bool changed = false; var list = new List<Library>(); foreach (LibrarySpec spec in libraries.LibrarySpecs) { if (spec == null) continue; Library libraryExisting = libraries.GetLibrary(spec.Name); Library libraryLoaded; if ((libraryExisting != null && libraryExisting.IsLoaded) || !dictLibraries.TryGetValue(spec.Name, out libraryLoaded)) list.Add(libraryExisting); else { list.Add(libraryLoaded); changed = true; } } // If nothing changed, end without changing the document. 
if (!changed && !newMidasLibSpec && !failedMidasFiles.Any()) { return false; } docNew = docCurrent; if (newMidasLibSpec) { // We need to add this MIDAS LibrarySpec to the document var libSpecs = libraries.LibrarySpecs.ToList(); libSpecs.Add(midasLibSpec); docNew = docNew.ChangeSettings(docNew.Settings.ChangePeptideLibraries(libs => libs.ChangeLibrarySpecs(libSpecs))); libraries = docNew.Settings.PeptideSettings.Libraries; list.Add(midasLibrary); docNew.Settings.UpdateLists(container.DocumentFilePath); // Switch to pick by filter if there are no other libraries if (libSpecs.Count == 1) { libraries = libraries .ChangeRankId(null) .ChangePick(PeptidePick.filter); docNew = docNew.ChangeSettings(docNew.Settings.ChangeTransitionSettings( settings => settings.ChangeLibraries(settings.Libraries.ChangePick(TransitionLibraryPick.none)))); } } libraries = libraries.ChangeLibraries(list.ToArray()); if (missingMidasFiles.Any() && docNew.Settings.HasResults) { var newChromatograms = MidasLibrary.UnflagFiles(docNew.Settings.MeasuredResults.Chromatograms, missingMidasFiles.Select(Path.GetFileName)).ToList(); if (!ArrayUtil.ReferencesEqual(docNew.Settings.MeasuredResults.Chromatograms, newChromatograms)) { docNew = docNew.ChangeMeasuredResults(docNew.Settings.MeasuredResults.ChangeChromatograms(newChromatograms)); } } using (var settingsChangeMonitor = new SrmSettingsChangeMonitor( new LoadMonitor(this, container, null), Resources.LibraryManager_LoadBackground_Updating_library_settings_for__0_, container, docCurrent)) { try { docNew = docNew.ChangeSettings(docNew.Settings.ChangePeptideSettings( docNew.Settings.PeptideSettings.ChangeLibraries(libraries)), settingsChangeMonitor); } catch (InvalidDataException x) { settingsChangeMonitor.ChangeProgress(s => s.ChangeErrorException(x)); break; } catch (OperationCanceledException) { docNew = docCurrent; // Just continue } } } while (!CompleteProcessing(container, docNew, docCurrent)); } finally { foreach (var library in dictLibraries.Values.Where(lib => lib.ReadStream != null)) { lock (library.ReadStream) { library.ReadStream.CloseStream(); } } EndProcessing(docCurrent); } return true; } public Library LoadLibrary(LibrarySpec spec, Func<ILoadMonitor> getMonitor) { LibraryLoadLock loadLock; lock (_loadedLibraries) { Library library; if (_loadedLibraries.TryGetValue(spec.Name, out library)) { if (Equals(spec, library.CreateSpec(library.FileNameHint))) { return library; } else { _loadedLibraries.Remove(spec.Name); } } // If the library has not yet been loaded, then create a new lock // for everyone to wait on until the library has been loaded. if (!_loadingLibraries.TryGetValue(spec.Name, out loadLock)) { loadLock = new LibraryLoadLock(); _loadingLibraries.Add(spec.Name, loadLock); } } lock (loadLock) { if (!loadLock.IsLoaded) { loadLock.Library = spec.LoadLibrary(getMonitor()); loadLock.IsLoaded = true; } } lock (_loadedLibraries) { _loadingLibraries.Remove(spec.Name); if (loadLock.Library != null) { // Update the newly loaded library in the dictionary, regardless of whether // we were the thread that actually did the loading. 
_loadedLibraries[spec.Name] = loadLock.Library; } return loadLock.Library; } } private Library LoadLibrary(IDocumentContainer container, LibrarySpec spec) { return LoadLibrary(spec, () => new LoadMonitor(this, container, spec)); } public void ReloadLibraries(IDocumentContainer container, params LibrarySpec[] specs) { lock (_loadedLibraries) { foreach (var spec in specs) { _loadedLibraries.Remove(spec.Name); } ForDocumentLibraryReload(container, specs.Select(spec => spec.Name).ToArray()); } } public void ReleaseLibraries(params LibrarySpec[] specs) { lock (_loadedLibraries) { foreach (var spec in specs) { _loadedLibraries.Remove(spec.Name); } } } public void UnloadChangedLibraries(IEnumerable<LibrarySpec> specs) { lock (_loadedLibraries) { foreach (var spec in specs) { Library library; if (_loadedLibraries.TryGetValue(spec.Name, out library)) { var specCompare = library.CreateSpec(library.FileNameHint); if (!Equals(spec, specCompare)) { _loadedLibraries.Remove(spec.Name); } } } } } public Library TryGetLibrary(LibrarySpec spec) { lock (_loadedLibraries) { Library library; _loadedLibraries.TryGetValue(spec.Name, out library); return library; } } public delegate bool BuildFunction(IDocumentContainer documentContainer, ILibraryBuilder libraryBuilder, IProgressMonitor monitor, BuildState buildState); public sealed class BuildState { public BuildState(LibrarySpec librarySpec, BuildFunction buildFunc) { LibrarySpec = librarySpec; BuildFunc = buildFunc; } public LibrarySpec LibrarySpec { get; private set; } public BuildFunction BuildFunc { get; private set; } public string BuildCommandArgs { get; set; } public string BuildOutput { get; set; } public string ExtraMessage { get; set; } public IrtStandard IrtStandard { get; set; } } public void BuildLibrary(IDocumentContainer container, ILibraryBuilder builder, Action<BuildState, bool> callback) { var monitor = new LibraryBuildMonitor(this, container); var buildState = new BuildState(builder.LibrarySpec, BuildLibraryBackground); ActionUtil.RunAsync(() => callback(buildState, BuildLibraryBackground(container, builder, monitor, buildState)), @"Library Build"); } public bool BuildLibraryBackground(IDocumentContainer container, ILibraryBuilder builder, IProgressMonitor monitor, BuildState buildState) { LocalizationHelper.InitThread(); // Avoid building a library that is loading or allowing the library to be loaded // while it is building LibraryLoadLock loadLock; lock (_loadedLibraries) { if (!_loadingLibraries.TryGetValue(builder.LibrarySpec.Name, out loadLock)) { loadLock = new LibraryLoadLock(); _loadingLibraries.Add(builder.LibrarySpec.Name, loadLock); } } bool success; lock (loadLock) { success = builder.BuildLibrary(monitor); var iRTCapableBuilder = builder as IiRTCapableLibraryBuilder; if (null != iRTCapableBuilder) { buildState.BuildCommandArgs = iRTCapableBuilder.BuildCommandArgs; buildState.BuildOutput = iRTCapableBuilder.BuildOutput; if (!string.IsNullOrEmpty(iRTCapableBuilder.AmbiguousMatchesMessage)) { buildState.ExtraMessage = iRTCapableBuilder.AmbiguousMatchesMessage; } if (iRTCapableBuilder.IrtStandard != null && !iRTCapableBuilder.IrtStandard.Name.Equals(IrtStandard.EMPTY.Name)) { buildState.IrtStandard = iRTCapableBuilder.IrtStandard; } } } lock (_loadedLibraries) { _loadingLibraries.Remove(builder.LibrarySpec.Name); if (success) { // If the library was already loaded, make sure the new copy // replaces the load in the library load cache. 
string name = builder.LibrarySpec.Name; _loadedLibraries.Remove(name); // If the current document contains the newly built library, // make sure it is reloaded into the document, by resetting all // library-specs. Do this inside the lock to avoid library loading // happening during this check. ForDocumentLibraryReload(container, new[] {name}); } return success; } } private static void ForDocumentLibraryReload(IDocumentContainer container, string[] specs) { var docOriginal = container.Document; if (docOriginal == null) return; var librarySettings = docOriginal.Settings.PeptideSettings.Libraries; if (!librarySettings.HasLibraries) return; int iSpec = librarySettings.LibrarySpecs.IndexOf(spec => spec != null && specs.Contains(spec.Name)); if (iSpec == -1 || librarySettings.Libraries[iSpec] == null) return; SrmDocument docNew; do { docOriginal = container.Document; var settings = docOriginal.Settings.ChangePeptideLibraries( lib => { var listLib = new List<Library>(lib.Libraries); int i = lib.LibrarySpecs.IndexOf(spec => specs.Contains(spec.Name)); if (i != -1) listLib[i] = null; return lib.ChangeLibraries(listLib); }); docNew = docOriginal.ChangeSettings(settings); } while (!container.SetDocument(docNew, docOriginal)); } private class LibraryBuildMonitor : IProgressMonitor { private readonly LibraryManager _manager; // Might want this someday... // ReSharper disable NotAccessedField.Local private readonly IDocumentContainer _container; // ReSharper restore NotAccessedField.Local public LibraryBuildMonitor(LibraryManager manager, IDocumentContainer container) { _manager = manager; _container = container; } // TODO: Some way to cancel a library build public bool IsCanceled { get { return false; } } public UpdateProgressResponse UpdateProgress(IProgressStatus status) { return _manager.UpdateProgress(status); } public bool HasUI { get { return false; } } } } /// <summary> /// Implement on a class for building a specific type of library. /// </summary> public interface ILibraryBuilder { /// <summary> /// Build the library with progress monitoring, and the ability /// to cancel. /// </summary> /// <param name="progress">Sink for progress updates, and source of user cancel status</param> bool BuildLibrary(IProgressMonitor progress); /// <summary> /// A <see cref="LibrarySpec"/> referencing the library to be built. /// </summary> LibrarySpec LibrarySpec { get; } } public enum LibraryRedundancy { best, all, all_redundant } public abstract class Library : XmlNamedElement { protected Library(LibrarySpec spec) : base(spec.Name) { FileNameHint = Path.GetFileName(spec.FilePath); UseExplicitPeakBounds = spec.UseExplicitPeakBounds; } /// <summary> /// Original file name used to create this library, for use in finding /// the library, if its identifying name is not present in the /// <see cref="SpectralLibraryList"/> /// </summary> public string FileNameHint { get; private set; } public bool UseExplicitPeakBounds { get; private set; } /// <summary> /// Creates the appropriate library spec for this library, given a path /// to the library. /// </summary> /// <param name="path">Path to the library file on disk</param> /// <returns>A new <see cref="LibrarySpec"/></returns> public virtual LibrarySpec CreateSpec(string path) { return CreateSpec().ChangeFilePath(path) .ChangeUseExplicitPeakBounds(UseExplicitPeakBounds); } protected abstract LibrarySpec CreateSpec(); /// <summary> /// Returns the filter string to be used for finding a library of this type. 
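/// (for example, a file-open dialog filter for the library's extension,
/// such as *.blib for BiblioSpec libraries).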
/// </summary>
public abstract string SpecFilter { get; }

/// <summary>
/// Returns the <see cref="IPooledStream"/> for the stream on which this library
/// relies for its data reading.
/// </summary>
public abstract IPooledStream ReadStream { get; }

/// <summary>
/// Returns all open <see cref="IPooledStream"/> associated with the library.
/// Default implementation returns the single stream from <see cref="ReadStream"/>.
/// </summary>
public virtual IEnumerable<IPooledStream> ReadStreams
{
    get
    {
        if (ReadStream != null)
            yield return ReadStream;
    }
}

/// <summary>
/// True if this library is loaded and may be used to query spectral
/// data. False if it is merely a placeholder loaded from a document
/// which has not yet been connected to the actual library data.
/// </summary>
public bool IsLoaded
{
    get { return IsNotLoadedExplained == null; }
}

/// <summary>
/// Same as IsLoaded property, but returns a non-null and hopefully useful message
/// for test purposes when not loaded.
/// </summary>
public abstract string IsNotLoadedExplained { get; }

/// <summary>
/// Determines if this library identifies itself as being the same
/// as another library.
/// </summary>
/// <param name="library">Library to check for identity</param>
/// <returns>True if the libraries have the same identity</returns>
public abstract bool IsSameLibrary(Library library);

/// <summary>
/// Used to determine relative ordering of this library with another
/// in an ordered progression of revisions. This check is only valid
/// if <see cref="IsSameLibrary"/> is true for the library parameter.
/// </summary>
/// <param name="library">Library to compare revisions with</param>
/// <returns>0 if revisions are equal,
/// 1 if the given library is newer than this,
/// -1 if the given library is older than this</returns>
public abstract int CompareRevisions(Library library);

/// <summary>
/// Determines if the library contains a specific (modified sequence, charge) pair.
/// </summary>
/// <param name="key">A sequence, charge pair</param>
/// <returns>True if the library contains the key</returns>
public abstract bool Contains(LibKey key);

/// <summary>
/// Determines if the library contains any spectra for a peptide, based on its
/// unmodified amino acid sequence.
/// </summary>
/// <param name="target">An unmodified sequence</param>
/// <returns>True if the library contains any spectra for this peptide regardless of modification or charge</returns>
public abstract bool ContainsAny(Target target);

/// <summary>
/// Some details for the library.
/// This can be the library revision, program version,
/// build date or a hyperlink to the library source
/// (e.g. http://peptide.nist.gov/ for NIST libraries)
/// </summary>
public abstract LibraryDetails LibraryDetails { get; }

/// <summary>
/// Contains only the paths of the files in the library,
/// unlike <see cref="LibraryDetails"/>, which carries more information.
/// </summary>
public abstract LibraryFiles LibraryFiles { get; }

/// <summary>
/// Attempts to get spectrum header information for a specific
/// (sequence, charge) pair.
/// </summary>
/// <param name="key">A sequence, charge pair</param>
/// <param name="libInfo">The spectrum header information, if successful</param>
/// <returns>True if the library contains the key</returns>
public abstract bool TryGetLibInfo(LibKey key, out SpectrumHeaderInfo libInfo);

/// <summary>
/// Attempts to get spectrum peak information for a specific
/// (sequence, charge) pair.
/// </summary>
/// <param name="key">A sequence, charge pair</param>
/// <param name="spectrum">The spectrum peak information, if successful</param>
/// <returns>True if the spectrum was retrieved successfully</returns>
public abstract bool TryLoadSpectrum(LibKey key, out SpectrumPeaksInfo spectrum);

/// <summary>
/// Loads a spectrum given a key provided by the library.
/// </summary>
/// <param name="spectrumKey">A key that uniquely identifies the spectrum</param>
/// <returns>The requested spectrum peak information</returns>
public abstract SpectrumPeaksInfo LoadSpectrum(object spectrumKey);

public virtual LibraryChromGroup LoadChromatogramData(object spectrumKey)
{
    return null;
}

/// <summary>
/// Attempts to get retention time information for a specific
/// (sequence, charge) pair and file.
/// </summary>
/// <param name="key">A sequence, charge pair</param>
/// <param name="filePath">A file for which the retention time information is requested</param>
/// <param name="retentionTimes">A list of retention times, if successful</param>
/// <returns>True if retention time information was retrieved successfully</returns>
public abstract bool TryGetRetentionTimes(LibKey key, MsDataFileUri filePath, out double[] retentionTimes);

/// <summary>
/// Attempts to get retention time information for all of the
/// (sequence, charge) pairs identified from a specific file.
/// </summary>
/// <param name="filePath">A file for which the retention time information is requested</param>
/// <param name="retentionTimes">The retention times found, if successful</param>
/// <returns>True if retention time information was retrieved successfully</returns>
public abstract bool TryGetRetentionTimes(MsDataFileUri filePath, out LibraryRetentionTimes retentionTimes);

/// <summary>
/// Attempts to get retention time information for all of the
/// (sequence, charge) pairs identified from a specific file by index.
/// </summary>
/// <param name="fileIndex">Index of a file for which the retention time information is requested</param>
/// <param name="retentionTimes">The retention times found, if successful</param>
/// <returns>True if retention time information was retrieved successfully</returns>
public abstract bool TryGetRetentionTimes(int fileIndex, out LibraryRetentionTimes retentionTimes);

/// <summary>
/// If an explicit peak boundary has been set for any of the peptide sequences, then return
/// that peak boundary.
/// </summary>
public virtual ExplicitPeakBounds GetExplicitPeakBounds(MsDataFileUri filePath, IEnumerable<Target> peptideSequences)
{
    return null;
}

/// <summary>
/// Attempts to get iRT information from the library.
/// </summary>
/// <param name="retentionTimes">A list of iRTs, if successful</param>
/// <returns>True if iRT information was retrieved successfully</returns>
public abstract bool TryGetIrts(out LibraryRetentionTimes retentionTimes);

public virtual IEnumerable<double> GetRetentionTimesWithSequences(string filePath, IEnumerable<Target> peptideSequences, ref int? fileIndex)
{
    return new double[0];
}

/// <summary>
/// Attempts to get ion mobility information for a specific
/// (sequence, charge) pair and file.
/// </summary>
/// <param name="key">A sequence, charge pair</param>
/// <param name="filePath">A file for which the ion mobility information is requested</param>
/// <param name="ionMobilities">A list of ion mobility info, if successful</param>
/// <returns>True if ion mobility information was retrieved successfully</returns>
public abstract bool TryGetIonMobilityInfos(LibKey key, MsDataFileUri filePath, out IonMobilityAndCCS[] ionMobilities);

/// <summary>
/// Attempts to get ion mobility information for selected
/// (sequence, charge) pairs identified from a specific file.
/// </summary>
/// <param name="targetIons">A list of sequence, charge pairs</param>
/// <param name="filePath">A file for which the ion mobility information is requested</param>
/// <param name="ionMobilities">A list of ion mobility info, if successful</param>
/// <returns>True if ion mobility information was retrieved successfully</returns>
public abstract bool TryGetIonMobilityInfos(LibKey[] targetIons, MsDataFileUri filePath, out LibraryIonMobilityInfo ionMobilities);

/// <summary>
/// Attempts to get ion mobility information for all of the
/// (sequence, charge) pairs identified from a specific file by index.
/// </summary>
/// <param name="targetIons">A list of sequence, charge pairs</param>
/// <param name="fileIndex">Index of a file for which the ion mobility information is requested</param>
/// <param name="ionMobilities">A list of ion mobility info, if successful</param>
/// <returns>True if ion mobility information was retrieved successfully</returns>
public abstract bool TryGetIonMobilityInfos(LibKey[] targetIons, int fileIndex, out LibraryIonMobilityInfo ionMobilities);

/// <summary>
/// Attempts to get ion mobility information for all of the
/// (sequence, charge) pairs identified from all files.
/// </summary>
/// <param name="targetIons">A list of sequence, charge pairs</param>
/// <param name="ionMobilities">A list of ion mobility info, if successful</param>
/// <returns>True if ion mobility information was retrieved successfully</returns>
public abstract bool TryGetIonMobilityInfos(LibKey[] targetIons, out LibraryIonMobilityInfo ionMobilities);

/// <summary>
/// Gets all of the spectrum information for a particular (sequence, charge) pair. This
/// may include redundant spectra. The spectrum points themselves are only loaded as
/// they are requested, to give this function acceptable performance.
/// </summary>
/// <param name="key">The sequence, charge pair requested</param>
/// <param name="labelType">An <see cref="IsotopeLabelType"/> for which to get spectra</param>
/// <param name="redundancy">Level of redundancy requested in returned values</param>
/// <returns>An enumeration of <see cref="SpectrumInfo"/></returns>
public abstract IEnumerable<SpectrumInfoLibrary> GetSpectra(LibKey key, IsotopeLabelType labelType, LibraryRedundancy redundancy);

/// <summary>
/// Returns the number of files or mass spec runs for which this library
/// contains spectra, or null if this is unknown.
/// </summary>
public abstract int? FileCount { get; }

/// <summary>
/// Returns the total number of spectra loaded from the library.
/// </summary>
public abstract int SpectrumCount { get; }

/// <summary>
/// Returns an enumerator for the keys of the spectra loaded from the library.
/// </summary>
public abstract IEnumerable<LibKey> Keys { get; }

/// <summary>
/// Returns a list of <see cref="RetentionTimeSource"/> objects representing
/// the data files that this Library can provide peptide retention time
/// values for.
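/// The default implementation below returns an empty list; libraries that carry
/// retention time data override it.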
/// </summary> public virtual IList<RetentionTimeSource> ListRetentionTimeSources() { return new RetentionTimeSource[0]; } public IEnumerable<IRetentionTimeProvider> RetentionTimeProvidersIrt { get { LibraryRetentionTimes irts; if (TryGetIrts(out irts)) yield return irts; } } public IEnumerable<IRetentionTimeProvider> RetentionTimeProviders { get { var fileCount = FileCount; if (!fileCount.HasValue) yield break; for (var i = 0; i < fileCount.Value; i++) { LibraryRetentionTimes retentionTimes; if (TryGetRetentionTimes(i, out retentionTimes)) yield return retentionTimes; } } } #region File reading utility functions protected internal static int GetInt32(byte[] bytes, int index, int offset = 0) { int ibyte = offset + index * 4; return bytes[ibyte] | bytes[ibyte + 1] << 8 | bytes[ibyte + 2] << 16 | bytes[ibyte + 3] << 24; } protected static float GetSingle(byte[] bytes, int index) { return BitConverter.ToSingle(bytes, index * 4); } protected static int ReadSize(Stream stream) { byte[] libSize = new byte[4]; ReadComplete(stream, libSize, libSize.Length); return GetInt32(libSize, 0); } protected static string ReadString(Stream stream, int countBytes) { byte[] stringBytes = new byte[countBytes]; ReadComplete(stream, stringBytes, countBytes); return Encoding.UTF8.GetString(stringBytes); } protected static void ReadComplete(Stream stream, byte[] buffer, int size) { if (stream.Read(buffer, 0, size) != size) throw new InvalidDataException(Resources.Library_ReadComplete_Data_truncation_in_library_header_File_may_be_corrupted); } protected static void SafeReadComplete(Stream stream, ref byte[] buffer, int size) { if (size > buffer.Length) buffer = new byte[size]; if (stream.Read(buffer, 0, size) != size) throw new InvalidDataException(Resources.Library_ReadComplete_Data_truncation_in_library_header_File_may_be_corrupted); } #endregion protected bool Equals(Library other) { return base.Equals(other) && string.Equals(FileNameHint, other.FileNameHint) && UseExplicitPeakBounds == other.UseExplicitPeakBounds; } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; if (obj.GetType() != GetType()) return false; return Equals((Library) obj); } public override int GetHashCode() { unchecked { int hashCode = base.GetHashCode(); hashCode = (hashCode * 397) ^ (FileNameHint != null ? 
FileNameHint.GetHashCode() : 0); hashCode = (hashCode * 397) ^ UseExplicitPeakBounds.GetHashCode(); return hashCode; } } #region Implementation of IXmlSerializable /// <summary> /// For XML serialization /// </summary> protected Library() { } private enum ATTR { file_name_hint, use_explicit_peak_bounds } public override void ReadXml(XmlReader reader) { // Read tag attributes base.ReadXml(reader); FileNameHint = reader.GetAttribute(ATTR.file_name_hint); UseExplicitPeakBounds = reader.GetBoolAttribute(ATTR.use_explicit_peak_bounds, true); } public override void WriteXml(XmlWriter writer) { // Write tag attributes base.WriteXml(writer); writer.WriteAttributeIfString(ATTR.file_name_hint, FileNameHint); writer.WriteAttribute(ATTR.use_explicit_peak_bounds, UseExplicitPeakBounds, true); } #endregion } public interface ICachedSpectrumInfo { LibKey Key { get; } } public abstract class CachedLibrary<TInfo> : Library where TInfo : ICachedSpectrumInfo { protected CachedLibrary() { } protected CachedLibrary(LibrarySpec spec) : base(spec) { } protected LibKeyMap<TInfo> _libraryEntries; protected string CachePath { get; set; } public override string IsNotLoadedExplained { get { return (_libraryEntries != null) ? null : @"no library entries"; } } public override bool ContainsAny(Target target) { return _libraryEntries.ItemsWithUnmodifiedSequence(target).Any(); } public override bool Contains(LibKey key) { return FindEntry(key) != -1; } protected int FindExactEntry(LibKey key) { if (_libraryEntries == null) return -1; return _libraryEntries.IndexOf(key.LibraryKey); } protected int FindEntry(LibKey key) { if (_libraryEntries == null) { return -1; } foreach (var entry in _libraryEntries.Index.ItemsMatching(key, true)) { return entry.OriginalIndex; } return -1; } protected virtual void SetLibraryEntries(IEnumerable<TInfo> entries) { var entryList = ImmutableList.ValueOf(entries); _libraryEntries = new LibKeyMap<TInfo>(entryList, entryList.Select(entry=>entry.Key.LibraryKey)); } protected List<TInfo> FilterInvalidLibraryEntries(ref IProgressStatus status, IEnumerable<TInfo> entries) { var validEntries = new List<TInfo>(); var invalidKeys = new List<LibKey>(); foreach (var entry in entries) { if (!IsValidLibKey(entry.Key)) { invalidKeys.Add(entry.Key); } else { validEntries.Add(entry); } } status = WarnInvalidEntries(status, validEntries.Count, invalidKeys); return validEntries; } protected bool IsValidLibKey(LibKey libKey) { try { var unused = libKey.LibraryKey.CreatePeptideIdentityObj(); return true; } catch (Exception) { return false; } } protected IProgressStatus WarnInvalidEntries(IProgressStatus progressStatus, int validEntryCount, ICollection<LibKey> invalidEntries) { if (invalidEntries.Count == 0) { return progressStatus; } var invalidText = TextUtil.LineSeparate(invalidEntries.Take(10).Select(key => key.ToString())); string warningMessage = string.Format(Resources.CachedLibrary_WarnInvalidEntries_, Name, invalidEntries.Count, invalidEntries.Count + validEntryCount, invalidText); progressStatus = progressStatus.ChangeWarningMessage(warningMessage); return progressStatus; } public override bool TryGetLibInfo(LibKey key, out SpectrumHeaderInfo libInfo) { var index = FindEntry(key); if (index != -1) { libInfo = CreateSpectrumHeaderInfo(_libraryEntries[index]); return true; } libInfo = null; return false; } protected abstract SpectrumHeaderInfo CreateSpectrumHeaderInfo(TInfo info); public override bool TryLoadSpectrum(LibKey key, out SpectrumPeaksInfo spectrum) { int i = FindEntry(key); if (i != -1) { var 
spectrumPeaks = ReadSpectrum(_libraryEntries[i]); if (spectrumPeaks != null) { spectrum = new SpectrumPeaksInfo(spectrumPeaks); return true; } } spectrum = null; return false; } public override SpectrumPeaksInfo LoadSpectrum(object spectrumKey) { var spectrumPeaks = ReadSpectrum(_libraryEntries[(int)spectrumKey]); if (spectrumPeaks == null) throw new IOException(string.Format(Resources.CachedLibrary_LoadSpectrum_Library_entry_not_found__0__, spectrumKey)); return new SpectrumPeaksInfo(spectrumPeaks); } protected abstract SpectrumPeaksInfo.MI[] ReadSpectrum(TInfo info); public override LibraryChromGroup LoadChromatogramData(object spectrumKey) { return ReadChromatogram(_libraryEntries[(int) spectrumKey]); } protected virtual LibraryChromGroup ReadChromatogram(TInfo info) { return null; } public override bool TryGetRetentionTimes(LibKey key, MsDataFileUri filePath, out double[] retentionTimes) { // By default, no retention time information is available retentionTimes = null; return false; } public override bool TryGetRetentionTimes(MsDataFileUri filePath, out LibraryRetentionTimes retentionTimes) { // By default, no retention time information is available retentionTimes = null; return false; } public override bool TryGetRetentionTimes(int fileIndex, out LibraryRetentionTimes retentionTimes) { // By default, no retention time information is available retentionTimes = null; return false; } public override bool TryGetIrts(out LibraryRetentionTimes retentionTimes) { // By default, no iRT information is available retentionTimes = null; return false; } public override bool TryGetIonMobilityInfos(LibKey key, MsDataFileUri filePath, out IonMobilityAndCCS[] ionMobilities) { // By default, no ion mobility information is available ionMobilities = null; return false; } public override bool TryGetIonMobilityInfos(LibKey[] targetIons, MsDataFileUri filePath, out LibraryIonMobilityInfo ionMobilities) { // By default, no ion mobility information is available ionMobilities = null; return false; } public override bool TryGetIonMobilityInfos(LibKey[] targetIons, int fileIndex, out LibraryIonMobilityInfo ionMobilities) { // By default, no ion mobility information is available ionMobilities = null; return false; } public override bool TryGetIonMobilityInfos(LibKey[] targetIons, out LibraryIonMobilityInfo ionMobilities) { // By default, no ion mobility information is available ionMobilities = null; return false; } public override IEnumerable<SpectrumInfoLibrary> GetSpectra(LibKey key, IsotopeLabelType labelType, LibraryRedundancy redundancy) { // This base class only handles best match spectra if (redundancy == LibraryRedundancy.best) { int i = FindEntry(key); if (i != -1) { yield return new SpectrumInfoLibrary(this, labelType, i) { SpectrumHeaderInfo = CreateSpectrumHeaderInfo(_libraryEntries[i]) }; } } } public override int? FileCount { get { return null; } } public override int SpectrumCount { get { return _libraryEntries == null ? 
0 : _libraryEntries.Count; } } public override IEnumerable<LibKey> Keys { get { if (IsLoaded) foreach (var entry in _libraryEntries) yield return entry.Key; } } protected IEnumerable<TInfo> LibraryEntriesWithSequences(IEnumerable<Target> peptideSequences) { return peptideSequences.SelectMany(LibraryEntriesWithSequence); } protected IEnumerable<TInfo> LibraryEntriesWithSequence(Target target) { return _libraryEntries.ItemsMatching(new LibKey(target, Adduct.EMPTY).LibraryKey, false); } // ReSharper disable PossibleMultipleEnumeration protected int FindFileInList(MsDataFileUri sourceFile, IEnumerable<string> fileNames) { if (fileNames == null) { return -1; } string sourceFileToString = sourceFile.ToString(); int iFile = 0; foreach (var fileName in fileNames) { if (fileName.Equals(sourceFileToString)) { return iFile; } iFile++; } string baseName = sourceFile.GetFileNameWithoutExtension(); iFile = 0; foreach (var fileName in fileNames) { try { if (MeasuredResults.IsBaseNameMatch(baseName, Path.GetFileNameWithoutExtension(fileName))) { return iFile; } } catch (Exception) { // Ignore: Invalid filename } iFile++; } return -1; } // ReSharper restore PossibleMultipleEnumeration } public sealed class LibraryRetentionTimes : IRetentionTimeProvider { private readonly TargetMap<Tuple<TimeSource, double[]>> _dictPeptideRetentionTimes; public LibraryRetentionTimes(string path, IDictionary<Target, Tuple<TimeSource, double[]>> dictPeptideRetentionTimes) { Name = path; _dictPeptideRetentionTimes = new TargetMap<Tuple<TimeSource, double[]>>(dictPeptideRetentionTimes); if (_dictPeptideRetentionTimes.Count == 0) { MinRt = MaxRt = 0; } else { MinRt = _dictPeptideRetentionTimes.SelectMany(p => p.Value.Item2).Min(); MaxRt = _dictPeptideRetentionTimes.SelectMany(p => p.Value.Item2).Max(); } var listStdev = new List<double>(); foreach (Tuple<TimeSource, double[]> times in _dictPeptideRetentionTimes.Values) { if (times.Item2.Length < 2) continue; var statTimes = new Statistics(times.Item2); listStdev.Add(statTimes.StdDev()); } var statStdev = new Statistics(listStdev); MeanStdev = statStdev.Mean(); } public string Name { get; private set; } public double MinRt { get; private set; } public double MaxRt { get; private set; } public double MeanStdev { get; private set; } /// <summary> /// Returns all retention times for spectra that were identified to a /// specific modified peptide sequence. /// </summary> public double[] GetRetentionTimes(Target sequence) { Tuple<TimeSource, double[]> retentionTimes; if (_dictPeptideRetentionTimes.TryGetValue(sequence, out retentionTimes)) return retentionTimes.Item2; return new double[0]; } /// <summary> /// Return the average retention time for spectra that were identified to a /// specific modified peptide sequence, with filtering applied in an attempt /// to avoid peptides eluting a second time near the end of the gradient. /// </summary> public double? GetRetentionTime(Target sequence) { double[] retentionTimes = GetRetentionTimes(sequence); if (retentionTimes.Length == 0) return null; if (retentionTimes.Length == 1) return retentionTimes[0]; double meanTimes = retentionTimes[0]; // Anything 3 times the mean standard deviation away from the mean is suspicious double maxDelta = MeanStdev*3; for (int i = 1; i < retentionTimes.Length; i++) { double time = retentionTimes[i]; double delta = time - meanTimes; // If the time is more than the max delta from the other times, and closer // to the end than to the other times, then do not include it or anything // after it. 
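// Example (hypothetical numbers): with times { 10.1, 10.2, 61.5 } and MaxRt near 62,
// 61.5 is far beyond the running mean and closer to the gradient end than to the
// cluster, so it and everything after it are dropped as a likely re-elution.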
if (delta > maxDelta && delta > MaxRt - time) { double[] subsetTimes = new double[i]; Array.Copy(retentionTimes, subsetTimes, i); retentionTimes = subsetTimes; break; } // Adjust the running mean. meanTimes += (time - meanTimes)/(i+1); } var statTimes = new Statistics(retentionTimes); return statTimes.Median(); } public TimeSource? GetTimeSource(Target sequence) { Tuple<TimeSource, double[]> value; if (_dictPeptideRetentionTimes.TryGetValue(sequence, out value)) { return value.Item1; } return null; } public IEnumerable<MeasuredRetentionTime> PeptideRetentionTimes { get { return from sequence in _dictPeptideRetentionTimes.Keys let time = GetRetentionTime(sequence) where time.HasValue select new MeasuredRetentionTime(sequence, time.Value, true); } } public IDictionary<Target, double> GetFirstRetentionTimes() { var dict = new Dictionary<Target, double>(); foreach (var entry in _dictPeptideRetentionTimes) { if (entry.Value.Item2.Length == 0) { continue; } dict.Add(entry.Key, entry.Value.Item2.Min()); } return dict; } } public sealed class LibraryIonMobilityInfo : IIonMobilityInfoProvider { private readonly LibKeyMap<IonMobilityAndCCS[]> _dictLibKeyIonMobility; public static LibraryIonMobilityInfo EMPTY = new LibraryIonMobilityInfo(String.Empty, false, new Dictionary<LibKey, IonMobilityAndCCS[]>()); public LibraryIonMobilityInfo(string path, bool supportMultipleConformers, IDictionary<LibKey, IonMobilityAndCCS[]> dict) : this(path, supportMultipleConformers, new LibKeyMap<IonMobilityAndCCS[]>( ImmutableList.ValueOf(dict.Values), dict.Keys.Select(key=>key.LibraryKey))) { } public LibraryIonMobilityInfo(string path, bool supportMultipleConformers, LibKeyMap<IonMobilityAndCCS[]> dictLibKeyIonMobility) { Name = path ?? string.Empty; SupportsMultipleConformers = supportMultipleConformers; _dictLibKeyIonMobility = dictLibKeyIonMobility; } public string Name { get; private set; } public bool SupportsMultipleConformers { get; private set; } // If false, average any redundancies (as with spectral libraries) public bool IsEmpty { get { return _dictLibKeyIonMobility == null || _dictLibKeyIonMobility.Count == 0;} } /// <summary> /// Return the median measured CCS for spectra that were identified with a /// specific modified peptide sequence and charge state. /// </summary> public double? GetLibraryMeasuredCollisionalCrossSection(LibKey chargedPeptide) { IonMobilityAndCCS[] ionMobilities; if ((!_dictLibKeyIonMobility.TryGetValue(chargedPeptide, out ionMobilities)) || (ionMobilities == null)) return null; double? ccs = null; var ccsValues = Array.FindAll(ionMobilities, im => im.HasCollisionalCrossSection); if (ccsValues.Any()) { ccs = new Statistics(ccsValues.Select(im => im.CollisionalCrossSectionSqA.Value)).Median(); } return ccs; } /// <summary> /// Return the median measured ion mobility for spectra that were identified with a /// specific modified peptide sequence and charge state. Prefer to use median CCS /// when possible, and calculate IM from that. If only IM values are available, convert /// to CCS if possible. /// CONSIDER: when we support multiple conformers, is there maybe some difference magnitude at which we should not be averaging (based on resolving power maybe)? 
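/// In short: when a CCS-to-mobility converter is available, the median CCS drives
/// the reported mobility; otherwise the median mobility is used directly, with CCS
/// back-calculated when possible.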
/// </summary>
public IonMobilityAndCCS GetLibraryMeasuredIonMobilityAndCCS(LibKey chargedPeptide, double mz, IIonMobilityFunctionsProvider ionMobilityFunctionsProvider)
{
    IonMobilityAndCCS[] ionMobilities;
    if ((!_dictLibKeyIonMobility.TryGetValue(chargedPeptide, out ionMobilities)) || (ionMobilities == null))
        return IonMobilityAndCCS.EMPTY;
    IonMobilityValue ionMobility = IonMobilityValue.EMPTY;
    double? ccs = null;
    var ionMobilityInfos = ionMobilityFunctionsProvider != null
        ? Array.FindAll(ionMobilities, im => im.HasCollisionalCrossSection)
        : null;
    if (ionMobilityInfos != null && ionMobilityInfos.Any() && ionMobilityFunctionsProvider.ProvidesCollisionalCrossSectionConverter)
    {
        // Use median CCS to calculate an ion mobility value
        ccs = new Statistics(ionMobilityInfos.Select(im => im.CollisionalCrossSectionSqA.Value)).Median(); // Median is more tolerant of errors than Average
        ionMobility = IonMobilityValue.GetIonMobilityValue(ionMobilityFunctionsProvider.IonMobilityFromCCS(ccs.Value, mz, chargedPeptide.Charge).Mobility, ionMobilityFunctionsProvider.IonMobilityUnits);
    }
    else
    {
        // Use median ion mobility, convert to CCS if available
        ionMobilityInfos = Array.FindAll(ionMobilities, dt => dt.HasIonMobilityValue);
        if (ionMobilityInfos.Any())
        {
            var units = ionMobilityInfos.First().IonMobility.Units;
            var medianValue = new Statistics(ionMobilityInfos.Select(im => im.IonMobility.Mobility.Value)).Median(); // Median is more tolerant of errors than Average
            ionMobility = IonMobilityValue.GetIonMobilityValue(medianValue, units);
            if (ionMobilityFunctionsProvider != null && ionMobilityFunctionsProvider.ProvidesCollisionalCrossSectionConverter)
            {
                ccs = ionMobilityFunctionsProvider.CCSFromIonMobility(ionMobility, mz, chargedPeptide.Charge);
            }
            else // No mobility-to-CCS converter provided, so just report the median of any CCS values present
            {
                var ccsValues = ionMobilityInfos.Where(im => im.HasCollisionalCrossSection)
                    .Select(im => im.CollisionalCrossSectionSqA.Value).ToArray();
                if (ccsValues.Any())
                {
                    ccs = new Statistics(ccsValues).Median(); // Median is more tolerant of errors than Average
                }
            }
        }
    }
    if (!ionMobility.HasValue)
        return IonMobilityAndCCS.EMPTY;
    var highEnergyDriftTimeOffsetMsec = new Statistics(ionMobilityInfos.Where(im => im.HighEnergyIonMobilityValueOffset.HasValue).Select(im => im.HighEnergyIonMobilityValueOffset.Value)).Median(); // Median is more tolerant of errors than Average
    return IonMobilityAndCCS.GetIonMobilityAndCCS(ionMobility, ccs, highEnergyDriftTimeOffsetMsec);
}

public IDictionary<LibKey, IonMobilityAndCCS[]> GetIonMobilityDict()
{
    return _dictLibKeyIonMobility.AsDictionary();
}
}

public abstract class LibrarySpec : XmlNamedElement
{
    public static readonly PeptideRankId PEP_RANK_COPIES =
        new PeptideRankId(@"Spectrum count", () => Resources.LibrarySpec_PEP_RANK_COPIES_Spectrum_count);
    public static readonly PeptideRankId PEP_RANK_TOTAL_INTENSITY =
        new PeptideRankId(@"Total intensity", () => Resources.LibrarySpec_PEP_RANK_TOTAL_INTENSITY_Total_intensity);
    public static readonly PeptideRankId PEP_RANK_PICKED_INTENSITY =
        new PeptideRankId(@"Picked intensity", () => Resources.LibrarySpec_PEP_RANK_PICKED_INTENSITY_Picked_intensity);

    public static LibrarySpec CreateFromPath(string name, string path)
    {
        if (PathEx.HasExtension(path, BiblioSpecLiteSpec.EXT))
            return new BiblioSpecLiteSpec(name, path);
        else if (PathEx.HasExtension(path, BiblioSpecLibSpec.EXT))
            return new BiblioSpecLibSpec(name, path);
        else if (PathEx.HasExtension(path, ChromatogramLibrarySpec.EXT))
            return new ChromatogramLibrarySpec(name, path);
        else if (PathEx.HasExtension(path, XHunterLibSpec.EXT))
            return new XHunterLibSpec(name, path);
        else if (PathEx.HasExtension(path, NistLibSpec.EXT))
            return new NistLibSpec(name, path);
        else if (PathEx.HasExtension(path, SpectrastSpec.EXT))
            return new SpectrastSpec(name, path);
        else if (PathEx.HasExtension(path, MidasLibSpec.EXT))
            return new MidasLibSpec(name, path);
        else if (PathEx.HasExtension(path, EncyclopeDiaSpec.EXT))
            return new EncyclopeDiaSpec(name, path);
        return null;
    }

    protected LibrarySpec(string name, string path)
        : base(name)
    {
        FilePath = path;
        UseExplicitPeakBounds = true;
    }

    [Track]
    public AuditLogPath FilePathAuditLog
    {
        get { return AuditLogPath.Create(FilePath); }
    }

    public string FilePath { get; private set; }

    /// <summary>
    /// Returns the filter string to be used for finding a library of this type.
    /// </summary>
    public abstract string Filter { get; }

    /// <summary>
    /// True if this library spec was created in order to open the current document
    /// only, and should not be stored long term in the global settings.
    /// </summary>
    public bool IsDocumentLocal { get; private set; }

    /// <summary>
    /// True if this is the document-specific library spec, and should not be stored
    /// in the global settings.
    /// </summary>
    public bool IsDocumentLibrary { get; private set; }

    public abstract Library LoadLibrary(ILoadMonitor loader);

    public abstract IEnumerable<PeptideRankId> PeptideRankIds { get; }

    [Track(defaultValues:typeof(DefaultValuesTrue))]
    public bool UseExplicitPeakBounds { get; private set; }

    #region Property change methods

    public LibrarySpec ChangeFilePath(string prop)
    {
        return ChangeProp(ImClone(this), im => im.FilePath = prop);
    }

    public LibrarySpec ChangeDocumentLocal(bool prop)
    {
        return ChangeProp(ImClone(this), im => im.IsDocumentLocal = prop);
    }

    public LibrarySpec ChangeDocumentLibrary(bool prop)
    {
        return ChangeProp(ImClone(this), im => im.IsDocumentLibrary = prop).ChangeDocumentLocal(prop);
    }

    public LibrarySpec ChangeUseExplicitPeakBounds(bool prop)
    {
        return ChangeProp(ImClone(this), im => im.UseExplicitPeakBounds = prop);
    }

    #endregion

    #region Implementation of IXmlSerializable

    /// <summary>
    /// For XML serialization
    /// </summary>
    protected LibrarySpec()
    {
    }

    private enum ATTR
    {
        file_path,
        use_explicit_peak_bounds
    }

    public override void ReadXml(XmlReader reader)
    {
        // Read tag attributes
        base.ReadXml(reader);
        FilePath = reader.GetAttribute(ATTR.file_path);
        UseExplicitPeakBounds = reader.GetBoolAttribute(ATTR.use_explicit_peak_bounds, true);
        // Consume tag
        reader.Read();
    }

    public override void WriteXml(XmlWriter writer)
    {
        if (IsDocumentLocal)
            throw new InvalidOperationException(Resources.LibrarySpec_WriteXml_Document_local_library_specs_cannot_be_persisted_to_XML);
        if (IsDocumentLibrary)
            throw new InvalidOperationException(Resources.LibrarySpec_WriteXml_Document_library_specs_cannot_be_persisted_to_XML_);
        // Write tag attributes
        base.WriteXml(writer);
        writer.WriteAttributeString(ATTR.file_path, FilePath);
        writer.WriteAttribute(ATTR.use_explicit_peak_bounds, UseExplicitPeakBounds, true);
    }

    #endregion

    #region object overrides

    public bool Equals(LibrarySpec other)
    {
        if (ReferenceEquals(null, other)) return false;
        if (ReferenceEquals(this, other)) return true;
        return base.Equals(other) &&
               Equals(other.FilePath, FilePath) &&
               other.IsDocumentLocal.Equals(IsDocumentLocal) &&
               other.IsDocumentLibrary.Equals(IsDocumentLibrary) &&
               other.UseExplicitPeakBounds.Equals(UseExplicitPeakBounds);
    }

    public override bool Equals(object obj)
    {
        if (ReferenceEquals(null, obj)) return false;
        if (ReferenceEquals(this, obj)) return true;
        return Equals(obj as LibrarySpec);
    }

    public override int GetHashCode()
    {
        unchecked
        {
            int result = base.GetHashCode();
            result = (result*397) ^ FilePath.GetHashCode();
            result = (result*397) ^ IsDocumentLocal.GetHashCode();
            result = (result*397) ^ IsDocumentLibrary.GetHashCode();
            return result;
        }
    }

    #endregion
}

/// <summary>
/// Identity class for a type of peptide ranking, with values for
/// displaying in the user interface, and persisting to XML.
/// </summary>
public sealed class PeptideRankId : IAuditLogObject
{
    public static readonly PeptideRankId PEPTIDE_RANK_NONE = new PeptideRankId(string.Empty, () => string.Empty);

    private Func<string> _labelFunc;

    public PeptideRankId(string value, Func<string> labelFunc)
    {
        Value = value;
        _labelFunc = labelFunc;
    }

    /// <summary>
    /// Display text for user interface.
    /// </summary>
    public string Label { get { return _labelFunc(); } }

    /// <summary>
    /// Name for use in XML.
    /// </summary>
    public string Value { get; private set; }

    public override string ToString() { return Label; }

    public string AuditLogText { get { return Label; } }
    public bool IsName { get { return true; } }

    private bool Equals(PeptideRankId other)
    {
        return string.Equals(Label, other.Label) && string.Equals(Value, other.Value);
    }

    public override bool Equals(object obj)
    {
        if (ReferenceEquals(null, obj)) return false;
        if (ReferenceEquals(this, obj)) return true;
        return obj is PeptideRankId && Equals((PeptideRankId) obj);
    }

    public override int GetHashCode()
    {
        unchecked
        {
            return ((Label != null ? Label.GetHashCode() : 0) * 397) ^ (Value != null ? Value.GetHashCode() : 0);
        }
    }
}

public abstract class SpectrumHeaderInfo : Immutable, IXmlSerializable
{
    protected SpectrumHeaderInfo(string libraryName)
    {
        LibraryName = libraryName;
    }

    public string LibraryName { get; private set; }

    public SpectrumHeaderInfo ChangeLibraryName(string prop)
    {
        return ChangeProp(ImClone(this), im => im.LibraryName = prop);
    }

    /// <summary>
    /// Value used in ranking peptides.
    /// </summary>
    /// <param name="rankId">Identifier of the value to return</param>
    /// <returns>The value to use in ranking</returns>
    public virtual float GetRankValue(PeptideRankId rankId)
    {
        // If a subclass has not provided a number of copies, return 1.
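// Subclasses override this to report copy counts and intensities read from the
// library; the float.MinValue sentinel below means no rank value is available.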
if (ReferenceEquals(rankId, LibrarySpec.PEP_RANK_COPIES)) return 1; return float.MinValue; } public abstract IEnumerable<KeyValuePair<PeptideRankId, string>> RankValues { get; } public string Protein { get; protected set; } // Some .blib and .clib files provide a protein accession (or Molecule List Name for small molecules) #region Implementation of IXmlSerializable /// <summary> /// For XML serialization /// </summary> protected SpectrumHeaderInfo() { } private enum ATTR { library_name, protein } public XmlSchema GetSchema() { return null; } public virtual void ReadXml(XmlReader reader) { // Read tag attributes LibraryName = reader.GetAttribute(ATTR.library_name); Protein = reader.GetAttribute(ATTR.protein); } public virtual void WriteXml(XmlWriter writer) { // Write tag attributes writer.WriteAttributeString(ATTR.library_name, LibraryName); writer.WriteAttributeIfString(ATTR.protein, Protein); } #endregion #region object overrides public bool Equals(SpectrumHeaderInfo obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; return Equals(obj.LibraryName, LibraryName) && Equals(obj.Protein, Protein); } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; if (obj.GetType() != typeof (SpectrumHeaderInfo)) return false; return Equals((SpectrumHeaderInfo) obj); } public override int GetHashCode() { return LibraryName.GetHashCode(); } #endregion } public sealed class TransitionLibInfo { public TransitionLibInfo(int rank, float intensity) { Rank = rank; Intensity = intensity; } public int Rank { get; private set; } public float Intensity { get; private set; } #region object overrides public bool Equals(TransitionLibInfo obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; return obj.Intensity == Intensity && obj.Rank == Rank; } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; if (obj.GetType() != typeof (TransitionLibInfo)) return false; return Equals((TransitionLibInfo) obj); } public override int GetHashCode() { unchecked { return (Intensity.GetHashCode()*397) ^ Rank; } } #endregion } public sealed class SpectrumPeaksInfo { public SpectrumPeaksInfo(MI[] spectrum) { Peaks = spectrum; } /// <summary> /// This array must be highly performant. Making this class /// <see cref="Immutable"/>, and using a <see cref="ReadOnlyCollection{T}"/> /// caused iteration of this list to show up as a hotspot in /// a profiler. /// </summary> public MI[] Peaks { get; private set; } public IEnumerable<double> MZs { get { foreach (var mi in Peaks) yield return mi.Mz; } } public IEnumerable<double> Intensities { get { foreach (var mi in Peaks) yield return mi.Intensity; } } public IEnumerable<IEnumerable<SpectrumPeakAnnotation>> Annotations { get { foreach (var mi in Peaks) yield return mi.Annotations; } } private bool Equals(SpectrumPeaksInfo other) { return ArrayUtil.EqualsDeep(Peaks, other.Peaks); } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; return obj is SpectrumPeaksInfo other && Equals(other); } public override int GetHashCode() { return (Peaks != null ? 
Peaks.GetHashCode() : 0); } public struct MI { private bool _notQuantitative; private List<SpectrumPeakAnnotation> _annotations; // A peak may have multiple annotations public double Mz { get; set; } public float Intensity { get; set; } public bool Quantitative { get { return !_notQuantitative; } set { _notQuantitative = !value; } } public List<SpectrumPeakAnnotation> Annotations { get { return _annotations; } set { _annotations = value; } } public MI ChangeAnnotations(List<SpectrumPeakAnnotation> newAnnotations) { if (!CollectionUtil.EqualsDeep(newAnnotations, Annotations)) { // Because this is a struct, it does not need to be cloned // This operation will not affect the memory of the original object var result = this; result._annotations = newAnnotations; return result; } return this; } public MI ChangeIntensity(float intensity) { var result = this; result.Intensity = intensity; return result; } public SpectrumPeakAnnotation AnnotationsFirstOrDefault { get { return Annotations == null || Annotations.Count == 0 ? SpectrumPeakAnnotation.EMPTY : Annotations[0] ?? SpectrumPeakAnnotation.EMPTY; } } public IEnumerable<SpectrumPeakAnnotation> GetAnnotationsEnumerator() { if (Annotations == null || Annotations.Count == 0) { yield return SpectrumPeakAnnotation.EMPTY; } else { foreach (var spectrumPeakAnnotation in Annotations) { yield return spectrumPeakAnnotation ?? SpectrumPeakAnnotation.EMPTY; } } } public CustomIon AnnotationsAggregateDescriptionIon { get { if (Annotations != null) { var aggregateName = AnnotationsFirstOrDefault.Ion.Name ?? string.Empty; var nAnnotations = Annotations.Count; for (var i = 1; i < nAnnotations; i++) { var name = Annotations[i].Ion.Name; if (!string.IsNullOrEmpty(name)) { aggregateName += @"/" + name; } } if (!string.IsNullOrEmpty(aggregateName)) { return AnnotationsFirstOrDefault.Ion.ChangeName(aggregateName); } } return AnnotationsFirstOrDefault.Ion; } } public bool Equals(MI other) { return _notQuantitative == other._notQuantitative && ArrayUtil.EqualsDeep(_annotations, other._annotations) && Mz.Equals(other.Mz) && Intensity.Equals(other.Intensity); } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; return obj is MI other && Equals(other); } public override int GetHashCode() { unchecked { var hashCode = _notQuantitative.GetHashCode(); hashCode = (hashCode * 397) ^ (_annotations != null ? _annotations.GetHashCode() : 0); hashCode = (hashCode * 397) ^ Mz.GetHashCode(); hashCode = (hashCode * 397) ^ Intensity.GetHashCode(); return hashCode; } } } } public class SmallMoleculeLibraryAttributes : IEquatable<SmallMoleculeLibraryAttributes> { public static SmallMoleculeLibraryAttributes EMPTY = new SmallMoleculeLibraryAttributes(null, null, null, null, null, null); public static int nItems = 4; public bool IsEmpty { get { return ReferenceEquals(this, EMPTY); } } // Helper for library caches public static SmallMoleculeLibraryAttributes FromBytes(byte[] buf, int offset) { var itemLengths = new int[nItems]; var itemStarts = new int[nItems]; for (var i = 0; i < nItems; i++) { // read item length itemLengths[i] = Library.GetInt32(buf, i, offset); itemStarts[i] = i == 0 ? 
offset + nItems * sizeof(int)
    : itemStarts[i - 1] + itemLengths[i - 1];
}
return Create(
    Encoding.UTF8.GetString(buf, itemStarts[0], itemLengths[0]),
    Encoding.UTF8.GetString(buf, itemStarts[1], itemLengths[1]),
    Encoding.UTF8.GetString(buf, itemStarts[2], itemLengths[2]),
    Encoding.UTF8.GetString(buf, itemStarts[3], itemLengths[3]));
}

public static void ParseMolecularFormulaOrMassesString(string molecularFormulaOrMassesString, out string molecularFormula, out TypedMass? massMono, out TypedMass? massAverage)
{
    if (molecularFormulaOrMassesString != null && molecularFormulaOrMassesString.Contains(CustomMolecule.MASS_SPLITTER))
    {
        var parts = molecularFormulaOrMassesString.Split(CustomMolecule.MASS_SPLITTER);
        // We didn't have a formula so we saved masses
        massMono = new TypedMass(double.Parse(parts[0], CultureInfo.InvariantCulture), MassType.Monoisotopic);
        massAverage = new TypedMass(double.Parse(parts[1], CultureInfo.InvariantCulture), MassType.Average);
        molecularFormula = null;
    }
    else
    {
        massMono = null;
        massAverage = null;
        molecularFormula = molecularFormulaOrMassesString;
    }
}

public static string FormatChemicalFormulaOrMassesString(string chemicalFormula, TypedMass? massMono, TypedMass? massAverage) // For serialization - represents formula or masses, depending on what's available
{
    if (!string.IsNullOrEmpty(chemicalFormula))
    {
        return chemicalFormula;
    }
    if (massMono != null && massAverage != null)
    {
        Assume.IsTrue(massMono.Value.IsMonoIsotopic());
        Assume.IsTrue(massAverage.Value.IsAverage());
        return CustomMolecule.FormattedMasses(massMono.Value.Value, massAverage.Value.Value); // Format as dd.ddd/dd.ddd
    }
    return string.Empty;
}

public static byte[] ToBytes(SmallMoleculeLibraryAttributes attributes)
{
    attributes = attributes ?? EMPTY;
    // Encode as <length><length>...<item><item>... - all four item lengths first, then the four items
    var items = new List<byte[]>
    {
        Encoding.UTF8.GetBytes(attributes.MoleculeName ?? string.Empty),
        Encoding.UTF8.GetBytes(attributes.ChemicalFormulaOrMassesString ?? string.Empty), // If no formula provided, encode monoMass and averageMass instead
        Encoding.UTF8.GetBytes(attributes.InChiKey ?? string.Empty),
        Encoding.UTF8.GetBytes(attributes.OtherKeys ?? string.Empty)
    };
    Assume.IsTrue(Equals(nItems, items.Count));
    var results = new byte[items.Sum(item => item.Length + sizeof(int))];
    var index = 0;
    foreach (var item in items)
    {
        Array.Copy(BitConverter.GetBytes(item.Length), 0, results, index, sizeof(int));
        index += sizeof(int);
    }
    foreach (var item in items)
    {
        Array.Copy(item, 0, results, index, item.Length);
        index += item.Length;
    }
    return results;
}

public static SmallMoleculeLibraryAttributes Create(string moleculeName, string chemicalFormula, TypedMass? massMono,
    TypedMass? massAverage, string inChiKey, string otherKeys)
{
    if (string.IsNullOrEmpty(moleculeName) && string.IsNullOrEmpty(chemicalFormula) &&
        massMono == null && massAverage == null &&
        string.IsNullOrEmpty(inChiKey) && string.IsNullOrEmpty(otherKeys))
    {
        return EMPTY;
    }
    return new SmallMoleculeLibraryAttributes(moleculeName, chemicalFormula, massMono, massAverage, inChiKey, otherKeys);
}
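// Illustration (not part of the original source): ToBytes/FromBytes round-trip the
// four attribute strings through a library cache buffer, so a hypothetical
// FromBytes(ToBytes(attrs), 0) reconstructs an equivalent instance.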
public static SmallMoleculeLibraryAttributes Create(string moleculeName, string chemicalFormulaOrMassesString, string inChiKey,
    IDictionary<string, string> otherKeys)
{
    return Create(moleculeName, chemicalFormulaOrMassesString, inChiKey,
        otherKeys == null
            ? string.Empty
            : string.Join("\t", otherKeys.Select(kvp => kvp.Key + @":" + kvp.Value)));
}

public static SmallMoleculeLibraryAttributes Create(string moleculeName, string chemicalFormulaOrMassesString, string inChiKey, string otherKeys)
{
    ParseMolecularFormulaOrMassesString(chemicalFormulaOrMassesString, out var chemicalFormula, out var massMono, out var massAverage);
    if (string.IsNullOrEmpty(moleculeName) && string.IsNullOrEmpty(chemicalFormula) &&
        massMono == null && massAverage == null &&
        string.IsNullOrEmpty(inChiKey) && string.IsNullOrEmpty(otherKeys))
    {
        return EMPTY;
    }
    return new SmallMoleculeLibraryAttributes(moleculeName, chemicalFormula, massMono, massAverage, inChiKey, otherKeys);
}

private SmallMoleculeLibraryAttributes(string moleculeName, string chemicalFormula, TypedMass? massMono, TypedMass? massAverage, string inChiKey, string otherKeys)
{
    MoleculeName = moleculeName;
    ChemicalFormulaOrMassesString = FormatChemicalFormulaOrMassesString(chemicalFormula, massMono, massAverage); // If no formula provided, encode monoMass and averageMass instead
    InChiKey = inChiKey;
    OtherKeys = otherKeys;
}

public string MoleculeName { get; private set; }
public string ChemicalFormulaOrMassesString { get; private set; } // If no formula provided, encodes monoMass and averageMass instead as <mono>-slash-<average>
public string ChemicalFormula =>
    ChemicalFormulaOrMassesString != null && !ChemicalFormulaOrMassesString.Contains(CustomMolecule.MASS_SPLITTER) // Returns null if ChemicalFormulaOrMassesString encodes masses instead of formula
        ? ChemicalFormulaOrMassesString
        : null;
public string InChiKey { get; private set; }
public string OtherKeys { get; private set; }

public string GetPreferredKey()
{
    return CreateMoleculeID().PrimaryAccessionValue ?? MoleculeName;
}
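// A usable entry needs a formula (or explicit masses) plus at least one identifier;
// Validate returns a localized error message, or null when the attributes are valid.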
public string Validate()
{
    return string.IsNullOrEmpty(ChemicalFormulaOrMassesString) ||
           (string.IsNullOrEmpty(MoleculeName) && string.IsNullOrEmpty(InChiKey) && string.IsNullOrEmpty(OtherKeys))
        ? Resources.SmallMoleculeLibraryAttributes_Validate_A_small_molecule_is_defined_by_a_chemical_formula_and_at_least_one_of_Name__InChiKey__or_other_keys__HMDB_etc_
        : null;
}

public MoleculeAccessionNumbers CreateMoleculeID()
{
    return new MoleculeAccessionNumbers(OtherKeys, InChiKey);
}

public List<KeyValuePair<string,string>> LocalizedKeyValuePairs
{
    get
    {
        var smallMolLines = new List<KeyValuePair<string, string>>();
        if (!string.IsNullOrEmpty(MoleculeName))
        {
            smallMolLines.Add(new KeyValuePair<string, string>(Resources.SmallMoleculeLibraryAttributes_KeyValuePairs_Name, MoleculeName));
        }
        ParseMolecularFormulaOrMassesString(ChemicalFormulaOrMassesString, out var chemicalFormula, out var massMono, out var massAverage);
        if (!string.IsNullOrEmpty(chemicalFormula))
        {
            smallMolLines.Add(new KeyValuePair<string, string>(Resources.SmallMoleculeLibraryAttributes_KeyValuePairs_Formula, chemicalFormula));
        }
        if (massMono != null)
        {
            smallMolLines.Add(new KeyValuePair<string, string>(Resources.SmallMoleculeLibraryAttributes_KeyValuePairs_Monoisotopic_mass, massMono.ToString()));
        }
        if (massAverage != null)
        {
            smallMolLines.Add(new KeyValuePair<string, string>(Resources.SmallMoleculeLibraryAttributes_KeyValuePairs_Average_mass, massAverage.ToString()));
        }
        if (!string.IsNullOrEmpty(InChiKey))
        {
            smallMolLines.Add(new KeyValuePair<string, string>(Resources.SmallMoleculeLibraryAttributes_KeyValuePairs_InChIKey, InChiKey));
        }
        if (!string.IsNullOrEmpty(OtherKeys))
        {
            smallMolLines.Add(new KeyValuePair<string, string>(Resources.SmallMoleculeLibraryAttributes_KeyValuePairs_OtherIDs, OtherKeys.Replace('\t','\n')));
        }
        return smallMolLines;
    }
}

public override bool Equals(object obj)
{
    if (ReferenceEquals(null, obj)) return false;
    if (ReferenceEquals(this, obj)) return true;
    if (obj.GetType() != GetType()) return false;
    return Equals((SmallMoleculeLibraryAttributes)obj);
}

public bool Equals(SmallMoleculeLibraryAttributes other)
{
    if (other == null)
        return false;
    return Equals(MoleculeName, other.MoleculeName) &&
           Equals(ChemicalFormulaOrMassesString, other.ChemicalFormulaOrMassesString) &&
           Equals(InChiKey, other.InChiKey) &&
           Equals(OtherKeys, other.OtherKeys);
}

public override int GetHashCode()
{
    unchecked
    {
        var hashCode = (MoleculeName != null ? MoleculeName.GetHashCode() : 0);
        hashCode = (hashCode * 397) ^ (ChemicalFormulaOrMassesString != null ? ChemicalFormulaOrMassesString.GetHashCode() : 0);
        hashCode = (hashCode * 397) ^ (InChiKey != null ? InChiKey.GetHashCode() : 0);
        hashCode = (hashCode * 397) ^ (OtherKeys != null ? OtherKeys.GetHashCode() : 0);
        return hashCode;
    }
}

public override string ToString()
{
    return GetPreferredKey();
}
}

/// <summary>
/// Transfer format for library spectra
/// </summary>
public class SpectrumMzInfo
{
    public string SourceFile { get; set; }
    public LibKey Key { get; set; }
    public string Protein { get; set; } // Also used as Molecule List Name for small molecules
    public SmallMoleculeLibraryAttributes SmallMoleculeLibraryAttributes { get { return Key.SmallMoleculeLibraryAttributes; } }
    public IonMobilityAndCCS IonMobility { get; set; }
    public double PrecursorMz { get; set; }
    public double? RetentionTime { get; set; }
    public IsotopeLabelType Label { get; set; }
    public SpectrumPeaksInfo SpectrumPeaks { get; set; }
    public List<IonMobilityAndRT> RetentionTimes { get; set; } // (File, RT, IM, IsBest)

    public const double PRECURSOR_MZ_TOL = 0.001;

    public class IonMobilityAndRT
    {
        public string SourceFile { get; private set; }
        public IonMobilityAndCCS IonMobility { get; private set; }
        public double? RetentionTime { get; private set; }
        public bool IsBest { get; private set; }

        public IonMobilityAndRT(string sourceFile, IonMobilityAndCCS ionMobility, double? retentionTime, bool isBest)
        {
            SourceFile = sourceFile;
            IonMobility = ionMobility;
            RetentionTime = retentionTime;
            IsBest = isBest;
        }
    }

    /// <summary>
    /// Combine two spectra, for when transition list import has alternating light-heavy transitions
    /// that need to be re-united with their groups at the end.
    /// </summary>
    public SpectrumMzInfo CombineSpectrumInfo(SpectrumMzInfo infoOther, out List<TransitionImportErrorInfo> spectrumErrors)
    {
        spectrumErrors = new List<TransitionImportErrorInfo>();
        if (infoOther == null)
            return this;
        if (Math.Abs(PrecursorMz - infoOther.PrecursorMz) > PRECURSOR_MZ_TOL ||
            !Equals(Label, infoOther.Label) ||
            !Equals(Key, infoOther.Key))
        {
            for (int i = 0; i < infoOther.SpectrumPeaks.Peaks.Length; ++i)
            {
                spectrumErrors.Add(new TransitionImportErrorInfo(string.Format(Resources.SpectrumMzInfo_CombineSpectrumInfo_Two_incompatible_transition_groups_for_sequence__0___precursor_m_z__1__,
                    Key.Target, PrecursorMz), null, null, null));
            }
            return this;
        }
        var peaks = SpectrumPeaks.Peaks;
        var peaksOther = infoOther.SpectrumPeaks.Peaks;
        var newPeaks = peaks.Concat(peaksOther).ToArray();
        return new SpectrumMzInfo
        {
            SourceFile = infoOther.SourceFile,
            Key = infoOther.Key,
            Label = infoOther.Label,
            PrecursorMz = infoOther.PrecursorMz,
            IonMobility = infoOther.IonMobility,
            RetentionTime = infoOther.RetentionTime,
            SpectrumPeaks = new SpectrumPeaksInfo(newPeaks)
        };
    }

    public static List<SpectrumMzInfo> RemoveDuplicateSpectra(List<SpectrumMzInfo> librarySpectra)
    {
        var uniqueSpectra = new List<SpectrumMzInfo>();
        var spectraGroups = librarySpectra.GroupBy(spectrum => spectrum.Key);
        foreach (var spectraGroup in spectraGroups)
        {
            var spectraGroupList = spectraGroup.ToList();
            spectraGroupList.Sort(CompareSpectrumMzLabels);
            uniqueSpectra.Add(spectraGroupList[0]);
        }
        return uniqueSpectra;
    }

    /// <summary>
    /// Order by isotope label type (e.g. light, heavy, ...)
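/// so that the light form sorts first and is the one kept by RemoveDuplicateSpectra.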
/// </summary> public static int CompareSpectrumMzLabels(SpectrumMzInfo info1, SpectrumMzInfo info2) { return info1.Label.CompareTo(info2.Label); } public static List<SpectrumMzInfo> GetInfoFromLibrary(Library library) { var spectrumMzInfos = new List<SpectrumMzInfo>(); foreach (var key in library.Keys) { var info = library.GetSpectra(key, null, LibraryRedundancy.best).FirstOrDefault(); if (info == null) { throw new IOException(string.Format(Resources.SpectrumMzInfo_GetInfoFromLibrary_Library_spectrum_for_sequence__0__is_missing_, key.Target)); } spectrumMzInfos.Add(new SpectrumMzInfo { SourceFile = info.FileName, Key = key, SpectrumPeaks = info.SpectrumPeaksInfo, Protein = info.Protein }); } return spectrumMzInfos; } public static List<SpectrumMzInfo> MergeWithOverwrite(List<SpectrumMzInfo> originalSpectra, List<SpectrumMzInfo> overwriteSpectra) { var finalSpectra = new List<SpectrumMzInfo>(overwriteSpectra); var dictOriginalSpectra = originalSpectra.ToDictionary(spectrum => spectrum.Key); var dictOverwriteSpectra = overwriteSpectra.ToDictionary(spectrum => spectrum.Key); finalSpectra.AddRange(from spectrum in dictOriginalSpectra where !dictOverwriteSpectra.ContainsKey(spectrum.Key) select spectrum.Value); return finalSpectra; } } public abstract class SpectrumInfo { public SpectrumInfo(IsotopeLabelType labelType, bool isBest) { LabelType = labelType; IsBest = isBest; } protected bool Equals(SpectrumInfo other) { return Equals(LabelType, other.LabelType) && string.Equals(Name, other.Name) && IsBest == other.IsBest && Equals(SpectrumPeaksInfo, other.SpectrumPeaksInfo) && Equals(ChromatogramData, other.ChromatogramData); } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; if (obj.GetType() != GetType()) return false; return Equals((SpectrumInfo) obj); } public override int GetHashCode() { unchecked { var hashCode = (LabelType != null ? LabelType.GetHashCode() : 0); hashCode = (hashCode * 397) ^ (Name != null ? Name.GetHashCode() : 0); hashCode = (hashCode * 397) ^ IsBest.GetHashCode(); hashCode = (hashCode * 397) ^ (SpectrumPeaksInfo != null ? SpectrumPeaksInfo.GetHashCode() : 0); hashCode = (hashCode * 397) ^ (ChromatogramData != null ? ChromatogramData.GetHashCode() : 0); return hashCode; } } public IsotopeLabelType LabelType { get; protected set; } public abstract string Name { get; } public bool IsBest { get; protected set; } public abstract SpectrumPeaksInfo SpectrumPeaksInfo { get; } public abstract LibraryChromGroup ChromatogramData { get; } } public class SpectrumInfoLibrary : SpectrumInfo { private Library _library; public SpectrumInfoLibrary(Library library, IsotopeLabelType labelType, object spectrumKey): this(library, labelType, null, null, null, null, true, spectrumKey) { } public SpectrumInfoLibrary(Library library, IsotopeLabelType labelType, string filePath, double? retentionTime, IonMobilityAndCCS ionMobilityInfo, string protein, bool isBest, object spectrumKey) : base(labelType, true) { _library = library; LabelType = labelType; SpectrumKey = spectrumKey; FilePath = filePath; RetentionTime = retentionTime; IonMobilityInfo = ionMobilityInfo ?? 
IonMobilityAndCCS.EMPTY; Protein = protein; IsBest = isBest; } public object SpectrumKey { get; private set; } public override string Name { get { return _library.Name; } } public override SpectrumPeaksInfo SpectrumPeaksInfo { get { return _library.LoadSpectrum(SpectrumKey); } } public override LibraryChromGroup ChromatogramData { get { return _library.LoadChromatogramData(SpectrumKey); } } public SpectrumHeaderInfo SpectrumHeaderInfo { get; set; } public string FilePath { get; private set; } public string FileName { get { try { return Path.GetFileName(FilePath); } catch { return FilePath; } } } public double? RetentionTime { get; set; } public IonMobilityAndCCS IonMobilityInfo { get; private set; } public string Protein { get; private set; } // Also used as Molecule List Name for small molecules } public class SpectrumInfoProsit : SpectrumInfo { public static readonly string NAME = @"Prosit"; private SpectrumPeaksInfo _peaksInfo; public SpectrumInfoProsit(PrositMS2Spectra ms2Spectrum, TransitionGroupDocNode precursor, IsotopeLabelType labelType, int nce) : base(labelType, true) { _peaksInfo = ms2Spectrum?.GetSpectrum(precursor).SpectrumPeaks; Precursor = precursor; NCE = nce; } public override string Name { get { return NAME; } } public override SpectrumPeaksInfo SpectrumPeaksInfo { get { return _peaksInfo; } } public override LibraryChromGroup ChromatogramData { get { return null; } } public TransitionGroupDocNode Precursor { get; } public int NCE { get; } } public class LibraryChromGroup { private IList<ChromData> _chromDatas = ImmutableList.Empty<ChromData>(); public double StartTime { get; set; } public double EndTime { get; set; } public double RetentionTime { get; set; } public double? CCS { get; set; } public float[] Times { get; set; } public IList<ChromData> ChromDatas { get { return _chromDatas; } set { _chromDatas = ImmutableList.ValueOf(value); } } protected bool Equals(LibraryChromGroup other) { return ArrayUtil.EqualsDeep(_chromDatas, other._chromDatas) && StartTime.Equals(other.StartTime) && EndTime.Equals(other.EndTime) && RetentionTime.Equals(other.RetentionTime) && Equals(CCS, other.CCS) && ArrayUtil.EqualsDeep(Times, other.Times); } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; if (obj.GetType() != GetType()) return false; return Equals((LibraryChromGroup) obj); } public override int GetHashCode() { unchecked { var hashCode = (_chromDatas != null ? _chromDatas.GetHashCode() : 0); hashCode = (hashCode * 397) ^ StartTime.GetHashCode(); hashCode = (hashCode * 397) ^ EndTime.GetHashCode(); hashCode = (hashCode * 397) ^ RetentionTime.GetHashCode(); hashCode = (hashCode * 397) ^ (CCS??0).GetHashCode(); hashCode = (hashCode * 397) ^ (Times != null ? 
Times.GetHashCode() : 0); return hashCode; } } public class ChromData { public double Mz { get; set; } public double Height { get; set; } public float[] Intensities { get; set; } public Adduct Charge { get; set; } public IonType IonType { get; set; } public int Ordinal { get; set; } public int MassIndex { get; set; } public string FragmentName { get; set; } // Small molecule use public IonMobilityValue IonMobility { get; set; } protected bool Equals(ChromData other) { return Mz.Equals(other.Mz) && Height.Equals(other.Height) && Equals(Intensities, other.Intensities) && Charge == other.Charge && IonType == other.IonType && Ordinal == other.Ordinal && Equals(IonMobility, other.IonMobility) && Equals(FragmentName, other.FragmentName) && MassIndex == other.MassIndex; } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; if (obj.GetType() != GetType()) return false; return Equals((ChromData) obj); } public override int GetHashCode() { unchecked { var hashCode = Mz.GetHashCode(); hashCode = (hashCode * 397) ^ Height.GetHashCode(); hashCode = (hashCode * 397) ^ (Intensities != null ? Intensities.GetHashCode() : 0); hashCode = (hashCode * 397) ^ Charge.GetHashCode(); hashCode = (hashCode * 397) ^ (string.IsNullOrEmpty(FragmentName) ? 0 : FragmentName.GetHashCode()); hashCode = (hashCode * 397) ^ (int) IonType; hashCode = (hashCode * 397) ^ Ordinal; hashCode = (hashCode * 397) ^ MassIndex; hashCode = (hashCode * 397) ^ (IonMobility != null ? IonMobility.GetHashCode() : 0); return hashCode; } } } } /// <summary> /// Links to spectral library sources /// </summary> public sealed class LibraryLink { public static readonly LibraryLink PEPTIDEATLAS = new LibraryLink(@"PeptideAtlas", @"http://www.peptideatlas.org/speclib/"); public static readonly LibraryLink NIST = new LibraryLink(@"NIST", @"http://peptide.nist.gov/"); public static readonly LibraryLink GPM = new LibraryLink(@"GPM", @"ftp://ftp.thegpm.org/projects/xhunter/libs/"); private LibraryLink(string name, string href) { Name = name; Link = href; } public string Name { get; private set; } public string Link { get; private set; } // This appears in stack traces when we report unhandled parsing issues public override string ToString() { var result = new List<string>(); if (!string.IsNullOrEmpty(Name)) result.Add($@"LinkName: {Name} "); if (!string.IsNullOrEmpty(Link)) result.Add($@"LinkURL: {Link} "); return TextUtil.LineSeparate(result); } } public sealed class LibraryFiles { private IEnumerable<string> _filePaths; public IEnumerable<string> FilePaths { get { return _filePaths ?? (_filePaths = new List<string>()); } set { _filePaths = value; } } } /// <summary> /// Some spectrum library details that can be displayed in a dialog box. /// This can be the format of the library (e.g. BiblioSpec, SpectraST etc.), /// a library revision (when available), number of peptides etc. /// Optionally, appropriate links to spectral library sources can also be included. /// </summary> public sealed class LibraryDetails { private readonly IList<LibraryLink> _libLinks; private IEnumerable<SpectrumSourceFileDetails> _dataFiles; public LibraryDetails() { _libLinks = new List<LibraryLink>(); } public void AddLink(LibraryLink link) { _libLinks.Add(link); } public string Id { get; set; } // e.g. BiblioSpec, SpectraST etc. 
public string Format { get; set; } // library revision public string Revision { get; set; } // version of the program that generated the library public string Version { get; set; } public int SpectrumCount { get; set; } public int UniquePeptideCount { get; set; } public int TotalPsmCount { get; set; } public IEnumerable<SpectrumSourceFileDetails> DataFiles { get { return _dataFiles ?? (_dataFiles = new List<SpectrumSourceFileDetails>()); } set { _dataFiles = value; } } public IEnumerable<LibraryLink> LibLinks { get { return _libLinks; } } // This appears in stack traces when we report unhandled parsing issues public override string ToString() { var lines = new List<string>(); if (!string.IsNullOrEmpty(Format)) lines.Add($@"Format: {Format}"); if (!string.IsNullOrEmpty(Id)) lines.Add($@"LSID: {Id}"); if (!string.IsNullOrEmpty(Revision)) lines.Add($@"FileRevision: {Revision}"); if (!string.IsNullOrEmpty(Version)) lines.Add($@"SchemaVersion: {Version}"); if (_dataFiles != null && _dataFiles.Any()) lines.AddRange(_dataFiles.Select(df => df.ToString())); if (_libLinks != null && _libLinks.Any()) lines.AddRange(_libLinks.Select(link => link.ToString())); return TextUtil.LineSeparate(lines); } } /// <summary> /// Key for use in dictionaries that store library header information in /// memory. /// </summary> public struct LibKey { public static LibKey EMPTY = new LibKey(SmallMoleculeLibraryAttributes.EMPTY, Adduct.EMPTY); public LibKey(LibraryKey libraryKey) : this() { LibraryKey = libraryKey; } public LibKey(string sequence, int charge) : this() { LibraryKey = (LibraryKey) CrosslinkSequenceParser.TryParseCrosslinkLibraryKey(sequence, charge) ?? new PeptideLibraryKey(sequence, charge); } public LibKey(SmallMoleculeLibraryAttributes attributes, Adduct adduct) : this() { LibraryKey = new MoleculeLibraryKey(attributes, adduct); } public LibKey(string primaryKey, Adduct adduct) : this() { if (adduct.IsProteomic) { LibraryKey = (LibraryKey) CrosslinkSequenceParser.TryParseCrosslinkLibraryKey(primaryKey, adduct.AdductCharge) ?? new PeptideLibraryKey(primaryKey, adduct.AdductCharge); } else { LibraryKey = new MoleculeLibraryKey(SmallMoleculeLibraryAttributes.Create(primaryKey, null, null, string.Empty), adduct); } } [Track] public LibraryKey LibraryKey { get; private set; } public LibKey(double precursorMz, double? retentionTime = null) : this() // TODO(bspratt) probably should add ion mobility { LibraryKey = new PrecursorLibraryKey(precursorMz, retentionTime); } public LibKey(Target target, Adduct adduct) : this() { if (target.IsProteomic) { LibraryKey = (LibraryKey) CrosslinkSequenceParser.TryParseCrosslinkLibraryKey(target.Sequence, adduct.AdductCharge) ?? new PeptideLibraryKey(target.Sequence, adduct.AdductCharge); } else LibraryKey = new MoleculeLibraryKey(target.Molecule.GetSmallMoleculeLibraryAttributes(), adduct); } public LibKey(Target target, int charge) : this(target.Sequence, charge) { } public bool IsProteomicKey { get { return LibraryKey is PeptideLibraryKey; } } public bool IsSmallMoleculeKey { get { return LibraryKey is MoleculeLibraryKey; } } public bool IsPrecursorKey { get { return LibraryKey is PrecursorLibraryKey; } } public bool HasRetentionTime { get { return IsPrecursorKey && ((PrecursorLibraryKey)LibraryKey).RetentionTime.HasValue; } } public string Sequence { get { var peptideKey = LibraryKey as PeptideLibraryKey; return peptideKey == null ? 
null : peptideKey.ModifiedSequence; } } public Target Target { get { return LibraryKey.Target; } } public SmallMoleculeLibraryAttributes SmallMoleculeLibraryAttributes { get { var moleculeLibraryKey = LibraryKey as MoleculeLibraryKey; return moleculeLibraryKey == null ? SmallMoleculeLibraryAttributes.EMPTY : moleculeLibraryKey.SmallMoleculeLibraryAttributes; } } public int Charge { get { return IsProteomicKey ? ((PeptideLibraryKey) LibraryKey).Charge : (IsPrecursorKey ? 0 : ((MoleculeLibraryKey) LibraryKey).Adduct.AdductCharge); } } public Adduct Adduct { get { return LibraryKey.Adduct; } } public bool IsModified { get { var key = LibraryKey as PeptideLibraryKey; return key != null && key.HasModifications; } } public double? PrecursorMz { get { var key = LibraryKey as PrecursorLibraryKey; return key != null ? key.Mz : default(double?); } } public double? RetentionTime { get { var key = LibraryKey as PrecursorLibraryKey; return key != null ? key.RetentionTime : default(double?); } } public bool HasModifications { get { var peptideKey = LibraryKey as PeptideLibraryKey; return peptideKey != null && peptideKey.HasModifications; } } public int ModificationCount { get { var peptideKey = LibraryKey as PeptideLibraryKey; return peptideKey != null ? peptideKey.ModificationCount : 0; } } public static implicit operator LibraryKey(LibKey libKey) { return libKey.LibraryKey; } #region object overrides public bool Equals(LibKey obj) { return LibraryKey.IsEquivalentTo(obj.LibraryKey); } public override bool Equals(object obj) { if (obj == null) return false; if (obj.GetType() != typeof(LibKey)) return false; return Equals((LibKey)obj); // N.B. for equality we ignore any small molecule metadata } public override int GetHashCode() { return LibraryKey.GetEquivalencyHashCode(); } public override string ToString() { return LibraryKey.ToString(); } #endregion public void Write(Stream outStream) { LibraryKey.Write(outStream); } public static LibKey Read(ValueCache valueCache, Stream inStream) { return new LibKey(LibraryKey.Read(valueCache, inStream)); } } public class SpectrumSourceFileDetails { public SpectrumSourceFileDetails(string filePath, string idFilePath = null) { FilePath = filePath; IdFilePath = idFilePath; CutoffScores = new Dictionary<string, double?>(); BestSpectrum = 0; MatchedSpectrum = 0; } public string FilePath { get; private set; } public string IdFilePath { get; set; } public Dictionary<string, double?> CutoffScores { get; private set; } public int BestSpectrum { get; set; } public int MatchedSpectrum { get; set; } public override string ToString() { var result = new List<string>(); if (!string.IsNullOrEmpty(IdFilePath)) result.Add($@"IdFilePath: {IdFilePath}"); if (!string.IsNullOrEmpty(FilePath)) result.Add($@"FilePath: {FilePath}"); return TextUtil.LineSeparate(result); } } }
1
14500
Picking nits here, but as a Dictionary already is a collection of KeyValuePairs, there's undoubtedly a way to code this that doesn't involve constructing any new ones.
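A minimal sketch of what the reviewer is suggesting, assuming the patch built fresh KeyValuePair objects while merging; the class and parameter names below are hypothetical, not taken from the patch:

```csharp
// Dictionary<TKey, TValue> already enumerates as KeyValuePair<TKey, TValue>,
// so the existing pairs can be filtered and projected directly instead of
// being re-wrapped in newly constructed KeyValuePair instances.
using System.Collections.Generic;
using System.Linq;

static class MergeSketch
{
    public static List<TValue> MergeWithOverwrite<TKey, TValue>(
        Dictionary<TKey, TValue> original,
        Dictionary<TKey, TValue> overwrite)
    {
        var merged = new List<TValue>(overwrite.Values);
        // Enumerate the dictionary's own pairs; no new KeyValuePair is allocated.
        merged.AddRange(original
            .Where(pair => !overwrite.ContainsKey(pair.Key))
            .Select(pair => pair.Value));
        return merged;
    }
}
```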
ProteoWizard-pwiz
.cs
@@ -19,12 +19,7 @@ package org.camunda.bpm.model.cmmn; import static org.camunda.bpm.model.cmmn.impl.CmmnModelConstants.CMMN10_NS; import static org.camunda.bpm.model.cmmn.impl.CmmnModelConstants.CMMN11_NS; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.InputStream; -import java.io.OutputStream; +import java.io.*; import org.camunda.bpm.model.cmmn.impl.CmmnParser; import org.camunda.bpm.model.cmmn.impl.instance.ApplicabilityRuleImpl;
1
/* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.model.cmmn; import static org.camunda.bpm.model.cmmn.impl.CmmnModelConstants.CMMN10_NS; import static org.camunda.bpm.model.cmmn.impl.CmmnModelConstants.CMMN11_NS; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.InputStream; import java.io.OutputStream; import org.camunda.bpm.model.cmmn.impl.CmmnParser; import org.camunda.bpm.model.cmmn.impl.instance.ApplicabilityRuleImpl; import org.camunda.bpm.model.cmmn.impl.instance.ArtifactImpl; import org.camunda.bpm.model.cmmn.impl.instance.AssociationImpl; import org.camunda.bpm.model.cmmn.impl.instance.BindingRefinementExpressionImpl; import org.camunda.bpm.model.cmmn.impl.instance.BodyImpl; import org.camunda.bpm.model.cmmn.impl.instance.CaseFileImpl; import org.camunda.bpm.model.cmmn.impl.instance.CaseFileItemDefinitionImpl; import org.camunda.bpm.model.cmmn.impl.instance.CaseFileItemImpl; import org.camunda.bpm.model.cmmn.impl.instance.CaseFileItemOnPartImpl; import org.camunda.bpm.model.cmmn.impl.instance.CaseFileItemStartTriggerImpl; import org.camunda.bpm.model.cmmn.impl.instance.CaseFileItemTransitionStandardEventImpl; import org.camunda.bpm.model.cmmn.impl.instance.CaseFileModelImpl; import org.camunda.bpm.model.cmmn.impl.instance.CaseImpl; import org.camunda.bpm.model.cmmn.impl.instance.CaseParameterImpl; import org.camunda.bpm.model.cmmn.impl.instance.CasePlanModel; import org.camunda.bpm.model.cmmn.impl.instance.CaseRefExpressionImpl; import org.camunda.bpm.model.cmmn.impl.instance.CaseRoleImpl; import org.camunda.bpm.model.cmmn.impl.instance.CaseRolesImpl; import org.camunda.bpm.model.cmmn.impl.instance.CaseTaskImpl; import org.camunda.bpm.model.cmmn.impl.instance.ChildrenImpl; import org.camunda.bpm.model.cmmn.impl.instance.CmmnElementImpl; import org.camunda.bpm.model.cmmn.impl.instance.ConditionExpressionImpl; import org.camunda.bpm.model.cmmn.impl.instance.CriterionImpl; import org.camunda.bpm.model.cmmn.impl.instance.DecisionImpl; import org.camunda.bpm.model.cmmn.impl.instance.DecisionParameterImpl; import org.camunda.bpm.model.cmmn.impl.instance.DecisionRefExpressionImpl; import org.camunda.bpm.model.cmmn.impl.instance.DecisionTaskImpl; import org.camunda.bpm.model.cmmn.impl.instance.DefaultControlImpl; import org.camunda.bpm.model.cmmn.impl.instance.DefinitionsImpl; import org.camunda.bpm.model.cmmn.impl.instance.DiscretionaryItemImpl; import org.camunda.bpm.model.cmmn.impl.instance.DocumentationImpl; import org.camunda.bpm.model.cmmn.impl.instance.EntryCriterionImpl; import org.camunda.bpm.model.cmmn.impl.instance.EventImpl; import org.camunda.bpm.model.cmmn.impl.instance.EventListenerImpl; import 
org.camunda.bpm.model.cmmn.impl.instance.ExitCriterionImpl; import org.camunda.bpm.model.cmmn.impl.instance.ExpressionImpl; import org.camunda.bpm.model.cmmn.impl.instance.ExtensionElementsImpl; import org.camunda.bpm.model.cmmn.impl.instance.HumanTaskImpl; import org.camunda.bpm.model.cmmn.impl.instance.IfPartImpl; import org.camunda.bpm.model.cmmn.impl.instance.ImportImpl; import org.camunda.bpm.model.cmmn.impl.instance.InputCaseParameterImpl; import org.camunda.bpm.model.cmmn.impl.instance.InputDecisionParameterImpl; import org.camunda.bpm.model.cmmn.impl.instance.InputProcessParameterImpl; import org.camunda.bpm.model.cmmn.impl.instance.InputsCaseParameterImpl; import org.camunda.bpm.model.cmmn.impl.instance.ItemControlImpl; import org.camunda.bpm.model.cmmn.impl.instance.ManualActivationRuleImpl; import org.camunda.bpm.model.cmmn.impl.instance.MilestoneImpl; import org.camunda.bpm.model.cmmn.impl.instance.OnPartImpl; import org.camunda.bpm.model.cmmn.impl.instance.OutputCaseParameterImpl; import org.camunda.bpm.model.cmmn.impl.instance.OutputDecisionParameterImpl; import org.camunda.bpm.model.cmmn.impl.instance.OutputProcessParameterImpl; import org.camunda.bpm.model.cmmn.impl.instance.OutputsCaseParameterImpl; import org.camunda.bpm.model.cmmn.impl.instance.ParameterImpl; import org.camunda.bpm.model.cmmn.impl.instance.ParameterMappingImpl; import org.camunda.bpm.model.cmmn.impl.instance.PlanFragmentImpl; import org.camunda.bpm.model.cmmn.impl.instance.PlanItemControlImpl; import org.camunda.bpm.model.cmmn.impl.instance.PlanItemDefinitionImpl; import org.camunda.bpm.model.cmmn.impl.instance.PlanItemImpl; import org.camunda.bpm.model.cmmn.impl.instance.PlanItemOnPartImpl; import org.camunda.bpm.model.cmmn.impl.instance.PlanItemStartTriggerImpl; import org.camunda.bpm.model.cmmn.impl.instance.PlanItemTransitionStandardEventImpl; import org.camunda.bpm.model.cmmn.impl.instance.PlanningTableImpl; import org.camunda.bpm.model.cmmn.impl.instance.ProcessImpl; import org.camunda.bpm.model.cmmn.impl.instance.ProcessParameterImpl; import org.camunda.bpm.model.cmmn.impl.instance.ProcessRefExpressionImpl; import org.camunda.bpm.model.cmmn.impl.instance.ProcessTaskImpl; import org.camunda.bpm.model.cmmn.impl.instance.PropertyImpl; import org.camunda.bpm.model.cmmn.impl.instance.RelationshipImpl; import org.camunda.bpm.model.cmmn.impl.instance.RepetitionRuleImpl; import org.camunda.bpm.model.cmmn.impl.instance.RequiredRuleImpl; import org.camunda.bpm.model.cmmn.impl.instance.RoleImpl; import org.camunda.bpm.model.cmmn.impl.instance.SentryImpl; import org.camunda.bpm.model.cmmn.impl.instance.SourceImpl; import org.camunda.bpm.model.cmmn.impl.instance.StageImpl; import org.camunda.bpm.model.cmmn.impl.instance.StartTriggerImpl; import org.camunda.bpm.model.cmmn.impl.instance.TableItemImpl; import org.camunda.bpm.model.cmmn.impl.instance.TargetImpl; import org.camunda.bpm.model.cmmn.impl.instance.TaskImpl; import org.camunda.bpm.model.cmmn.impl.instance.TextAnnotationImpl; import org.camunda.bpm.model.cmmn.impl.instance.TextImpl; import org.camunda.bpm.model.cmmn.impl.instance.TimerEventImpl; import org.camunda.bpm.model.cmmn.impl.instance.TimerEventListenerImpl; import org.camunda.bpm.model.cmmn.impl.instance.TimerExpressionImpl; import org.camunda.bpm.model.cmmn.impl.instance.TransformationExpressionImpl; import org.camunda.bpm.model.cmmn.impl.instance.UserEventImpl; import org.camunda.bpm.model.cmmn.impl.instance.UserEventListenerImpl; import 
org.camunda.bpm.model.cmmn.impl.instance.camunda.CamundaCaseExecutionListenerImpl; import org.camunda.bpm.model.cmmn.impl.instance.camunda.CamundaExpressionImpl; import org.camunda.bpm.model.cmmn.impl.instance.camunda.CamundaFieldImpl; import org.camunda.bpm.model.cmmn.impl.instance.camunda.CamundaInImpl; import org.camunda.bpm.model.cmmn.impl.instance.camunda.CamundaOutImpl; import org.camunda.bpm.model.cmmn.impl.instance.camunda.CamundaScriptImpl; import org.camunda.bpm.model.cmmn.impl.instance.camunda.CamundaStringImpl; import org.camunda.bpm.model.cmmn.impl.instance.camunda.CamundaTaskListenerImpl; import org.camunda.bpm.model.cmmn.impl.instance.camunda.CamundaVariableListenerImpl; import org.camunda.bpm.model.cmmn.impl.instance.camunda.CamundaVariableOnPartImpl; import org.camunda.bpm.model.cmmn.impl.instance.camunda.CamundaVariableTransitionEventImpl; import org.camunda.bpm.model.xml.Model; import org.camunda.bpm.model.xml.ModelBuilder; import org.camunda.bpm.model.xml.ModelException; import org.camunda.bpm.model.xml.ModelParseException; import org.camunda.bpm.model.xml.ModelValidationException; import org.camunda.bpm.model.xml.impl.instance.ModelElementInstanceImpl; import org.camunda.bpm.model.xml.impl.util.IoUtil; /** * @author Roman Smirnov * */ public class Cmmn { /** the singleton instance of {@link Cmmn}. If you want to customize the behavior of Cmmn, * replace this instance with an instance of a custom subclass of {@link Cmmn}. */ public static Cmmn INSTANCE = new Cmmn(); /** the parser used by the Cmmn implementation. */ private CmmnParser cmmnParser = new CmmnParser(); private final ModelBuilder cmmnModelBuilder; /** The {@link Model} */ private Model cmmnModel; /** * Allows reading a {@link CmmnModelInstance} from a File. * * @param file the {@link File} to read the {@link CmmnModelInstance} from * @return the model read * @throws CmmnModelException if the model cannot be read */ public static CmmnModelInstance readModelFromFile(File file) { return INSTANCE.doReadModelFromFile(file); } /** * Allows reading a {@link CmmnModelInstance} from an {@link InputStream} * * @param stream the {@link InputStream} to read the {@link CmmnModelInstance} from * @return the model read * @throws ModelParseException if the model cannot be read */ public static CmmnModelInstance readModelFromStream(InputStream stream) { return INSTANCE.doReadModelFromInputStream(stream); } /** * Allows writing a {@link CmmnModelInstance} to a File. It will be * validated before writing. * * @param file the {@link File} to write the {@link CmmnModelInstance} to * @param modelInstance the {@link CmmnModelInstance} to write * @throws CmmnModelException if the model cannot be written * @throws ModelValidationException if the model is not valid */ public static void writeModelToFile(File file, CmmnModelInstance modelInstance) { INSTANCE.doWriteModelToFile(file, modelInstance); } /** * Allows writing a {@link CmmnModelInstance} to an {@link OutputStream}. It will be * validated before writing. * * @param stream the {@link OutputStream} to write the {@link CmmnModelInstance} to * @param modelInstance the {@link CmmnModelInstance} to write * @throws ModelException if the model cannot be written * @throws ModelValidationException if the model is not valid */ public static void writeModelToStream(OutputStream stream, CmmnModelInstance modelInstance) { INSTANCE.doWriteModelToOutputStream(stream, modelInstance); } /** * Allows the conversion of a {@link CmmnModelInstance} to a {@link String}.
It will * be validated before conversion. * * @param modelInstance the model instance to convert * @return the XML string representation of the model instance */ public static String convertToString(CmmnModelInstance modelInstance) { return INSTANCE.doConvertToString(modelInstance); } /** * Validate model DOM document * * @param modelInstance the {@link CmmnModelInstance} to validate * @throws ModelValidationException if the model is not valid */ public static void validateModel(CmmnModelInstance modelInstance) { INSTANCE.doValidateModel(modelInstance); } /** * Allows creating a new, empty {@link CmmnModelInstance}. * * @return the empty model. */ public static CmmnModelInstance createEmptyModel() { return INSTANCE.doCreateEmptyModel(); } /** * Register known types of the Cmmn model */ protected Cmmn() { cmmnModelBuilder = ModelBuilder.createInstance("CMMN Model"); cmmnModelBuilder.alternativeNamespace(CMMN10_NS, CMMN11_NS); doRegisterTypes(cmmnModelBuilder); cmmnModel = cmmnModelBuilder.build(); } protected CmmnModelInstance doReadModelFromFile(File file) { InputStream is = null; try { is = new FileInputStream(file); return doReadModelFromInputStream(is); } catch (FileNotFoundException e) { throw new CmmnModelException("Cannot read model from file "+file+": file does not exist."); } finally { IoUtil.closeSilently(is); } } protected CmmnModelInstance doReadModelFromInputStream(InputStream is) { return cmmnParser.parseModelFromStream(is); } protected void doWriteModelToFile(File file, CmmnModelInstance modelInstance) { OutputStream os = null; try { os = new FileOutputStream(file); doWriteModelToOutputStream(os, modelInstance); } catch (FileNotFoundException e) { throw new CmmnModelException("Cannot write model to file "+file+": file does not exist."); } finally { IoUtil.closeSilently(os); } } protected void doWriteModelToOutputStream(OutputStream os, CmmnModelInstance modelInstance) { // validate DOM document doValidateModel(modelInstance); // write XML IoUtil.writeDocumentToOutputStream(modelInstance.getDocument(), os); } protected String doConvertToString(CmmnModelInstance modelInstance) { // validate DOM document doValidateModel(modelInstance); // convert to XML string return IoUtil.convertXmlDocumentToString(modelInstance.getDocument()); } protected void doValidateModel(CmmnModelInstance modelInstance) { cmmnParser.validateModel(modelInstance.getDocument()); } protected CmmnModelInstance doCreateEmptyModel() { return cmmnParser.getEmptyModel(); } protected void doRegisterTypes(ModelBuilder modelBuilder) { ArtifactImpl.registerType(modelBuilder); ApplicabilityRuleImpl.registerType(modelBuilder); AssociationImpl.registerType(modelBuilder); BindingRefinementExpressionImpl.registerType(modelBuilder); BodyImpl.registerType(modelBuilder); CaseFileImpl.registerType(modelBuilder); CaseFileItemDefinitionImpl.registerType(modelBuilder); CaseFileItemImpl.registerType(modelBuilder); CaseFileItemOnPartImpl.registerType(modelBuilder); CaseFileItemStartTriggerImpl.registerType(modelBuilder); CaseFileItemTransitionStandardEventImpl.registerType(modelBuilder); CaseFileModelImpl.registerType(modelBuilder); CaseImpl.registerType(modelBuilder); CaseParameterImpl.registerType(modelBuilder); CasePlanModel.registerType(modelBuilder); CaseRoleImpl.registerType(modelBuilder); CaseRolesImpl.registerType(modelBuilder); CaseRefExpressionImpl.registerType(modelBuilder); CaseTaskImpl.registerType(modelBuilder); ChildrenImpl.registerType(modelBuilder); CmmnElementImpl.registerType(modelBuilder);
ConditionExpressionImpl.registerType(modelBuilder); CriterionImpl.registerType(modelBuilder); DecisionImpl.registerType(modelBuilder); DecisionParameterImpl.registerType(modelBuilder); DecisionRefExpressionImpl.registerType(modelBuilder); DecisionTaskImpl.registerType(modelBuilder); DefaultControlImpl.registerType(modelBuilder); DefinitionsImpl.registerType(modelBuilder); DiscretionaryItemImpl.registerType(modelBuilder); DocumentationImpl.registerType(modelBuilder); EntryCriterionImpl.registerType(modelBuilder); EventImpl.registerType(modelBuilder); EventListenerImpl.registerType(modelBuilder); ExitCriterionImpl.registerType(modelBuilder); ExpressionImpl.registerType(modelBuilder); ExtensionElementsImpl.registerType(modelBuilder); HumanTaskImpl.registerType(modelBuilder); IfPartImpl.registerType(modelBuilder); ImportImpl.registerType(modelBuilder); InputCaseParameterImpl.registerType(modelBuilder); InputProcessParameterImpl.registerType(modelBuilder); InputsCaseParameterImpl.registerType(modelBuilder); InputDecisionParameterImpl.registerType(modelBuilder); ItemControlImpl.registerType(modelBuilder); ManualActivationRuleImpl.registerType(modelBuilder); MilestoneImpl.registerType(modelBuilder); ModelElementInstanceImpl.registerType(modelBuilder); OnPartImpl.registerType(modelBuilder); OutputCaseParameterImpl.registerType(modelBuilder); OutputProcessParameterImpl.registerType(modelBuilder); OutputsCaseParameterImpl.registerType(modelBuilder); OutputDecisionParameterImpl.registerType(modelBuilder); ParameterImpl.registerType(modelBuilder); ParameterMappingImpl.registerType(modelBuilder); PlanFragmentImpl.registerType(modelBuilder); PlanItemControlImpl.registerType(modelBuilder); PlanItemDefinitionImpl.registerType(modelBuilder); PlanItemImpl.registerType(modelBuilder); PlanItemOnPartImpl.registerType(modelBuilder); PlanItemStartTriggerImpl.registerType(modelBuilder); PlanItemTransitionStandardEventImpl.registerType(modelBuilder); PlanningTableImpl.registerType(modelBuilder); ProcessImpl.registerType(modelBuilder); ProcessParameterImpl.registerType(modelBuilder); ProcessRefExpressionImpl.registerType(modelBuilder); ProcessTaskImpl.registerType(modelBuilder); PropertyImpl.registerType(modelBuilder); RelationshipImpl.registerType(modelBuilder); RepetitionRuleImpl.registerType(modelBuilder); RequiredRuleImpl.registerType(modelBuilder); RoleImpl.registerType(modelBuilder); SentryImpl.registerType(modelBuilder); SourceImpl.registerType(modelBuilder); StageImpl.registerType(modelBuilder); StartTriggerImpl.registerType(modelBuilder); TableItemImpl.registerType(modelBuilder); TargetImpl.registerType(modelBuilder); TaskImpl.registerType(modelBuilder); TextAnnotationImpl.registerType(modelBuilder); TextImpl.registerType(modelBuilder); TimerEventImpl.registerType(modelBuilder); TimerEventListenerImpl.registerType(modelBuilder); TransformationExpressionImpl.registerType(modelBuilder); TimerExpressionImpl.registerType(modelBuilder); UserEventImpl.registerType(modelBuilder); UserEventListenerImpl.registerType(modelBuilder); /** camunda extensions */ CamundaCaseExecutionListenerImpl.registerType(modelBuilder); CamundaExpressionImpl.registerType(modelBuilder); CamundaFieldImpl.registerType(modelBuilder); CamundaInImpl.registerType(modelBuilder); CamundaOutImpl.registerType(modelBuilder); CamundaScriptImpl.registerType(modelBuilder);
CamundaStringImpl.registerType(modelBuilder); CamundaTaskListenerImpl.registerType(modelBuilder); CamundaVariableListenerImpl.registerType(modelBuilder); CamundaVariableOnPartImpl.registerType(modelBuilder); CamundaVariableTransitionEventImpl.registerType(modelBuilder); } /** * @return the {@link Model} instance to use */ public Model getCmmnModel() { return cmmnModel; } public ModelBuilder getCmmnModelBuilder() { return cmmnModelBuilder; } /** * @param cmmnModel the cmmnModel to set */ public void setCmmnModel(Model cmmnModel) { this.cmmnModel = cmmnModel; } }
1
11005
As Miklas stated, please avoid wildcard imports.
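For reference, the explicit form the reviewers are asking to keep; the six `java.io` types below are exactly the ones the wildcard replaced in the diff above:

```java
// Explicit imports, as requested in review, instead of the wildcard form.
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;

// Avoid:
// import java.io.*;
```

Explicit imports make it obvious which types a class depends on and avoid accidental name clashes when packages grow.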
camunda-camunda-bpm-platform
java
@@ -204,5 +204,4 @@ function server (inputCompanionOptions = {}) { return { app, companionOptions } } -const { app, companionOptions } = server() -module.exports = { app, companionOptions, server } +module.exports = { server }
1
const express = require('express') const qs = require('querystring') const helmet = require('helmet') const morgan = require('morgan') const bodyParser = require('body-parser') const { URL } = require('url') const merge = require('lodash/merge') const session = require('express-session') const addRequestId = require('express-request-id')() const logger = require('../server/logger') const redis = require('../server/redis') const companion = require('../companion') const helper = require('./helper') const middlewares = require('../server/middlewares') /** * Configures an Express app for running Companion standalone * * @returns {object} */ function server (inputCompanionOptions = {}) { const app = express() // Query string keys whose values should not end up in logging output. const sensitiveKeys = new Set(['access_token', 'uppyAuthToken']) /** * Obscure the contents of query string keys listed in `sensitiveKeys`. * * Returns a copy of the object with unknown types removed and sensitive values replaced by ***. * * The input type is broader than it needs to be; this way TypeScript can help us guarantee that we're dealing with all possible inputs :) * * @param {{ [key: string]: any }} rawQuery * @returns {{ * query: { [key: string]: string }, * censored: boolean * }} */ function censorQuery (rawQuery) { /** @type {{ [key: string]: string }} */ const query = {} let censored = false Object.keys(rawQuery).forEach((key) => { if (typeof rawQuery[key] !== 'string') { return } if (sensitiveKeys.has(key)) { // replace logged access token query[key] = '********' censored = true } else { query[key] = rawQuery[key] } }) return { query, censored } } app.use(addRequestId) // log server requests. app.use(morgan('combined')) morgan.token('url', (req, res) => { const { query, censored } = censorQuery(req.query) return censored ? `${req.path}?${qs.stringify(query)}` : req.originalUrl || req.url }) morgan.token('referrer', (req, res) => { const ref = req.headers.referer || req.headers.referrer if (typeof ref === 'string') { const parsed = new URL(ref) const rawQuery = qs.parse(parsed.search.replace('?', '')) const { query, censored } = censorQuery(rawQuery) return censored ? `${parsed.href.split('?')[0]}?${qs.stringify(query)}` : parsed.href } }) // for server metrics tracking. // make app metrics available at '/metrics'. // TODO for the next major version: use instead companion option "metrics": true and remove this code // See discussion: https://github.com/transloadit/uppy/pull/2854/files/64be97205e4012818abfcc8b0b8b7fe09de91729#diff-68f5e3eb307c1c9d1fd02224fd7888e2f74718744e1b6e35d929fcab1cc50ed1 if (process.env.COMPANION_HIDE_METRICS !== 'true') { app.use(middlewares.metrics()) } app.use(bodyParser.json()) app.use(bodyParser.urlencoded({ extended: false })) // Use helmet to secure Express headers app.use(helmet.frameguard()) app.use(helmet.xssFilter()) app.use(helmet.noSniff()) app.use(helmet.ieNoOpen()) app.disable('x-powered-by') let corsOrigins if (process.env.COMPANION_CLIENT_ORIGINS) { corsOrigins = process.env.COMPANION_CLIENT_ORIGINS .split(',') .map((url) => (helper.hasProtocol(url) ?
url : `${process.env.COMPANION_PROTOCOL || 'http'}://${url}`)) } else if (process.env.COMPANION_CLIENT_ORIGINS_REGEX) { corsOrigins = new RegExp(process.env.COMPANION_CLIENT_ORIGINS_REGEX) } const moreCompanionOptions = { ...inputCompanionOptions, corsOrigins } const companionOptions = helper.getCompanionOptions(moreCompanionOptions) const sessionOptions = { secret: companionOptions.secret, resave: true, saveUninitialized: true, } if (companionOptions.redisUrl) { const RedisStore = require('connect-redis')(session) const redisClient = redis.client( merge({ url: companionOptions.redisUrl }, companionOptions.redisOptions) ) sessionOptions.store = new RedisStore({ client: redisClient }) } if (process.env.COMPANION_COOKIE_DOMAIN) { sessionOptions.cookie = { domain: process.env.COMPANION_COOKIE_DOMAIN, maxAge: 24 * 60 * 60 * 1000, // 1 day } } app.use(session(sessionOptions)) app.use((req, res, next) => { res.setHeader( 'Access-Control-Allow-Headers', 'Authorization, Origin, Content-Type, Accept' ) next() }) // Routes if (process.env.COMPANION_HIDE_WELCOME !== 'true') { app.get('/', (req, res) => { res.setHeader('Content-Type', 'text/plain') res.send(helper.buildHelpfulStartupMessage(companionOptions)) }) } let companionApp try { // initialize companion companionApp = companion.app(companionOptions) } catch (error) { console.error('\x1b[31m', error.message, '\x1b[0m') process.exit(1) } // add companion to server middleware if (process.env.COMPANION_PATH) { app.use(process.env.COMPANION_PATH, companionApp) } else { app.use(companionApp) } // WARNING: This route is added in order to validate your app with OneDrive. // Only set COMPANION_ONEDRIVE_DOMAIN_VALIDATION if you are sure that you are setting the // correct value for COMPANION_ONEDRIVE_KEY (i.e. application ID). If there's the slightest possibility // that you might have mixed the values for COMPANION_ONEDRIVE_KEY and COMPANION_ONEDRIVE_SECRET, // please DO NOT set any value for COMPANION_ONEDRIVE_DOMAIN_VALIDATION if (process.env.COMPANION_ONEDRIVE_DOMAIN_VALIDATION === 'true' && process.env.COMPANION_ONEDRIVE_KEY) { app.get('/.well-known/microsoft-identity-association.json', (req, res) => { const content = JSON.stringify({ associatedApplications: [ { applicationId: process.env.COMPANION_ONEDRIVE_KEY }, ], }) res.header('Content-Length', `${Buffer.byteLength(content, 'utf8')}`) // use writeHead to prevent 'charset' from being appended // https://docs.microsoft.com/en-us/azure/active-directory/develop/howto-configure-publisher-domain#to-select-a-verified-domain res.writeHead(200, { 'Content-Type': 'application/json' }) res.write(content) res.end() }) } app.use((req, res, next) => { return res.status(404).json({ message: 'Not Found' }) }) // @ts-ignore app.use((err, req, res, next) => { const logStackTrace = true if (app.get('env') === 'production') { // if the error is a URIError from the requested URL we only log the error message // to avoid unnecessary error alerts if (err.status === 400 && err instanceof URIError) { logger.error(err.message, 'root.error', req.id) } else { logger.error(err, 'root.error', req.id, logStackTrace) } res.status(err.status || 500).json({ message: 'Something went wrong', requestId: req.id }) } else { logger.error(err, 'root.error', req.id, logStackTrace) res.status(err.status || 500).json({ message: err.message, error: err, requestId: req.id }) } }) return { app, companionOptions } } const { app, companionOptions } = server() module.exports = { app, companionOptions, server }
1
14051
Maybe change it to `module.exports = server`? Because there's nothing else to export here.
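A sketch of the suggested shape, assuming no remaining consumers rely on the eagerly constructed `app`; the require path in the usage comment is hypothetical:

```js
// Export only the factory; callers construct the app (and options) on demand.
module.exports = server

// Hypothetical usage at a call site:
//   const server = require('./standalone')
//   const { app, companionOptions } = server()
```

Dropping the eager `server()` call also avoids side effects (and a possible `process.exit(1)`) at require time.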
transloadit-uppy
js
@@ -24,14 +24,13 @@ import ( "cloud.google.com/go/pubsub" "cloud.google.com/go/pubsub/pstest" + "google.golang.org/grpc/codes" reconcilertestingv1 "github.com/google/knative-gcp/pkg/reconciler/testing/v1" "github.com/google/knative-gcp/pkg/utils/authcheck" "google.golang.org/api/option" "google.golang.org/grpc" - "google.golang.org/grpc/codes" - v1 "k8s.io/api/apps/v1" corev1 "k8s.io/api/core/v1"
1
/* Copyright 2019 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package static import ( "context" "fmt" "strings" "testing" "cloud.google.com/go/pubsub" "cloud.google.com/go/pubsub/pstest" reconcilertestingv1 "github.com/google/knative-gcp/pkg/reconciler/testing/v1" "github.com/google/knative-gcp/pkg/utils/authcheck" "google.golang.org/api/option" "google.golang.org/grpc" "google.golang.org/grpc/codes" v1 "k8s.io/api/apps/v1" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apimachinery/pkg/types" "k8s.io/client-go/kubernetes/scheme" clientgotesting "k8s.io/client-go/testing" "knative.dev/pkg/apis" duckv1 "knative.dev/pkg/apis/duck/v1" "knative.dev/pkg/client/injection/ducks/duck/v1/addressable" _ "knative.dev/pkg/client/injection/ducks/duck/v1/addressable/fake" "knative.dev/pkg/configmap" "knative.dev/pkg/controller" . "knative.dev/pkg/reconciler/testing" "knative.dev/pkg/resolver" gcpduckv1 "github.com/google/knative-gcp/pkg/apis/duck/v1" pubsubv1 "github.com/google/knative-gcp/pkg/apis/intevents/v1" "github.com/google/knative-gcp/pkg/client/injection/reconciler/intevents/v1/pullsubscription" "github.com/google/knative-gcp/pkg/reconciler" "github.com/google/knative-gcp/pkg/reconciler/intevents" psreconciler "github.com/google/knative-gcp/pkg/reconciler/intevents/pullsubscription" "github.com/google/knative-gcp/pkg/reconciler/intevents/pullsubscription/resources" . 
"github.com/google/knative-gcp/pkg/reconciler/testing" reconcilerutilspubsub "github.com/google/knative-gcp/pkg/reconciler/utils/pubsub" ) const ( sourceName = "source" sinkName = "sink" transformerName = "transformer" testNS = "testnamespace" testImage = "test_image" sourceUID = sourceName + "-abc-123" testProject = "test-project-id" testTopicID = sourceUID + "-TOPIC" generation = 1 secretName = "testing-secret" failedToReconcileSubscriptionMsg = `Failed to reconcile Pub/Sub subscription` failedToDeleteSubscriptionMsg = `Failed to delete Pub/Sub subscription` ) var ( sinkDNS = sinkName + ".mynamespace.svc.cluster.local" sinkURI = apis.HTTP(sinkDNS) transformerDNS = transformerName + ".mynamespace.svc.cluster.local" transformerURI = apis.HTTP(transformerDNS) sinkGVK = metav1.GroupVersionKind{ Group: "testing.cloud.google.com", Version: "v1", Kind: "Sink", } testSubscriptionID = fmt.Sprintf("cre-ps_%s_%s_%s", testNS, sourceName, sourceUID) transformerGVK = metav1.GroupVersionKind{ Group: "testing.cloud.google.com", Version: "v1", Kind: "Transformer", } secret = corev1.SecretKeySelector{ LocalObjectReference: corev1.LocalObjectReference{ Name: secretName, }, Key: "testing-key", } ) func init() { // Add types to scheme _ = pubsubv1.AddToScheme(scheme.Scheme) } func newSecret() *corev1.Secret { return &corev1.Secret{ ObjectMeta: metav1.ObjectMeta{ Namespace: testNS, Name: secretName, }, Data: map[string][]byte{ "testing-key": []byte("abcd"), }, } } func newSink() *unstructured.Unstructured { return &unstructured.Unstructured{ Object: map[string]interface{}{ "apiVersion": "testing.cloud.google.com/v1", "kind": "Sink", "metadata": map[string]interface{}{ "namespace": testNS, "name": sinkName, }, "status": map[string]interface{}{ "address": map[string]interface{}{ "url": sinkURI.String(), }, }, }, } } func newSinkDestination(namespace string) duckv1.Destination { return duckv1.Destination{ Ref: &duckv1.KReference{ APIVersion: "testing.cloud.google.com/v1", Kind: "Sink", Name: sinkName, Namespace: namespace, }, } } func newTransformer() *unstructured.Unstructured { return &unstructured.Unstructured{ Object: map[string]interface{}{ "apiVersion": "testing.cloud.google.com/v1", "kind": "Transformer", "metadata": map[string]interface{}{ "namespace": testNS, "name": transformerName, }, "status": map[string]interface{}{ "address": map[string]interface{}{ "url": transformerURI.String(), }, }, }, } } func TestAllCases(t *testing.T) { table := TableTest{{ Name: "bad workqueue key", // Make sure Reconcile handles bad keys. Key: "too/many/parts", }, { Name: "key not found", // Make sure Reconcile handles good keys that don't exist. 
Key: "foo/not-found", }, { Name: "cannot get sink", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSecret(), }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), Eventf(corev1.EventTypeWarning, "InvalidSink", `InvalidSink: sinks.testing.cloud.google.com "sink" not found`), }, WantPatches: []clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), // updates reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionSinkNotFound(), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, }, { Name: "create client fails", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSink(), newSecret(), }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), Eventf(corev1.EventTypeWarning, "SubscriptionReconcileFailed", "Failed to reconcile Pub/Sub subscription: client-create-induced-error"), }, OtherTestData: map[string]interface{}{ "client-error": "client-create-induced-error", }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionProjectID(testProject), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionMarkNoTransformer("TransformerNil", "Transformer is nil"), 
reconcilertestingv1.WithPullSubscriptionTransformerURI(nil), reconcilertestingv1.WithPullSubscriptionMarkNoSubscription("SubscriptionReconcileFailed", fmt.Sprintf("%s: %s", failedToReconcileSubscriptionMsg, "client-create-induced-error")), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, WantPatches: []clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, }, { Name: "topic exists fails", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSink(), newSecret(), }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), Eventf(corev1.EventTypeWarning, "SubscriptionReconcileFailed", "Failed to reconcile Pub/Sub subscription: rpc error: code = Internal desc = Injected error"), }, OtherTestData: map[string]interface{}{ // GetTopic has a retry policy for Unknown status type, so we use Internal error instead. "server-options": []pstest.ServerReactorOption{pstest.WithErrorInjection("GetTopic", codes.Internal, "Injected error")}, }, WantPatches: []clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionProjectID(testProject), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionMarkNoTransformer("TransformerNil", "Transformer is nil"), reconcilertestingv1.WithPullSubscriptionTransformerURI(nil), reconcilertestingv1.WithPullSubscriptionMarkNoSubscription("SubscriptionReconcileFailed", fmt.Sprintf("%s: %s", failedToReconcileSubscriptionMsg, "rpc error: code = Internal desc = Injected error")), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, PostConditions: []func(*testing.T, *TableRow){ NoSubscriptionsExist(), }, }, { Name: "topic does not exist", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), 
reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSink(), newSecret(), }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), Eventf(corev1.EventTypeWarning, "SubscriptionReconcileFailed", "Failed to reconcile Pub/Sub subscription: Topic %q does not exist", testTopicID), }, OtherTestData: map[string]interface{}{}, WantPatches: []clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionProjectID(testProject), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionMarkNoTransformer("TransformerNil", "Transformer is nil"), reconcilertestingv1.WithPullSubscriptionTransformerURI(nil), reconcilertestingv1.WithPullSubscriptionMarkNoSubscription("SubscriptionReconcileFailed", fmt.Sprintf("%s: Topic %q does not exist", failedToReconcileSubscriptionMsg, testTopicID)), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, PostConditions: []func(*testing.T, *TableRow){ NoSubscriptionsExist(), }, }, { Name: "subscription exists fails", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSink(), newSecret(), }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), Eventf(corev1.EventTypeWarning, "SubscriptionReconcileFailed", "Failed to reconcile Pub/Sub subscription: rpc error: code = Internal desc = Injected error"), }, OtherTestData: map[string]interface{}{ "server-options": []pstest.ServerReactorOption{pstest.WithErrorInjection("GetSubscription", codes.Internal, "Injected error")}, }, WantPatches: []clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: 
testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionProjectID(testProject), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionMarkNoTransformer("TransformerNil", "Transformer is nil"), reconcilertestingv1.WithPullSubscriptionTransformerURI(nil), reconcilertestingv1.WithPullSubscriptionMarkNoSubscription("SubscriptionReconcileFailed", fmt.Sprintf("%s: %s", failedToReconcileSubscriptionMsg, "rpc error: code = Internal desc = Injected error")), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, }, { Name: "create subscription fails", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSink(), newSecret(), }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), Eventf(corev1.EventTypeWarning, "SubscriptionReconcileFailed", "Failed to reconcile Pub/Sub subscription: rpc error: code = Internal desc = Injected error"), }, OtherTestData: map[string]interface{}{ "pre": []PubsubAction{ Topic(testTopicID), }, "server-options": []pstest.ServerReactorOption{pstest.WithErrorInjection("CreateSubscription", codes.Internal, "Injected error")}, }, WantPatches: []clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionProjectID(testProject), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionMarkNoTransformer("TransformerNil", "Transformer is nil"), reconcilertestingv1.WithPullSubscriptionTransformerURI(nil), reconcilertestingv1.WithPullSubscriptionMarkNoSubscription("SubscriptionReconcileFailed", fmt.Sprintf("%s: %s", failedToReconcileSubscriptionMsg, "rpc error: code = Internal desc = Injected error")), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, }, { Name: "successfully created subscription", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: 
gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSink(), newSecret(), }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), Eventf(corev1.EventTypeNormal, "PullSubscriptionReconciled", `PullSubscription reconciled: "%s/%s"`, testNS, sourceName), }, OtherTestData: map[string]interface{}{ "pre": []PubsubAction{ Topic(testTopicID), }, }, WantCreates: []runtime.Object{ newReceiveAdapter(context.Background(), testImage, nil), }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionProjectID(testProject), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionMarkNoTransformer("TransformerNil", "Transformer is nil"), reconcilertestingv1.WithPullSubscriptionTransformerURI(nil), // Updates reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionMarkSubscribed(testSubscriptionID), reconcilertestingv1.WithPullSubscriptionMarkNoDeployed(deploymentName(), testNS), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, WantPatches: []clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, PostConditions: []func(*testing.T, *TableRow){ OnlySubscriptions(testSubscriptionID), }, }, { Name: "sink namespace empty, default to the source one", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, SourceSpec: duckv1.SourceSpec{ Sink: newSinkDestination(""), }, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSink(), newSecret(), }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), Eventf(corev1.EventTypeNormal, "PullSubscriptionReconciled", `PullSubscription reconciled: "%s/%s"`, testNS, sourceName), }, OtherTestData: map[string]interface{}{ "pre": []PubsubAction{ Topic(testTopicID), }, }, WantCreates: []runtime.Object{ newReceiveAdapter(context.Background(), testImage, nil), }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), 
reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionProjectID(testProject), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionMarkNoTransformer("TransformerNil", "Transformer is nil"), reconcilertestingv1.WithPullSubscriptionTransformerURI(nil), // Updates reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionMarkSubscribed(testSubscriptionID), reconcilertestingv1.WithPullSubscriptionMarkNoDeployed(deploymentName(), testNS), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, WantPatches: []clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, PostConditions: []func(*testing.T, *TableRow){ OnlySubscriptions(testSubscriptionID), }, }, { Name: "sink URI set instead of ref", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, SourceSpec: duckv1.SourceSpec{ Sink: duckv1.Destination{ URI: sinkURI, }, }, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSink(), newSecret(), }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), Eventf(corev1.EventTypeNormal, "PullSubscriptionReconciled", `PullSubscription reconciled: "%s/%s"`, testNS, sourceName), }, OtherTestData: map[string]interface{}{ "pre": []PubsubAction{ Topic(testTopicID), }, }, WantCreates: []runtime.Object{ newReceiveAdapter(context.Background(), testImage, nil), }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionProjectID(testProject), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionMarkNoTransformer("TransformerNil", "Transformer is nil"), reconcilertestingv1.WithPullSubscriptionTransformerURI(nil), // Updates reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionMarkSubscribed(testSubscriptionID), reconcilertestingv1.WithPullSubscriptionMarkNoDeployed(deploymentName(), testNS), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, WantPatches: 
[]clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, PostConditions: []func(*testing.T, *TableRow){ OnlySubscriptions(testSubscriptionID), }, }, { Name: "successful create - reuse existing receive adapter - match", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSink(), newSecret(), newAvailableReceiveAdapter(context.Background(), testImage, nil), }, OtherTestData: map[string]interface{}{ "pre": []PubsubAction{ Topic(testTopicID), }, }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), Eventf(corev1.EventTypeNormal, "PullSubscriptionReconciled", `PullSubscription reconciled: "%s/%s"`, testNS, sourceName), }, WantPatches: []clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionProjectID(testProject), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSubscribed(testSubscriptionID), reconcilertestingv1.WithPullSubscriptionMarkDeployed(deploymentName(), testNS), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionMarkNoTransformer("TransformerNil", "Transformer is nil"), reconcilertestingv1.WithPullSubscriptionTransformerURI(nil), reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, PostConditions: []func(*testing.T, *TableRow){ OnlySubscriptions(testSubscriptionID), }, }, { Name: "successful create - reuse existing receive adapter - mismatch", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionTransformer(transformerGVK, transformerName), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSink(), newTransformer(), newSecret(), newReceiveAdapter(context.Background(), "old"+testImage, nil), }, OtherTestData: map[string]interface{}{ "pre": []PubsubAction{ Topic(testTopicID), }, }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), 
Eventf(corev1.EventTypeNormal, "PullSubscriptionReconciled", `PullSubscription reconciled: "%s/%s"`, testNS, sourceName), }, WantUpdates: []clientgotesting.UpdateActionImpl{{ ActionImpl: clientgotesting.ActionImpl{ Namespace: testNS, Verb: "update", Resource: receiveAdapterGVR(), }, Object: newReceiveAdapter(context.Background(), testImage, transformerURI), }}, WantPatches: []clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), //WithPullSubscriptionFinalizers(resourceGroup), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionProjectID(testProject), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionTransformer(transformerGVK, transformerName), reconcilertestingv1.WithPullSubscriptionMarkSubscribed(testSubscriptionID), reconcilertestingv1.WithPullSubscriptionMarkNoDeployed(deploymentName(), testNS), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionMarkTransformer(transformerURI), reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, PostConditions: []func(*testing.T, *TableRow){ OnlySubscriptions(testSubscriptionID), }, }, { Name: "get existing receiver adapter fails", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionTransformer(transformerGVK, transformerName), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSink(), newTransformer(), newSecret(), }, OtherTestData: map[string]interface{}{ "pre": []PubsubAction{ Topic(testTopicID), }, }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), Eventf(corev1.EventTypeWarning, "DataPlaneReconcileFailed", "Failed to reconcile Data Plane resource(s): %s", "inducing failure for list deployments"), }, WithReactors: []clientgotesting.ReactionFunc{ InduceFailure("list", "deployments"), }, WantPatches: []clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionProjectID(testProject), 
reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionTransformer(transformerGVK, transformerName), reconcilertestingv1.WithPullSubscriptionMarkSubscribed(testSubscriptionID), reconcilertestingv1.WithPullSubscriptionMarkDeployedUnknown("ReceiveAdapterGetFailed", "Error getting the Receive Adapter: inducing failure for list deployments"), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionMarkTransformer(transformerURI), reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, PostConditions: []func(*testing.T, *TableRow){ OnlySubscriptions(testSubscriptionID), }, }, { Name: "create receiver adapter fails", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionTransformer(transformerGVK, transformerName), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSink(), newTransformer(), newSecret(), }, OtherTestData: map[string]interface{}{ "pre": []PubsubAction{ Topic(testTopicID), }, }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), Eventf(corev1.EventTypeWarning, "DataPlaneReconcileFailed", "Failed to reconcile Data Plane resource(s): %s", "inducing failure for create deployments"), }, WithReactors: []clientgotesting.ReactionFunc{ InduceFailure("create", "deployments"), }, WantCreates: []runtime.Object{ newReceiveAdapter(context.Background(), testImage, transformerURI), }, WantPatches: []clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionProjectID(testProject), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionTransformer(transformerGVK, transformerName), reconcilertestingv1.WithPullSubscriptionMarkSubscribed(testSubscriptionID), reconcilertestingv1.WithPullSubscriptionMarkDeployedFailed("ReceiveAdapterCreateFailed", "Error creating the Receive Adapter: inducing failure for create deployments"), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionMarkTransformer(transformerURI), reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, PostConditions: []func(*testing.T, *TableRow){ OnlySubscriptions(testSubscriptionID), }, }, { Name: "update receiver adapter fails", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, 
testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionTransformer(transformerGVK, transformerName), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSink(), newTransformer(), newSecret(), newReceiveAdapter(context.Background(), "old"+testImage, nil), }, OtherTestData: map[string]interface{}{ "pre": []PubsubAction{ Topic(testTopicID), }, }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", sourceName), Eventf(corev1.EventTypeWarning, "DataPlaneReconcileFailed", "Failed to reconcile Data Plane resource(s): %s", "inducing failure for update deployments"), }, WithReactors: []clientgotesting.ReactionFunc{ InduceFailure("update", "deployments"), }, WantUpdates: []clientgotesting.UpdateActionImpl{{ ActionImpl: clientgotesting.ActionImpl{ Namespace: testNS, Verb: "update", Resource: receiveAdapterGVR(), }, Object: newReceiveAdapter(context.Background(), testImage, transformerURI), }}, WantPatches: []clientgotesting.PatchActionImpl{ patchFinalizers(testNS, sourceName, resourceGroup), }, WantStatusUpdates: []clientgotesting.UpdateActionImpl{{ Object: reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithInitPullSubscriptionConditions, reconcilertestingv1.WithPullSubscriptionProjectID(testProject), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionTransformer(transformerGVK, transformerName), reconcilertestingv1.WithPullSubscriptionMarkSubscribed(testSubscriptionID), reconcilertestingv1.WithPullSubscriptionMarkDeployedFailed("ReceiveAdapterUpdateFailed", "Error updating the Receive Adapter: inducing failure for update deployments"), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionMarkTransformer(transformerURI), reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionSetDefaults, ), }}, PostConditions: []func(*testing.T, *TableRow){ OnlySubscriptions(testSubscriptionID), }, }, { Name: "deleting - failed to delete subscription", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSubscribed(testSubscriptionID), reconcilertestingv1.WithPullSubscriptionMarkDeployed(deploymentName(), testNS), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionProjectID(testProject), 
reconcilertestingv1.WithPullSubscriptionDeleted, reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSecret(), }, OtherTestData: map[string]interface{}{ "pre": []PubsubAction{ TopicAndSub(testTopicID, testSubscriptionID), }, "server-options": []pstest.ServerReactorOption{pstest.WithErrorInjection("DeleteSubscription", codes.Unknown, "Injected error")}, }, Key: testNS + "/" + sourceName, WantEvents: []string{ Eventf(corev1.EventTypeWarning, "SubscriptionDeleteFailed", "Failed to delete Pub/Sub subscription: rpc error: code = Unknown desc = Injected error"), }, WantStatusUpdates: nil, }, { Name: "successfully deleted subscription", Objects: []runtime.Object{ reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionObjectMetaGeneration(generation), reconcilertestingv1.WithPullSubscriptionStatusObservedGeneration(generation), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithPullSubscriptionSink(sinkGVK, sinkName), reconcilertestingv1.WithPullSubscriptionMarkSubscribed(testSubscriptionID), reconcilertestingv1.WithPullSubscriptionMarkDeployed(deploymentName(), testNS), reconcilertestingv1.WithPullSubscriptionMarkSink(sinkURI), reconcilertestingv1.WithPullSubscriptionProjectID(testProject), reconcilertestingv1.WithPullSubscriptionDeleted, reconcilertestingv1.WithPullSubscriptionSetDefaults, ), newSecret(), }, OtherTestData: map[string]interface{}{ "pre": []PubsubAction{ TopicAndSub(testTopicID, testSubscriptionID), }, }, PostConditions: []func(*testing.T, *TableRow){ NoSubscriptionsExist(), }, Key: testNS + "/" + sourceName, WantEvents: nil, }} table.Test(t, MakeFactory(func(ctx context.Context, listers *Listers, cmw configmap.Watcher, testData map[string]interface{}) controller.Reconciler { ctx = addressable.WithDuck(ctx) opts := []pstest.ServerReactorOption{} if testData != nil && testData["server-options"] != nil { opts = testData["server-options"].([]pstest.ServerReactorOption) } srv := pstest.NewServer(opts...) 
psclient, _ := GetTestClientCreateFunc(srv.Addr)(ctx, testProject) conn, err := grpc.Dial(srv.Addr, grpc.WithInsecure()) if err != nil { panic(fmt.Errorf("failed to dial test pubsub connection: %v", err)) } close := func() { srv.Close() conn.Close() } t.Cleanup(close) if testData != nil { InjectPubsubClient(testData, psclient) if testData["pre"] != nil { fixtures := testData["pre"].([]PubsubAction) for _, f := range fixtures { f(ctx, t, psclient) } } } // use normal create function or always error one var createClientFn reconcilerutilspubsub.CreateFn if testData != nil && testData["client-error"] != nil { createClientFn = func(ctx context.Context, projectID string, opts ...option.ClientOption) (*pubsub.Client, error) { return nil, fmt.Errorf(testData["client-error"].(string)) } } else { createClientFn = GetTestClientCreateFunc(srv.Addr) } pubsubBase := &intevents.PubSubBase{ Base: reconciler.NewBase(ctx, controllerAgentName, cmw), } r := &Reconciler{ Base: &psreconciler.Base{ PubSubBase: pubsubBase, DeploymentLister: listers.GetDeploymentLister(), PullSubscriptionLister: listers.GetPullSubscriptionLister(), UriResolver: resolver.NewURIResolver(ctx, func(types.NamespacedName) {}), ReceiveAdapterImage: testImage, CreateClientFn: createClientFn, ControllerAgentName: controllerAgentName, ResourceGroup: resourceGroup, }, } r.ReconcileDataPlaneFn = r.ReconcileDeployment return pullsubscription.NewReconciler(ctx, r.Logger, r.RunClientSet, listers.GetPullSubscriptionLister(), r.Recorder, r) })) } func deploymentName() string { ps := newPullSubscription() return resources.GenerateReceiveAdapterName(ps) } func newReceiveAdapter(ctx context.Context, image string, transformer *apis.URL) runtime.Object { ps := newPullSubscription() args := &resources.ReceiveAdapterArgs{ Image: image, PullSubscription: ps, Labels: resources.GetLabels(controllerAgentName, sourceName), SubscriptionID: testSubscriptionID, SinkURI: sinkURI, TransformerURI: transformer, AuthType: authcheck.Secret, } ra := resources.MakeReceiveAdapter(ctx, args) return ra } func newAvailableReceiveAdapter(ctx context.Context, image string, transformer *apis.URL) runtime.Object { obj := newReceiveAdapter(ctx, image, transformer) ra := obj.(*v1.Deployment) WithDeploymentAvailable()(ra) return obj } func newPullSubscription() *pubsubv1.PullSubscription { return reconcilertestingv1.NewPullSubscription(sourceName, testNS, reconcilertestingv1.WithPullSubscriptionUID(sourceUID), reconcilertestingv1.WithPullSubscriptionSpec(pubsubv1.PullSubscriptionSpec{ PubSubSpec: gcpduckv1.PubSubSpec{ Secret: &secret, Project: testProject, }, Topic: testTopicID, }), reconcilertestingv1.WithPullSubscriptionSetDefaults, ) } func receiveAdapterGVR() schema.GroupVersionResource { return schema.GroupVersionResource{ Group: "apps", Version: "v1", Resource: "deployment", } } func patchFinalizers(namespace, name, finalizer string, existingFinalizers ...string) clientgotesting.PatchActionImpl { action := clientgotesting.PatchActionImpl{} action.Name = name action.Namespace = namespace for i, ef := range existingFinalizers { existingFinalizers[i] = fmt.Sprintf("%q", ef) } if finalizer != "" { existingFinalizers = append(existingFinalizers, fmt.Sprintf("%q", finalizer)) } fname := strings.Join(existingFinalizers, ",") patch := `{"metadata":{"finalizers":[` + fname + `],"resourceVersion":""}}` action.Patch = []byte(patch) return action }
1
19,411
This may be an automated change, but the import seems to fit better where it was previously.
google-knative-gcp
go
@@ -0,0 +1,9 @@
+<script type="text/javascript">
+  window.analytics||(window.analytics=[]),window.analytics.methods=["identify","track","trackLink","trackForm","trackClick","trackSubmit","page","pageview","ab","alias","ready","group","on","once","off"],window.analytics.factory=function(t){return function(){var a=Array.prototype.slice.call(arguments);return a.unshift(t),window.analytics.push(a),window.analytics}};for(var i=0;i<window.analytics.methods.length;i++){var method=window.analytics.methods[i];window.analytics[method]=window.analytics.factory(method)}window.analytics.load=function(t){var a=document.createElement("script");a.type="text/javascript",a.async=!0,a.src=("https:"===document.location.protocol?"https://":"http://")+"d2dq2ahtl5zl1z.cloudfront.net/analytics.js/v1/"+t+"/analytics.min.js";var n=document.getElementsByTagName("script")[0];n.parentNode.insertBefore(a,n)},window.analytics.SNIPPET_VERSION="2.0.8",
+  window.analytics.load("2nexpdgku3");
+  window.analytics.page();
+</script>
+
+<% if signed_in? %>
+  <%= render 'signed_in_analytics' %>
+<% end %>
1
1
9,016
Might we want to separate including this snippet and sending the data for signed-in users into different partials?
thoughtbot-upcase
rb
@@ -32,7 +32,11 @@ type PeerAdder interface {
 }
 
 type ClosestPeerer interface {
-	ClosestPeer(addr swarm.Address) (peerAddr swarm.Address, err error)
+	// ClosestPeer returns the closest connected peer we have in relation to a
+	// given chunk address.
+	// This function will ignore peers with addresses provided in skipPeers.
+	// Returns topology.ErrWantSelf in case base is the closest to the chunk.
+	ClosestPeer(addr swarm.Address, skipPeers ...swarm.Address) (peerAddr swarm.Address, err error)
 }
 
 type EachPeerer interface {
1
// Copyright 2020 The Swarm Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package topology

import (
	"context"
	"errors"
	"io"

	"github.com/ethersphere/bee/pkg/swarm"
)

var (
	ErrNotFound = errors.New("no peer found")
	ErrWantSelf = errors.New("node wants self")
)

type Driver interface {
	PeerAdder
	ClosestPeerer
	EachPeerer
	NeighborhoodDepth() uint8
	SubscribePeersChange() (c <-chan struct{}, unsubscribe func())
	io.Closer
}

type PeerAdder interface {
	// AddPeers is called when peers are added to the topology backlog
	AddPeers(ctx context.Context, addr ...swarm.Address) error
}

type ClosestPeerer interface {
	ClosestPeer(addr swarm.Address) (peerAddr swarm.Address, err error)
}

type EachPeerer interface {
	// EachPeer iterates from closest bin to farthest
	EachPeer(EachPeerFunc) error
	// EachPeerRev iterates from farthest bin to closest
	EachPeerRev(EachPeerFunc) error
}

// EachPeerFunc is a callback that is called with a peer and its PO
type EachPeerFunc func(swarm.Address, uint8) (stop, jumpToNext bool, err error)
1
13,044
closest to the address
ethersphere-bee
go
@@ -18,13 +18,17 @@ const ROW_WIDTHS_MAP_NAME = 'autoRowSize';
  * @plugin AutoRowSize
  * @class AutoRowSize
  * @description
- * This plugin allows to set row heights based on their highest cells.
+ * The `AutoRowSize` plugin allows you to set row heights based on their highest cells.
  *
  * By default, the plugin is declared as `undefined`, which makes it disabled (same as if it was declared as `false`).
  * Enabling this plugin may decrease the overall table performance, as it needs to calculate the heights of all cells to
  * resize the rows accordingly.
  * If you experience problems with the performance, try turning this feature off and declaring the row heights manually.
  *
+ * But, to display Handsontable's [scrollbar](https://handsontable.com/docs/8.0.0/demo-scrolling.html)
+ * in a proper size, you need to enable the `AutoRowSize` plugin,
+ * by setting the [`autoRowSize`](@/api/options.md#autoRowSize) option to `true`.
+ *
  * Row height calculations are divided into sync and async part. Each of this parts has their own advantages and
  * disadvantages. Synchronous calculations are faster but they block the browser UI, while the slower asynchronous
  * operations don't block the browser UI.
1
import { BasePlugin } from '../base'; import { arrayEach, arrayFilter } from '../../helpers/array'; import { cancelAnimationFrame, requestAnimationFrame } from '../../helpers/feature'; import { isVisible } from '../../helpers/dom/element'; import GhostTable from '../../utils/ghostTable'; import { isObject, hasOwnProperty } from '../../helpers/object'; import { valueAccordingPercent, rangeEach } from '../../helpers/number'; import SamplesGenerator from '../../utils/samplesGenerator'; import { isPercentValue } from '../../helpers/string'; import { PhysicalIndexToValueMap as IndexToValueMap } from '../../translations'; export const PLUGIN_KEY = 'autoRowSize'; export const PLUGIN_PRIORITY = 40; const ROW_WIDTHS_MAP_NAME = 'autoRowSize'; /* eslint-disable jsdoc/require-description-complete-sentence */ /** * @plugin AutoRowSize * @class AutoRowSize * @description * This plugin allows to set row heights based on their highest cells. * * By default, the plugin is declared as `undefined`, which makes it disabled (same as if it was declared as `false`). * Enabling this plugin may decrease the overall table performance, as it needs to calculate the heights of all cells to * resize the rows accordingly. * If you experience problems with the performance, try turning this feature off and declaring the row heights manually. * * Row height calculations are divided into sync and async part. Each of this parts has their own advantages and * disadvantages. Synchronous calculations are faster but they block the browser UI, while the slower asynchronous * operations don't block the browser UI. * * To configure the sync/async distribution, you can pass an absolute value (number of rows) or a percentage value to a config object: * ```js * // as a number (300 rows in sync, rest async) * autoRowSize: {syncLimit: 300},. * * // as a string (percent) * autoRowSize: {syncLimit: '40%'},. * * // allow sample duplication * autoRowSize: {syncLimit: '40%', allowSampleDuplicates: true}, * ``` * * You can also use the `allowSampleDuplicates` option to allow sampling duplicate values when calculating the row * height. __Note__, that this might have a negative impact on performance. * * To configure this plugin see {@link Options#autoRowSize}. * * @example * * ```js * const hot = new Handsontable(document.getElementById('example'), { * data: getData(), * autoRowSize: true * }); * // Access to plugin instance: * const plugin = hot.getPlugin('autoRowSize'); * * plugin.getRowHeight(4); * * if (plugin.isEnabled()) { * // code... * } * ``` */ /* eslint-enable jsdoc/require-description-complete-sentence */ export class AutoRowSize extends BasePlugin { static get PLUGIN_KEY() { return PLUGIN_KEY; } static get PLUGIN_PRIORITY() { return PLUGIN_PRIORITY; } static get CALCULATION_STEP() { return 50; } static get SYNC_CALCULATION_LIMIT() { return 500; } constructor(hotInstance) { super(hotInstance); /** * PhysicalIndexToValueMap to keep and track heights for physical row indexes. * * @private * @type {PhysicalIndexToValueMap} */ this.rowHeightsMap = void 0; /** * Columns header's height cache. * * @private * @type {number} */ this.headerHeight = null; /** * Instance of {@link GhostTable} for rows and columns size calculations. * * @private * @type {GhostTable} */ this.ghostTable = new GhostTable(this.hot); /** * Instance of {@link SamplesGenerator} for generating samples necessary for rows height calculations. 
* * @private * @type {SamplesGenerator} */ this.samplesGenerator = new SamplesGenerator((row, col) => { let cellValue; if (row >= 0) { cellValue = this.hot.getDataAtCell(row, col); } else if (row === -1) { cellValue = this.hot.getColHeader(col); } return { value: cellValue }; }); /** * `true` if only the first calculation was performed. * * @private * @type {boolean} */ this.firstCalculation = true; /** * `true` if the size calculation is in progress. * * @type {boolean} */ this.inProgress = false; /** * Number of already measured rows (we already know their sizes). * * @type {number} */ this.measuredRows = 0; /** * PhysicalIndexToValueMap to keep and track heights for physical row indexes. * * @private * @type {PhysicalIndexToValueMap} */ this.rowHeightsMap = new IndexToValueMap(); this.hot.rowIndexMapper.registerMap(ROW_WIDTHS_MAP_NAME, this.rowHeightsMap); // Leave the listener active to allow auto-sizing the rows when the plugin is disabled. // This is necesseary for height recalculation for resize handler doubleclick (ManualRowResize). this.addHook('beforeRowResize', (size, row, isDblClick) => this.onBeforeRowResize(size, row, isDblClick)); } /** * Checks if the plugin is enabled in the handsontable settings. This method is executed in {@link Hooks#beforeInit} * hook and if it returns `true` than the {@link AutoRowSize#enablePlugin} method is called. * * @returns {boolean} */ isEnabled() { const settings = this.hot.getSettings()[PLUGIN_KEY]; return settings === true || isObject(settings); } /** * Enables the plugin functionality for this Handsontable instance. */ enablePlugin() { if (this.enabled) { return; } this.setSamplingOptions(); this.addHook('afterLoadData', () => this.onAfterLoadData()); this.addHook('beforeChange', changes => this.onBeforeChange(changes)); this.addHook('beforeColumnResize', () => this.recalculateAllRowsHeight()); this.addHook('beforeViewRender', force => this.onBeforeViewRender(force)); this.addHook('modifyRowHeight', (height, row) => this.getRowHeight(row, height)); this.addHook('modifyColumnHeaderHeight', () => this.getColumnHeaderHeight()); super.enablePlugin(); } /** * Disables the plugin functionality for this Handsontable instance. */ disablePlugin() { this.headerHeight = null; super.disablePlugin(); // Leave the listener active to allow auto-sizing the rows when the plugin is disabled. // This is necesseary for height recalculation for resize handler doubleclick (ManualRowResize). this.addHook('beforeRowResize', (size, row, isDblClick) => this.onBeforeRowResize(size, row, isDblClick)); } /** * Calculate a given rows height. * * @param {number|object} rowRange Row index or an object with `from` and `to` indexes as a range. * @param {number|object} colRange Column index or an object with `from` and `to` indexes as a range. * @param {boolean} [force=false] If `true` the calculation will be processed regardless of whether the width exists in the cache. */ calculateRowsHeight(rowRange = { from: 0, to: this.hot.countRows() - 1 }, colRange = { from: 0, to: this.hot.countCols() - 1 }, force = false) { // eslint-disable-line max-len const rowsRange = typeof rowRange === 'number' ? { from: rowRange, to: rowRange } : rowRange; const columnsRange = typeof colRange === 'number' ? 
{ from: colRange, to: colRange } : colRange; if (this.hot.getColHeader(0) !== null) { const samples = this.samplesGenerator.generateRowSamples(-1, columnsRange); this.ghostTable.addColumnHeadersRow(samples.get(-1)); } rangeEach(rowsRange.from, rowsRange.to, (row) => { // For rows we must calculate row height even when user had set height value manually. // We can shrink column but cannot shrink rows! if (force || this.rowHeightsMap.getValueAtIndex(row) === null) { const samples = this.samplesGenerator.generateRowSamples(row, columnsRange); arrayEach(samples, ([rowIndex, sample]) => this.ghostTable.addRow(rowIndex, sample)); } }); if (this.ghostTable.rows.length) { this.hot.batchExecution(() => { this.ghostTable.getHeights((row, height) => { if (row < 0) { this.headerHeight = height; } else { this.rowHeightsMap.setValueAtIndex(this.hot.toPhysicalRow(row), height); } }); }, true); this.measuredRows = rowsRange.to + 1; this.ghostTable.clean(); } } /** * Calculate all rows heights. The calculated row will be cached in the {@link AutoRowSize#heights} property. * To retrieve height for specified row use {@link AutoRowSize#getRowHeight} method. * * @param {object|number} colRange Row index or an object with `from` and `to` properties which define row range. */ calculateAllRowsHeight(colRange = { from: 0, to: this.hot.countCols() - 1 }) { let current = 0; const length = this.hot.countRows() - 1; let timer = null; this.inProgress = true; const loop = () => { // When hot was destroyed after calculating finished cancel frame if (!this.hot) { cancelAnimationFrame(timer); this.inProgress = false; return; } this.calculateRowsHeight({ from: current, to: Math.min(current + AutoRowSize.CALCULATION_STEP, length) }, colRange); current = current + AutoRowSize.CALCULATION_STEP + 1; if (current < length) { timer = requestAnimationFrame(loop); } else { cancelAnimationFrame(timer); this.inProgress = false; // @TODO Should call once per render cycle, currently fired separately in different plugins this.hot.view.adjustElementsSize(true); // tmp if (this.hot.view.wt.wtOverlays.leftOverlay.needFullRender) { this.hot.view.wt.wtOverlays.leftOverlay.clone.draw(); } } }; const syncLimit = this.getSyncCalculationLimit(); // sync if (this.firstCalculation && syncLimit >= 0) { this.calculateRowsHeight({ from: 0, to: syncLimit }, colRange); this.firstCalculation = false; current = syncLimit + 1; } // async if (current < length) { loop(); } else { this.inProgress = false; this.hot.view.adjustElementsSize(false); } } /** * Sets the sampling options. * * @private */ setSamplingOptions() { const setting = this.hot.getSettings()[PLUGIN_KEY]; const samplingRatio = setting && hasOwnProperty(setting, 'samplingRatio') ? setting.samplingRatio : void 0; const allowSampleDuplicates = setting && hasOwnProperty(setting, 'allowSampleDuplicates') ? setting.allowSampleDuplicates : void 0; if (samplingRatio && !isNaN(samplingRatio)) { this.samplesGenerator.setSampleCount(parseInt(samplingRatio, 10)); } if (allowSampleDuplicates) { this.samplesGenerator.setAllowDuplicates(allowSampleDuplicates); } } /** * Recalculates all rows height (overwrite cache values). */ recalculateAllRowsHeight() { if (isVisible(this.hot.view.wt.wtTable.TABLE)) { this.clearCache(); this.calculateAllRowsHeight(); } } /** * Gets value which tells how many rows should be calculated synchronously (rest of the rows will be calculated * asynchronously). The limit is calculated based on `syncLimit` set to autoRowSize option (see {@link Options#autoRowSize}). 
* * @returns {number} */ getSyncCalculationLimit() { const settings = this.hot.getSettings()[PLUGIN_KEY]; /* eslint-disable no-bitwise */ let limit = AutoRowSize.SYNC_CALCULATION_LIMIT; const rowsLimit = this.hot.countRows() - 1; if (isObject(settings)) { limit = settings.syncLimit; if (isPercentValue(limit)) { limit = valueAccordingPercent(rowsLimit, limit); } else { // Force to Number limit >>= 0; } } return Math.min(limit, rowsLimit); } /** * Gets the calculated row height. * * Mind that this method is different from the [Core](@/api/core.md)'s [`getRowHeight()`](@/api/core.md#getrowheight) method. * * @param {number} row Visual row index. * @param {number} [defaultHeight] Default row height. It will be picked up if no calculated height found. * @returns {number} */ getRowHeight(row, defaultHeight = void 0) { const cachedHeight = row < 0 ? this.headerHeight : this.rowHeightsMap.getValueAtIndex(this.hot.toPhysicalRow(row)); let height = defaultHeight; if (cachedHeight !== null && cachedHeight > (defaultHeight || 0)) { height = cachedHeight; } return height; } /** * Get the calculated column header height. * * @returns {number|undefined} */ getColumnHeaderHeight() { return this.headerHeight; } /** * Get the first visible row. * * @returns {number} Returns row index, -1 if table is not rendered or if there are no rows to base the the calculations on. */ getFirstVisibleRow() { const wot = this.hot.view.wt; if (wot.wtViewport.rowsVisibleCalculator) { return wot.wtTable.getFirstVisibleRow(); } if (wot.wtViewport.rowsRenderCalculator) { return wot.wtTable.getFirstRenderedRow(); } return -1; } /** * Gets the last visible row. * * @returns {number} Returns row index or -1 if table is not rendered. */ getLastVisibleRow() { const wot = this.hot.view.wt; if (wot.wtViewport.rowsVisibleCalculator) { return wot.wtTable.getLastVisibleRow(); } if (wot.wtViewport.rowsRenderCalculator) { return wot.wtTable.getLastRenderedRow(); } return -1; } /** * Clears cached heights. */ clearCache() { this.headerHeight = null; this.rowHeightsMap.init(); } /** * Clears cache by range. * * @param {object|number} range Row index or an object with `from` and `to` properties which define row range. */ clearCacheByRange(range) { const { from, to } = typeof range === 'number' ? { from: range, to: range } : range; this.hot.batchExecution(() => { rangeEach(Math.min(from, to), Math.max(from, to), (row) => { this.rowHeightsMap.setValueAtIndex(row, null); }); }, true); } /** * Checks if all heights were calculated. If not then return `true` (need recalculate). * * @returns {boolean} */ isNeedRecalculate() { return !!arrayFilter(this.rowHeightsMap.getValues().slice(0, this.measuredRows), item => (item === null)).length; } /** * On before view render listener. * * @private */ onBeforeViewRender() { const force = this.hot.renderCall; const fixedRowsBottom = this.hot.getSettings().fixedRowsBottom; const firstVisibleRow = this.getFirstVisibleRow(); const lastVisibleRow = this.getLastVisibleRow(); if (firstVisibleRow === -1 || lastVisibleRow === -1) { return; } this.calculateRowsHeight({ from: firstVisibleRow, to: lastVisibleRow }, void 0, force); // Calculate rows height synchronously for bottom overlay if (fixedRowsBottom) { const totalRows = this.hot.countRows() - 1; this.calculateRowsHeight({ from: totalRows - fixedRowsBottom, to: totalRows }); } if (this.isNeedRecalculate() && !this.inProgress) { this.calculateAllRowsHeight(); } } /** * On before row move listener. * * @private * @param {number} from Row index where was grabbed. 
* @param {number} to Destination row index. */ onBeforeRowMove(from, to) { this.clearCacheByRange({ from, to }); this.calculateAllRowsHeight(); } /** * On before row resize listener. * * @private * @param {number} size The size of the current row index. * @param {number} row Current row index. * @param {boolean} isDblClick Indicates if the resize was triggered by doubleclick. * @returns {number} */ onBeforeRowResize(size, row, isDblClick) { let newSize = size; if (isDblClick) { this.calculateRowsHeight(row, void 0, true); newSize = this.getRowHeight(row); } return newSize; } /** * On after load data listener. * * @private */ onAfterLoadData() { if (this.hot.view) { this.recalculateAllRowsHeight(); } else { // first load - initialization setTimeout(() => { if (this.hot) { this.recalculateAllRowsHeight(); } }, 0); } } /** * On before change listener. * * @private * @param {Array} changes 2D array containing information about each of the edited cells. */ onBeforeChange(changes) { let range = null; if (changes.length === 1) { range = changes[0][0]; } else if (changes.length > 1) { range = { from: changes[0][0], to: changes[changes.length - 1][0], }; } if (range !== null) { this.clearCacheByRange(range); } } /** * Destroys the plugin instance. */ destroy() { this.ghostTable.clean(); super.destroy(); } }
1
20,451
It should be much clearer for the end user
handsontable-handsontable
js
@@ -82,7 +82,7 @@ SINGLE_QUOTED_REGEX = re.compile("(%s)?'''" % "|".join(_PREFIXES))
 DOUBLE_QUOTED_REGEX = re.compile('(%s)?"""' % "|".join(_PREFIXES))
 QUOTE_DELIMITER_REGEX = re.compile("(%s)?(\"|')" % "|".join(_PREFIXES), re.DOTALL)
 
-MSGS = {
+MSGS = {  # pylint: disable=consider-using-namedtuple
     "E1300": (
         "Unsupported format character %r (%#02x) at index %d",
         "bad-format-character",
1
# Copyright (c) 2009-2014 LOGILAB S.A. (Paris, FRANCE) <[email protected]> # Copyright (c) 2010 Daniel Harding <[email protected]> # Copyright (c) 2012-2014 Google, Inc. # Copyright (c) 2013-2020 Claudiu Popa <[email protected]> # Copyright (c) 2014 Brett Cannon <[email protected]> # Copyright (c) 2014 Arun Persaud <[email protected]> # Copyright (c) 2015 Rene Zhang <[email protected]> # Copyright (c) 2015 Ionel Cristian Maries <[email protected]> # Copyright (c) 2016, 2018 Jakub Wilk <[email protected]> # Copyright (c) 2016 Peter Dawyndt <[email protected]> # Copyright (c) 2017 Łukasz Rogalski <[email protected]> # Copyright (c) 2017 Ville Skyttä <[email protected]> # Copyright (c) 2018, 2020 Anthony Sottile <[email protected]> # Copyright (c) 2018-2019 Lucas Cimon <[email protected]> # Copyright (c) 2018 Alan Chan <[email protected]> # Copyright (c) 2018 Yury Gribov <[email protected]> # Copyright (c) 2018 ssolanki <[email protected]> # Copyright (c) 2018 Nick Drozd <[email protected]> # Copyright (c) 2019-2021 Pierre Sassoulas <[email protected]> # Copyright (c) 2019 Wes Turner <[email protected]> # Copyright (c) 2019 Djailla <[email protected]> # Copyright (c) 2019 Hugo van Kemenade <[email protected]> # Copyright (c) 2020 Matthew Suozzo <[email protected]> # Copyright (c) 2020 hippo91 <[email protected]> # Copyright (c) 2020 谭九鼎 <[email protected]> # Copyright (c) 2020 Anthony <[email protected]> # Copyright (c) 2021 Marc Mueller <[email protected]> # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/master/LICENSE """Checker for string formatting operations. """ import builtins import collections import numbers import re import tokenize from typing import TYPE_CHECKING, Iterable import astroid from pylint.checkers import BaseChecker, BaseTokenChecker, utils from pylint.checkers.utils import check_messages from pylint.interfaces import IAstroidChecker, IRawChecker, ITokenChecker if TYPE_CHECKING: from typing import Counter # typing.Counter added in Python 3.6.1 _AST_NODE_STR_TYPES = ("__builtin__.unicode", "__builtin__.str", "builtins.str") # Prefixes for both strings and bytes literals per # https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals _PREFIXES = { "r", "u", "R", "U", "f", "F", "fr", "Fr", "fR", "FR", "rf", "rF", "Rf", "RF", "b", "B", "br", "Br", "bR", "BR", "rb", "rB", "Rb", "RB", } SINGLE_QUOTED_REGEX = re.compile("(%s)?'''" % "|".join(_PREFIXES)) DOUBLE_QUOTED_REGEX = re.compile('(%s)?"""' % "|".join(_PREFIXES)) QUOTE_DELIMITER_REGEX = re.compile("(%s)?(\"|')" % "|".join(_PREFIXES), re.DOTALL) MSGS = { "E1300": ( "Unsupported format character %r (%#02x) at index %d", "bad-format-character", "Used when an unsupported format character is used in a format string.", ), "E1301": ( "Format string ends in middle of conversion specifier", "truncated-format-string", "Used when a format string terminates before the end of a " "conversion specifier.", ), "E1302": ( "Mixing named and unnamed conversion specifiers in format string", "mixed-format-string", "Used when a format string contains both named (e.g. '%(foo)d') " "and unnamed (e.g. '%d') conversion specifiers. 
This is also " "used when a named conversion specifier contains * for the " "minimum field width and/or precision.", ), "E1303": ( "Expected mapping for format string, not %s", "format-needs-mapping", "Used when a format string that uses named conversion specifiers " "is used with an argument that is not a mapping.", ), "W1300": ( "Format string dictionary key should be a string, not %s", "bad-format-string-key", "Used when a format string that uses named conversion specifiers " "is used with a dictionary whose keys are not all strings.", ), "W1301": ( "Unused key %r in format string dictionary", "unused-format-string-key", "Used when a format string that uses named conversion specifiers " "is used with a dictionary that contains keys not required by the " "format string.", ), "E1304": ( "Missing key %r in format string dictionary", "missing-format-string-key", "Used when a format string that uses named conversion specifiers " "is used with a dictionary that doesn't contain all the keys " "required by the format string.", ), "E1305": ( "Too many arguments for format string", "too-many-format-args", "Used when a format string that uses unnamed conversion " "specifiers is given too many arguments.", ), "E1306": ( "Not enough arguments for format string", "too-few-format-args", "Used when a format string that uses unnamed conversion " "specifiers is given too few arguments", ), "E1307": ( "Argument %r does not match format type %r", "bad-string-format-type", "Used when a type required by format string " "is not suitable for actual argument type", ), "E1310": ( "Suspicious argument in %s.%s call", "bad-str-strip-call", "The argument to a str.{l,r,}strip call contains a duplicate character, ", ), "W1302": ( "Invalid format string", "bad-format-string", "Used when a PEP 3101 format string is invalid.", ), "W1303": ( "Missing keyword argument %r for format string", "missing-format-argument-key", "Used when a PEP 3101 format string that uses named fields " "doesn't receive one or more required keywords.", ), "W1304": ( "Unused format argument %r", "unused-format-string-argument", "Used when a PEP 3101 format string that uses named " "fields is used with an argument that " "is not required by the format string.", ), "W1305": ( "Format string contains both automatic field numbering " "and manual field specification", "format-combined-specification", "Used when a PEP 3101 format string contains both automatic " "field numbering (e.g. '{}') and manual field " "specification (e.g. 
'{0}').", ), "W1306": ( "Missing format attribute %r in format specifier %r", "missing-format-attribute", "Used when a PEP 3101 format string uses an " "attribute specifier ({0.length}), but the argument " "passed for formatting doesn't have that attribute.", ), "W1307": ( "Using invalid lookup key %r in format specifier %r", "invalid-format-index", "Used when a PEP 3101 format string uses a lookup specifier " "({a[1]}), but the argument passed for formatting " "doesn't contain or doesn't have that key as an attribute.", ), "W1308": ( "Duplicate string formatting argument %r, consider passing as named argument", "duplicate-string-formatting-argument", "Used when we detect that a string formatting is " "repeating an argument instead of using named string arguments", ), "W1309": ( "Using an f-string that does not have any interpolated variables", "f-string-without-interpolation", "Used when we detect an f-string that does not use any interpolation variables, " "in which case it can be either a normal string or a bug in the code.", ), } OTHER_NODES = ( astroid.Const, astroid.List, astroid.Lambda, astroid.FunctionDef, astroid.ListComp, astroid.SetComp, astroid.GeneratorExp, ) BUILTINS_STR = builtins.__name__ + ".str" BUILTINS_FLOAT = builtins.__name__ + ".float" BUILTINS_INT = builtins.__name__ + ".int" def get_access_path(key, parts): """Given a list of format specifiers, returns the final access path (e.g. a.b.c[0][1]). """ path = [] for is_attribute, specifier in parts: if is_attribute: path.append(f".{specifier}") else: path.append(f"[{specifier!r}]") return str(key) + "".join(path) def arg_matches_format_type(arg_type, format_type): if format_type in "sr": # All types can be printed with %s and %r return True if isinstance(arg_type, astroid.Instance): arg_type = arg_type.pytype() if arg_type == BUILTINS_STR: return format_type == "c" if arg_type == BUILTINS_FLOAT: return format_type in "deEfFgGn%" if arg_type == BUILTINS_INT: # Integers allow all types return True return False return True class StringFormatChecker(BaseChecker): """Checks string formatting operations to ensure that the format string is valid and the arguments match the format string. """ __implements__ = (IAstroidChecker,) name = "string" msgs = MSGS # pylint: disable=too-many-branches @check_messages( "bad-format-character", "truncated-format-string", "mixed-format-string", "bad-format-string-key", "missing-format-string-key", "unused-format-string-key", "bad-string-format-type", "format-needs-mapping", "too-many-format-args", "too-few-format-args", "bad-string-format-type", ) def visit_binop(self, node): if node.op != "%": return left = node.left args = node.right if not (isinstance(left, astroid.Const) and isinstance(left.value, str)): return format_string = left.value try: ( required_keys, required_num_args, required_key_types, required_arg_types, ) = utils.parse_format_string(format_string) except utils.UnsupportedFormatCharacter as exc: formatted = format_string[exc.index] self.add_message( "bad-format-character", node=node, args=(formatted, ord(formatted), exc.index), ) return except utils.IncompleteFormatString: self.add_message("truncated-format-string", node=node) return if required_keys and required_num_args: # The format string uses both named and unnamed format # specifiers. self.add_message("mixed-format-string", node=node) elif required_keys: # The format string uses only named format specifiers. 
# Check that the RHS of the % operator is a mapping object # that contains precisely the set of keys required by the # format string. if isinstance(args, astroid.Dict): keys = set() unknown_keys = False for k, _ in args.items: if isinstance(k, astroid.Const): key = k.value if isinstance(key, str): keys.add(key) else: self.add_message( "bad-format-string-key", node=node, args=key ) else: # One of the keys was something other than a # constant. Since we can't tell what it is, # suppress checks for missing keys in the # dictionary. unknown_keys = True if not unknown_keys: for key in required_keys: if key not in keys: self.add_message( "missing-format-string-key", node=node, args=key ) for key in keys: if key not in required_keys: self.add_message( "unused-format-string-key", node=node, args=key ) for key, arg in args.items: if not isinstance(key, astroid.Const): continue format_type = required_key_types.get(key.value, None) arg_type = utils.safe_infer(arg) if ( format_type is not None and arg_type not in (None, astroid.Uninferable) and not arg_matches_format_type(arg_type, format_type) ): self.add_message( "bad-string-format-type", node=node, args=(arg_type.pytype(), format_type), ) elif isinstance(args, (OTHER_NODES, astroid.Tuple)): type_name = type(args).__name__ self.add_message("format-needs-mapping", node=node, args=type_name) # else: # The RHS of the format specifier is a name or # expression. It may be a mapping object, so # there's nothing we can check. else: # The format string uses only unnamed format specifiers. # Check that the number of arguments passed to the RHS of # the % operator matches the number required by the format # string. args_elts = () if isinstance(args, astroid.Tuple): rhs_tuple = utils.safe_infer(args) num_args = None if hasattr(rhs_tuple, "elts"): args_elts = rhs_tuple.elts num_args = len(args_elts) elif isinstance(args, (OTHER_NODES, (astroid.Dict, astroid.DictComp))): args_elts = [args] num_args = 1 else: # The RHS of the format specifier is a name or # expression. It could be a tuple of unknown size, so # there's nothing we can check. 
num_args = None if num_args is not None: if num_args > required_num_args: self.add_message("too-many-format-args", node=node) elif num_args < required_num_args: self.add_message("too-few-format-args", node=node) for arg, format_type in zip(args_elts, required_arg_types): if not arg: continue arg_type = utils.safe_infer(arg) if ( arg_type not in ( None, astroid.Uninferable, ) and not arg_matches_format_type(arg_type, format_type) ): self.add_message( "bad-string-format-type", node=node, args=(arg_type.pytype(), format_type), ) @check_messages("f-string-without-interpolation") def visit_joinedstr(self, node): if isinstance(node.parent, astroid.FormattedValue): return for value in node.values: if isinstance(value, astroid.FormattedValue): return self.add_message("f-string-without-interpolation", node=node) @check_messages(*MSGS) def visit_call(self, node): func = utils.safe_infer(node.func) if ( isinstance(func, astroid.BoundMethod) and isinstance(func.bound, astroid.Instance) and func.bound.name in ("str", "unicode", "bytes") ): if func.name in ("strip", "lstrip", "rstrip") and node.args: arg = utils.safe_infer(node.args[0]) if not isinstance(arg, astroid.Const) or not isinstance(arg.value, str): return if len(arg.value) != len(set(arg.value)): self.add_message( "bad-str-strip-call", node=node, args=(func.bound.name, func.name), ) elif func.name == "format": self._check_new_format(node, func) def _detect_vacuous_formatting(self, node, positional_arguments): counter = collections.Counter( arg.name for arg in positional_arguments if isinstance(arg, astroid.Name) ) for name, count in counter.items(): if count == 1: continue self.add_message( "duplicate-string-formatting-argument", node=node, args=(name,) ) def _check_new_format(self, node, func): """Check the new string formatting.""" # Skip format nodes which don't have an explicit string on the # left side of the format operation. # We do this because our inference engine can't properly handle # redefinitions of the original string. # Note that there may not be any left side at all, if the format method # has been assigned to another variable. See issue 351. For example: # # fmt = 'some string {}'.format # fmt('arg') if isinstance(node.func, astroid.Attribute) and not isinstance( node.func.expr, astroid.Const ): return if node.starargs or node.kwargs: return try: strnode = next(func.bound.infer()) except astroid.InferenceError: return if not (isinstance(strnode, astroid.Const) and isinstance(strnode.value, str)): return try: call_site = astroid.arguments.CallSite.from_call(node) except astroid.InferenceError: return try: fields, num_args, manual_pos = utils.parse_format_method_string( strnode.value ) except utils.IncompleteFormatString: self.add_message("bad-format-string", node=node) return positional_arguments = call_site.positional_arguments named_arguments = call_site.keyword_arguments named_fields = {field[0] for field in fields if isinstance(field[0], str)} if num_args and manual_pos: self.add_message("format-combined-specification", node=node) return check_args = False # Consider "{[0]} {[1]}" as num_args. num_args += sum(1 for field in named_fields if field == "") if named_fields: for field in named_fields: if field and field not in named_arguments: self.add_message( "missing-format-argument-key", node=node, args=(field,) ) for field in named_arguments: if field not in named_fields: self.add_message( "unused-format-string-argument", node=node, args=(field,) ) # num_args can be 0 if manual_pos is not. 
num_args = num_args or manual_pos if positional_arguments or num_args: empty = any(True for field in named_fields if field == "") if named_arguments or empty: # Verify the required number of positional arguments # only if the .format got at least one keyword argument. # This means that the format strings accepts both # positional and named fields and we should warn # when one of the them is missing or is extra. check_args = True else: check_args = True if check_args: # num_args can be 0 if manual_pos is not. num_args = num_args or manual_pos if len(positional_arguments) > num_args: self.add_message("too-many-format-args", node=node) elif len(positional_arguments) < num_args: self.add_message("too-few-format-args", node=node) self._detect_vacuous_formatting(node, positional_arguments) self._check_new_format_specifiers(node, fields, named_arguments) def _check_new_format_specifiers(self, node, fields, named): """ Check attribute and index access in the format string ("{0.a}" and "{0[a]}"). """ for key, specifiers in fields: # Obtain the argument. If it can't be obtained # or inferred, skip this check. if key == "": # {[0]} will have an unnamed argument, defaulting # to 0. It will not be present in `named`, so use the value # 0 for it. key = 0 if isinstance(key, numbers.Number): try: argname = utils.get_argument_from_call(node, key) except utils.NoSuchArgumentError: continue else: if key not in named: continue argname = named[key] if argname in (astroid.Uninferable, None): continue try: argument = utils.safe_infer(argname) except astroid.InferenceError: continue if not specifiers or not argument: # No need to check this key if it doesn't # use attribute / item access continue if argument.parent and isinstance(argument.parent, astroid.Arguments): # Ignore any object coming from an argument, # because we can't infer its value properly. continue previous = argument parsed = [] for is_attribute, specifier in specifiers: if previous is astroid.Uninferable: break parsed.append((is_attribute, specifier)) if is_attribute: try: previous = previous.getattr(specifier)[0] except astroid.NotFoundError: if ( hasattr(previous, "has_dynamic_getattr") and previous.has_dynamic_getattr() ): # Don't warn if the object has a custom __getattr__ break path = get_access_path(key, parsed) self.add_message( "missing-format-attribute", args=(specifier, path), node=node, ) break else: warn_error = False if hasattr(previous, "getitem"): try: previous = previous.getitem(astroid.Const(specifier)) except ( astroid.AstroidIndexError, astroid.AstroidTypeError, astroid.AttributeInferenceError, ): warn_error = True except astroid.InferenceError: break if previous is astroid.Uninferable: break else: try: # Lookup __getitem__ in the current node, # but skip further checks, because we can't # retrieve the looked object previous.getattr("__getitem__") break except astroid.NotFoundError: warn_error = True if warn_error: path = get_access_path(key, parsed) self.add_message( "invalid-format-index", args=(specifier, path), node=node ) break try: previous = next(previous.infer()) except astroid.InferenceError: # can't check further if we can't infer it break class StringConstantChecker(BaseTokenChecker): """Check string literals""" __implements__ = (IAstroidChecker, ITokenChecker, IRawChecker) name = "string" msgs = { "W1401": ( "Anomalous backslash in string: '%s'. 
" "String constant might be missing an r prefix.", "anomalous-backslash-in-string", "Used when a backslash is in a literal string but not as an escape.", ), "W1402": ( "Anomalous Unicode escape in byte string: '%s'. " "String constant might be missing an r or u prefix.", "anomalous-unicode-escape-in-string", "Used when an escape like \\u is encountered in a byte " "string where it has no effect.", ), "W1404": ( "Implicit string concatenation found in %s", "implicit-str-concat", "String literals are implicitly concatenated in a " "literal iterable definition : " "maybe a comma is missing ?", {"old_names": [("W1403", "implicit-str-concat-in-sequence")]}, ), "W1405": ( "Quote delimiter %s is inconsistent with the rest of the file", "inconsistent-quotes", "Quote delimiters are not used consistently throughout a module " "(with allowances made for avoiding unnecessary escaping).", ), } options = ( ( "check-str-concat-over-line-jumps", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "This flag controls whether the " "implicit-str-concat should generate a warning " "on implicit string concatenation in sequences defined over " "several lines.", }, ), ( "check-quote-consistency", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "This flag controls whether inconsistent-quotes generates a " "warning when the character used as a quote delimiter is used " "inconsistently within a module.", }, ), ) # Characters that have a special meaning after a backslash in either # Unicode or byte strings. ESCAPE_CHARACTERS = "abfnrtvx\n\r\t\\'\"01234567" # Characters that have a special meaning after a backslash but only in # Unicode strings. UNICODE_ESCAPE_CHARACTERS = "uUN" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.string_tokens = {} # token position -> (token value, next token) def process_module(self, module): self._unicode_literals = "unicode_literals" in module.future_imports def process_tokens(self, tokens): encoding = "ascii" for i, (tok_type, token, start, _, line) in enumerate(tokens): if tok_type == tokenize.ENCODING: # this is always the first token processed encoding = token elif tok_type == tokenize.STRING: # 'token' is the whole un-parsed token; we can look at the start # of it to see whether it's a raw or unicode string etc. 
self.process_string_token(token, start[0], start[1]) # We figure the next token, ignoring comments & newlines: j = i + 1 while j < len(tokens) and tokens[j].type in ( tokenize.NEWLINE, tokenize.NL, tokenize.COMMENT, ): j += 1 next_token = tokens[j] if j < len(tokens) else None if encoding != "ascii": # We convert `tokenize` character count into a byte count, # to match with astroid `.col_offset` start = (start[0], len(line[: start[1]].encode(encoding))) self.string_tokens[start] = (str_eval(token), next_token) if self.config.check_quote_consistency: self.check_for_consistent_string_delimiters(tokens) @check_messages("implicit-str-concat") def visit_list(self, node): self.check_for_concatenated_strings(node.elts, "list") @check_messages("implicit-str-concat") def visit_set(self, node): self.check_for_concatenated_strings(node.elts, "set") @check_messages("implicit-str-concat") def visit_tuple(self, node): self.check_for_concatenated_strings(node.elts, "tuple") def visit_assign(self, node): if isinstance(node.value, astroid.Const) and isinstance(node.value.value, str): self.check_for_concatenated_strings([node.value], "assignment") def check_for_consistent_string_delimiters( self, tokens: Iterable[tokenize.TokenInfo] ) -> None: """Adds a message for each string using inconsistent quote delimiters. Quote delimiters are used inconsistently if " and ' are mixed in a module's shortstrings without having done so to avoid escaping an internal quote character. Args: tokens: The tokens to be checked against for consistent usage. """ # typing.Counter added in Python 3.6.1 so this type hint must be a comment string_delimiters = collections.Counter() # type: Counter[str] # First, figure out which quote character predominates in the module for tok_type, token, _, _, _ in tokens: if tok_type == tokenize.STRING and _is_quote_delimiter_chosen_freely(token): string_delimiters[_get_quote_delimiter(token)] += 1 if len(string_delimiters) > 1: # Ties are broken arbitrarily most_common_delimiter = string_delimiters.most_common(1)[0][0] for tok_type, token, start, _, _ in tokens: if tok_type != tokenize.STRING: continue quote_delimiter = _get_quote_delimiter(token) if ( _is_quote_delimiter_chosen_freely(token) and quote_delimiter != most_common_delimiter ): self.add_message( "inconsistent-quotes", line=start[0], args=(quote_delimiter,) ) def check_for_concatenated_strings(self, elements, iterable_type): for elt in elements: if not ( isinstance(elt, astroid.Const) and elt.pytype() in _AST_NODE_STR_TYPES ): continue if elt.col_offset < 0: # This can happen in case of escaped newlines continue if (elt.lineno, elt.col_offset) not in self.string_tokens: # This may happen with Latin1 encoding # cf. https://github.com/PyCQA/pylint/issues/2610 continue matching_token, next_token = self.string_tokens[ (elt.lineno, elt.col_offset) ] # We detect string concatenation: the AST Const is the # combination of 2 string tokens if matching_token != elt.value and next_token is not None: if next_token.type == tokenize.STRING and ( next_token.start[0] == elt.lineno or self.config.check_str_concat_over_line_jumps ): self.add_message( "implicit-str-concat", line=elt.lineno, args=(iterable_type,) ) def process_string_token(self, token, start_row, start_col): quote_char = None index = None for index, char in enumerate(token): if char in "'\"": quote_char = char break if quote_char is None: return prefix = token[:index].lower() # markers like u, b, r. 
after_prefix = token[index:] # Chop off quotes quote_length = ( 3 if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char else 1 ) string_body = after_prefix[quote_length:-quote_length] # No special checks on raw strings at the moment. if "r" not in prefix: self.process_non_raw_string_token( prefix, string_body, start_row, start_col + len(prefix) + quote_length, ) def process_non_raw_string_token( self, prefix, string_body, start_row, string_start_col ): """check for bad escapes in a non-raw string. prefix: lowercase string of eg 'ur' string prefix markers. string_body: the un-parsed body of the string, not including the quote marks. start_row: integer line number in the source. string_start_col: integer col number of the string start in the source. """ # Walk through the string; if we see a backslash then escape the next # character, and skip over it. If we see a non-escaped character, # alert, and continue. # # Accept a backslash when it escapes a backslash, or a quote, or # end-of-line, or one of the letters that introduce a special escape # sequence <https://docs.python.org/reference/lexical_analysis.html> # index = 0 while True: index = string_body.find("\\", index) if index == -1: break # There must be a next character; having a backslash at the end # of the string would be a SyntaxError. next_char = string_body[index + 1] match = string_body[index : index + 2] # The column offset will vary depending on whether the string token # is broken across lines. Calculate relative to the nearest line # break or relative to the start of the token's line. last_newline = string_body.rfind("\n", 0, index) if last_newline == -1: line = start_row col_offset = index + string_start_col else: line = start_row + string_body.count("\n", 0, index) col_offset = index - last_newline - 1 if next_char in self.UNICODE_ESCAPE_CHARACTERS: if "u" in prefix: pass elif "b" not in prefix: pass # unicode by default else: self.add_message( "anomalous-unicode-escape-in-string", line=line, args=(match,), col_offset=col_offset, ) elif next_char not in self.ESCAPE_CHARACTERS: self.add_message( "anomalous-backslash-in-string", line=line, args=(match,), col_offset=col_offset, ) # Whether it was a valid escape or not, backslash followed by # another character can always be consumed whole: the second # character can never be the start of a new backslash escape. index += 2 def register(linter): """required method to auto register this checker""" linter.register_checker(StringFormatChecker(linter)) linter.register_checker(StringConstantChecker(linter)) def str_eval(token): """ Mostly replicate `ast.literal_eval(token)` manually to avoid any performance hit. This supports f-strings, contrary to `ast.literal_eval`. We have to support all string literal notations: https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals """ if token[0:2].lower() in ("fr", "rf"): token = token[2:] elif token[0].lower() in ("r", "u", "f"): token = token[1:] if token[0:3] in ('"""', "'''"): return token[3:-3] return token[1:-1] def _is_long_string(string_token: str) -> bool: """Is this string token a "longstring" (is it triple-quoted)? Long strings are triple-quoted as defined in https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals This function only checks characters up through the open quotes. Because it's meant to be applied only to tokens that represent string literals, it doesn't bother to check for close-quotes (demonstrating that the literal is a well-formed string). 
Args: string_token: The string token to be parsed. Returns: A boolean representing whether or not this token matches a longstring regex. """ return bool( SINGLE_QUOTED_REGEX.match(string_token) or DOUBLE_QUOTED_REGEX.match(string_token) ) def _get_quote_delimiter(string_token: str) -> str: """Returns the quote character used to delimit this token string. This function does little checking for whether the token is a well-formed string. Args: string_token: The token to be parsed. Returns: A string containing solely the first quote delimiter character in the passed string. Raises: ValueError: No quote delimiter characters are present. """ match = QUOTE_DELIMITER_REGEX.match(string_token) if not match: raise ValueError("string token %s is not a well-formed string" % string_token) return match.group(2) def _is_quote_delimiter_chosen_freely(string_token: str) -> bool: """Was there a non-awkward option for the quote delimiter? Args: string_token: The quoted string whose delimiters are to be checked. Returns: Whether there was a choice in this token's quote character that would not have involved backslash-escaping an interior quote character. Long strings are excepted from this analysis under the assumption that their quote characters are set by policy. """ quote_delimiter = _get_quote_delimiter(string_token) unchosen_delimiter = '"' if quote_delimiter == "'" else "'" return bool( quote_delimiter and not _is_long_string(string_token) and unchosen_delimiter not in str_eval(string_token) )
1
14,151
Isn't this an example of this check being prone to false positives? We had nothing to fix in the codebase (and I think there are dicts that could be namedtuples in the MessageStore/MessageIdStore), yet we get false positives on our own checkers.
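For context, a minimal illustration of what two of the checks defined in this file flag (hypothetical snippets written for this note, not taken from the pylint codebase; `value` is an arbitrary name):

value = "x"

f"no placeholders here"             # W1309 f-string-without-interpolation: no interpolated variables
"{} and {}".format(value, value)    # W1308 duplicate-string-formatting-argument: `value` passed twice positionally
"{v} and {v}".format(v=value)       # accepted: a single named argument reused in the format string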
PyCQA-pylint
py
@@ -55,11 +55,11 @@ func TestOpenCensus(t *testing.T) { const provider = "gocloud.dev/blob/memblob" diff := octest.Diff(te.Spans(), te.Counts(), "gocloud.dev/blob", provider, []octest.Call{ - {"NewWriter", gcerrors.OK}, - {"NewRangeReader", gcerrors.OK}, - {"Attributes", gcerrors.OK}, - {"Delete", gcerrors.OK}, - {"NewRangeReader", gcerrors.NotFound}, + {Method: "NewWriter", Code: gcerrors.OK}, + {Method: "NewRangeReader", Code: gcerrors.OK}, + {Method: "Attributes", Code: gcerrors.OK}, + {Method: "Delete", Code: gcerrors.OK}, + {Method: "NewRangeReader", Code: gcerrors.NotFound}, }) if diff != "" { t.Error(diff)
1
// Copyright 2019 The Go Cloud Development Kit Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package blob_test import ( "context" "testing" "github.com/google/go-cmp/cmp" "go.opencensus.io/stats/view" "go.opencensus.io/tag" "gocloud.dev/blob" "gocloud.dev/blob/memblob" "gocloud.dev/gcerrors" "gocloud.dev/internal/oc" "gocloud.dev/internal/testing/octest" ) func TestOpenCensus(t *testing.T) { ctx := context.Background() te := octest.NewTestExporter(blob.OpenCensusViews) defer te.Unregister() bytes := []byte("foo") b := memblob.OpenBucket(nil) defer b.Close() if err := b.WriteAll(ctx, "key", bytes, nil); err != nil { t.Fatal(err) } if _, err := b.ReadAll(ctx, "key"); err != nil { t.Fatal(err) } if _, err := b.Attributes(ctx, "key"); err != nil { t.Fatal(err) } if err := b.Delete(ctx, "key"); err != nil { t.Fatal(err) } if _, err := b.ReadAll(ctx, "noSuchKey"); err == nil { t.Fatal("got nil, want error") } const provider = "gocloud.dev/blob/memblob" diff := octest.Diff(te.Spans(), te.Counts(), "gocloud.dev/blob", provider, []octest.Call{ {"NewWriter", gcerrors.OK}, {"NewRangeReader", gcerrors.OK}, {"Attributes", gcerrors.OK}, {"Delete", gcerrors.OK}, {"NewRangeReader", gcerrors.NotFound}, }) if diff != "" { t.Error(diff) } // Find and verify the bytes read/written metrics. var sawRead, sawWritten bool tags := []tag.Tag{{Key: oc.ProviderKey, Value: provider}} for !sawRead || !sawWritten { data := <-te.Stats switch data.View.Name { case "gocloud.dev/blob/bytes_read": if sawRead { continue } sawRead = true case "gocloud.dev/blob/bytes_written": if sawWritten { continue } sawWritten = true default: continue } if diff := cmp.Diff(data.Rows[0].Tags, tags, cmp.AllowUnexported(tag.Key{})); diff != "" { t.Errorf("tags for %s: %s", data.View.Name, diff) continue } sd, ok := data.Rows[0].Data.(*view.SumData) if !ok { t.Errorf("%s: data is %T, want SumData", data.View.Name, data.Rows[0].Data) continue } if got := int(sd.Value); got < len(bytes) { t.Errorf("%s: got %d, want at least %d", data.View.Name, got, len(bytes)) } } }
1
15,798
We control `octest`, so it shouldn't be necessary to provide struct literal keys. This is a case of vet being too picky.
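As a sketch of the vet behavior in question (a hypothetical struct standing in for `octest.Call`; note that vet's composites check only flags unkeyed literals of types imported from another package, so it fires on the test file, not here):

package main

import "fmt"

// Call mirrors the shape of octest.Call for illustration only.
type Call struct {
	Method string
	Code   int
}

func main() {
	unkeyed := Call{"NewWriter", 0}             // what vet flags when Call comes from another package
	keyed := Call{Method: "NewWriter", Code: 0} // the keyed form the patch switches to
	fmt.Println(unkeyed, keyed)
}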
google-go-cloud
go
@@ -90,5 +90,8 @@ module DMPRoadmap # is_test - (NOT advisable because test plans are excluded from statistics) # privately_visible - Only the owner and people they invite can access the plan config.default_plan_visibility = 'privately_visible' + + # The percentage of answered questions needed to enable the plan visibility section of the Share plan page + config.default_plan_percentage_answered = 50.00 end end
1
require File.expand_path('../boot', __FILE__) require 'rails/all' #require 'devise' require 'recaptcha/rails' require 'csv' # Require the gems listed in Gemfile, including any gems # you've limited to :test, :development, or :production. #if defined?(Bundler) # If you precompile assets before deploying to production, use this line #Bundler.require(*Rails.groups(:assets => %w(development test))) # If you want your assets lazily compiled in production, use this line # Bundler.require(:default, :assets, Rails.env) #end #Bundler.require(:default, Rails.env) #Changed when migrated to rails 4.0.0 Bundler.require(*Rails.groups) module DMPRoadmap class Application < Rails::Application # Settings in config/environments/* take precedence over those specified here. # Application configuration should go into files in config/initializers # -- all .rb files in that directory are automatically loaded. #commented 15.03.2016 #config.autoload_paths << Rails.root.join('lib') # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone. # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC. # config.time_zone = 'Central Time (US & Canada)' # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded. # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s] # config.i18n.default_locale = :de # Configure the default encoding used in templates for Ruby 1.9. config.encoding = "utf-8" # Configure sensitive parameters which will be filtered from the log file. config.filter_parameters += [:password] # Enable escaping HTML in JSON. config.active_support.escape_html_entities_in_json = true # Use SQL instead of Active Record's schema dumper when creating the database. # This is necessary if your schema can't be completely dumped by the schema dumper, # like if you have constraints or database-specific column types # config.active_record.schema_format = :sql # Enforce whitelist mode for mass assignment. # This will create an empty whitelist of attributes available for mass-assignment for all models # in your app. As such, your models will need to explicitly whitelist or blacklist accessible # parameters by using an attr_accessible or attr_protected declaration. #config.active_record.whitelist_attributes = true config.autoload_paths += %W(#{config.root}/lib) config.action_controller.include_all_helpers = true # Set the default host for mailer URLs config.action_mailer.default_url_options = { :host => "#{Socket.gethostname}" } # Enable shibboleth as an alternative authentication method # Requires server configuration and omniauth shibboleth provider configuration # See config/initializers/devise.rb config.shibboleth_enabled = false # Relative path to Shibboleth SSO Logout config.shibboleth_login = '/Shibboleth.sso/Login' config.shibboleth_logout_url = '/Shibboleth.sso/Logout?return=' # If this value is set to true your users will be presented with a list of orgs that have a # shibboleth identifier in the orgs_identifiers table. If it is set to false (default), the user # will be driven out to your federation's discovery service # # A super admin will also be able to associate orgs with their shibboleth entityIds if this is set to true config.shibboleth_use_filtered_discovery_service = false # Active Record will no longer suppress errors raised in after_rollback or after_commit # in the next version. Devise appears to be using those callbacks. 
# To accept the new behaviour use 'true' otherwise use 'false' config.active_record.raise_in_transactional_callbacks = true # Load Branded terminology (e.g. organization name, application name, etc.) config.branding = config_for(:branding).deep_symbolize_keys # The default visibility setting for new plans # organisationally_visible - Any member of the user's org can view, export and duplicate the plan # publicly_visibile - (NOT advisable because plans will show up in Public DMPs page by default) # is_test - (NOT advisable because test plans are excluded from statistics) # privately_visible - Only the owner and people they invite can access the plan config.default_plan_visibility = 'privately_visible' end end
1
16,958
I would put that constant under config/initializers/constant.rb and leave application.rb for Rails-specific configuration.
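A sketch of what that initializer might look like (file and option names follow the suggestion above; this is an assumption, not code from the project):

# config/initializers/constant.rb
# The percentage of answered questions needed to enable the plan
# visibility section of the Share plan page.
Rails.application.config.default_plan_percentage_answered = 50.00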
DMPRoadmap-roadmap
rb
@@ -572,7 +572,18 @@ func (fup folderUpdatePrepper) updateResolutionUsage(ctx context.Context, // addUnrefToFinalResOp makes a resolutionOp at the end of opsList if // one doesn't exist yet, and then adds the given pointer as an unref // block to it. -func addUnrefToFinalResOp(ops opsList, ptr BlockPointer) opsList { +func addUnrefToFinalResOp(ops opsList, ptr BlockPointer, + doNotUnref map[BlockPointer]bool) opsList { + // Make sure the block ID we want to unref isn't in the "do not + // unref" list -- it could mean that block has already been GC'd + // by the merged branch. We can't compare pointers directly + // because GC'd pointers contain no block context. + for noUnref := range doNotUnref { + if ptr.ID == noUnref.ID { + return ops + } + } + resOp, ok := ops[len(ops)-1].(*resolutionOp) if !ok { resOp = newResolutionOp()
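To see why the loop in this patch compares IDs rather than whole pointers, consider this simplified sketch (stand-in types invented for illustration; the real BlockPointer carries richer context):

package main

import "fmt"

// Minimal stand-ins for the kbfs types.
type BlockID string

type BlockContext struct{ RefNonce uint64 }

type BlockPointer struct {
	ID      BlockID
	Context BlockContext
}

func main() {
	live := BlockPointer{ID: "abc", Context: BlockContext{RefNonce: 7}}
	gcd := BlockPointer{ID: "abc"} // a GC'd pointer has lost its context

	fmt.Println(live == gcd)       // false: struct equality fails because the contexts differ
	fmt.Println(live.ID == gcd.ID) // true: comparing IDs still finds the match
}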
1
// Copyright 2017 Keybase Inc. All rights reserved. // Use of this source code is governed by a BSD // license that can be found in the LICENSE file. package libkbfs import ( "fmt" "github.com/keybase/client/go/logger" "github.com/keybase/client/go/protocol/keybase1" "github.com/keybase/kbfs/kbfsblock" "github.com/keybase/kbfs/tlf" "github.com/pkg/errors" "golang.org/x/net/context" ) // folderUpdatePrepper is a helper struct for preparing blocks and MD // updates before they get synced to the backend servers. It can be // used for a single update or for a batch of updates (e.g. conflict // resolution). type folderUpdatePrepper struct { config Config folderBranch FolderBranch blocks *folderBlockOps log logger.Logger } func (fup folderUpdatePrepper) id() tlf.ID { return fup.folderBranch.Tlf } func (fup folderUpdatePrepper) branch() BranchName { return fup.folderBranch.Branch } func (fup folderUpdatePrepper) nowUnixNano() int64 { return fup.config.Clock().Now().UnixNano() } func (fup folderUpdatePrepper) readyBlockMultiple(ctx context.Context, kmd KeyMetadata, currBlock Block, uid keybase1.UID, bps *blockPutState, bType keybase1.BlockType) ( info BlockInfo, plainSize int, err error) { info, plainSize, readyBlockData, err := ReadyBlock(ctx, fup.config.BlockCache(), fup.config.BlockOps(), fup.config.cryptoPure(), kmd, currBlock, uid, bType) if err != nil { return } bps.addNewBlock(info.BlockPointer, currBlock, readyBlockData, nil) return } func (fup folderUpdatePrepper) unembedBlockChanges( ctx context.Context, bps *blockPutState, md *RootMetadata, changes *BlockChanges, uid keybase1.UID) error { buf, err := fup.config.Codec().Encode(changes) if err != nil { return err } // Treat the block change list as a file so we can reuse all the // indirection code in fileData. block := NewFileBlock().(*FileBlock) bid, err := fup.config.cryptoPure().MakeTemporaryBlockID() if err != nil { return err } ptr := BlockPointer{ ID: bid, KeyGen: md.LatestKeyGeneration(), DataVer: fup.config.DataVersion(), DirectType: DirectBlock, Context: kbfsblock.MakeFirstContext(uid, keybase1.BlockType_MD), } file := path{fup.folderBranch, []pathNode{{ptr, fmt.Sprintf("<MD rev %d>", md.Revision())}}} dirtyBcache := simpleDirtyBlockCacheStandard() // Simple dirty bcaches don't need to be shut down. getter := func(_ context.Context, _ KeyMetadata, ptr BlockPointer, _ path, _ blockReqType) (*FileBlock, bool, error) { block, err := dirtyBcache.Get(fup.id(), ptr, fup.branch()) if err != nil { return nil, false, err } fblock, ok := block.(*FileBlock) if !ok { return nil, false, errors.Errorf( "Block for %s is not a file block, block type: %T", ptr, block) } return fblock, true, nil } cacher := func(ptr BlockPointer, block Block) error { return dirtyBcache.Put(fup.id(), ptr, fup.branch(), block) } // Start off the cache with the new block err = cacher(ptr, block) if err != nil { return err } df := newDirtyFile(file, dirtyBcache) fd := newFileData(file, uid, fup.config.cryptoPure(), fup.config.BlockSplitter(), md.ReadOnly(), getter, cacher, fup.log) // Write all the data. _, _, _, _, _, err = fd.write(ctx, buf, 0, block, DirEntry{}, df) if err != nil { return err } // There might be a new top block. topBlock, err := dirtyBcache.Get(fup.id(), ptr, fup.branch()) if err != nil { return err } block, ok := topBlock.(*FileBlock) if !ok { return errors.New("Top block change block no longer a file block") } // Ready all the child blocks. 
infos, err := fd.ready(ctx, fup.id(), fup.config.BlockCache(), dirtyBcache, fup.config.BlockOps(), bps, block, df) if err != nil { return err } for info := range infos { md.AddMDRefBytes(uint64(info.EncodedSize)) md.AddMDDiskUsage(uint64(info.EncodedSize)) } fup.log.CDebugf(ctx, "%d unembedded child blocks", len(infos)) // Ready the top block. info, _, err := fup.readyBlockMultiple( ctx, md.ReadOnly(), block, uid, bps, keybase1.BlockType_MD) if err != nil { return err } md.AddMDRefBytes(uint64(info.EncodedSize)) md.AddMDDiskUsage(uint64(info.EncodedSize)) md.data.cachedChanges = *changes changes.Info = info changes.Ops = nil return nil } // prepUpdateForPath updates, and readies, the blocks along the path // for the given write, up to the root of the tree or stopAt (if // specified). When it updates the root of the tree, it also modifies // the given head object with a new revision number and root block ID. // It first checks the provided lbc for blocks that may have been // modified by previous prepUpdateForPath calls or the FS calls // themselves. It returns the updated path to the changed directory, // the new or updated directory entry created as part of the call, and // a summary of all the blocks that now must be put to the block // server. // // This function is safe to use unlocked, but may modify MD to have // the same revision number as another one. Callers that require // serialized revision numbers must implement their own locking around // their instance. // // entryType must not be Sym. // // TODO: deal with multiple nodes for indirect blocks func (fup folderUpdatePrepper) prepUpdateForPath( ctx context.Context, lState *lockState, uid keybase1.UID, md *RootMetadata, newBlock Block, dir path, name string, entryType EntryType, mtime bool, ctime bool, stopAt BlockPointer, lbc localBcache) (path, DirEntry, *blockPutState, error) { // now ready each dblock and write the DirEntry for the next one // in the path currBlock := newBlock currName := name newPath := path{ FolderBranch: dir.FolderBranch, path: make([]pathNode, 0, len(dir.path)), } bps := newBlockPutState(len(dir.path)) refPath := dir.ChildPathNoPtr(name) var newDe DirEntry doSetTime := true now := fup.nowUnixNano() for len(newPath.path) < len(dir.path)+1 { info, plainSize, err := fup.readyBlockMultiple( ctx, md.ReadOnly(), currBlock, uid, bps, keybase1.BlockType_DATA) if err != nil { return path{}, DirEntry{}, nil, err } // prepend to path and setup next one newPath.path = append([]pathNode{{info.BlockPointer, currName}}, newPath.path...) // get the parent block prevIdx := len(dir.path) - len(newPath.path) var prevDblock *DirBlock var de DirEntry var nextName string nextDoSetTime := false if prevIdx < 0 { // root dir, update the MD instead de = md.data.Dir } else { prevDir := path{ FolderBranch: dir.FolderBranch, path: dir.path[:prevIdx+1], } // First, check the localBcache, which could contain // blocks that were modified across multiple calls to // prepUpdateForPath. var ok bool prevDblock, ok = lbc[prevDir.tailPointer()] if !ok { // If the block isn't in the local bcache, we // have to fetch it, possibly from the // network. Directory blocks are only ever // modified while holding mdWriterLock, so it's // safe to fetch them one at a time. prevDblock, err = fup.blocks.GetDir( ctx, lState, md.ReadOnly(), prevDir, blockWrite) if err != nil { return path{}, DirEntry{}, nil, err } } // modify the direntry for currName; make one // if it doesn't exist (which should only // happen the first time around). 
// // TODO: Pull the creation out of here and // into createEntryLocked(). if de, ok = prevDblock.Children[currName]; !ok { // If this isn't the first time // around, we have an error. if len(newPath.path) > 1 { return path{}, DirEntry{}, nil, NoSuchNameError{currName} } // If this is a file, the size should be 0. (TODO: // Ensure this.) If this is a directory, the size will // be filled in below. The times will be filled in // below as well, since we should only be creating a // new directory entry when doSetTime is true. de = DirEntry{ EntryInfo: EntryInfo{ Type: entryType, Size: 0, }, } // If we're creating a new directory entry, the // parent's times must be set as well. nextDoSetTime = true } currBlock = prevDblock nextName = prevDir.tailName() } if de.Type == Dir { // TODO: When we use indirect dir blocks, // we'll have to calculate the size some other // way. de.Size = uint64(plainSize) } if prevIdx < 0 { md.AddUpdate(md.data.Dir.BlockInfo, info) } else if prevDe, ok := prevDblock.Children[currName]; ok { md.AddUpdate(prevDe.BlockInfo, info) } else { // this is a new block md.AddRefBlock(info) } if len(refPath.path) > 1 { refPath = *refPath.parentPath() } de.BlockInfo = info if doSetTime { if mtime { de.Mtime = now } if ctime { de.Ctime = now } } if !newDe.IsInitialized() { newDe = de } if prevIdx < 0 { md.data.Dir = de } else { prevDblock.Children[currName] = de } currName = nextName // Stop before we get to the common ancestor; it will be taken care of // on the next sync call if prevIdx >= 0 && dir.path[prevIdx].BlockPointer == stopAt { // Put this back into the cache as dirty -- the next // prepUpdateForPath call will ready it. dblock, ok := currBlock.(*DirBlock) if !ok { return path{}, DirEntry{}, nil, BadDataError{stopAt.ID} } lbc[stopAt] = dblock break } doSetTime = nextDoSetTime } return newPath, newDe, bps, nil } // pathTreeNode represents a particular node in the part of the FS // tree affected by a set of updates which needs to be sync'd. type pathTreeNode struct { ptr BlockPointer parent *pathTreeNode children map[string]*pathTreeNode mergedPath path } type prepFolderCopyBehavior int const ( prepFolderCopyIndirectFileBlocks prepFolderCopyBehavior = 1 prepFolderDontCopyIndirectFileBlocks prepFolderCopyBehavior = 2 ) // prepTree, given a node in part of the FS tree that needs to be // sync'd, either calls prepUpdateForPath on it if the node has no // children of its own, or it calls prepTree recursively for all // children. When calling itself recursively on its children, it // instructs each child to sync only up to this node, except for the // last child which may sync back to the given stopAt pointer. This // ensures that the sync process will ready blocks that are complete // (with all child changes applied) before readying any parent blocks. // prepTree returns the merged blockPutState for itself and all of its // children. func (fup folderUpdatePrepper) prepTree(ctx context.Context, lState *lockState, unmergedChains *crChains, newMD *RootMetadata, uid keybase1.UID, node *pathTreeNode, stopAt BlockPointer, lbc localBcache, newFileBlocks fileBlockMap, dirtyBcache DirtyBlockCache, copyBehavior prepFolderCopyBehavior) ( *blockPutState, error) { // If this has no children, then sync it, as far back as stopAt. if len(node.children) == 0 { // Look for the directory block or the new file block. entryType := Dir var block Block var ok bool block, ok = lbc[node.ptr] // non-nil exactly when entryType != Dir. 
var fblock *FileBlock if !ok { // This must be a file, so look it up in the parent if node.parent == nil { return nil, fmt.Errorf("No parent found for node %v while "+ "syncing path %v", node.ptr, node.mergedPath.path) } fileBlocks, ok := newFileBlocks[node.parent.ptr] if !ok { return nil, fmt.Errorf("No file blocks found for parent %v", node.parent.ptr) } fblock, ok = fileBlocks[node.mergedPath.tailName()] if !ok { return nil, fmt.Errorf("No file block found name %s under "+ "parent %v", node.mergedPath.tailName(), node.parent.ptr) } block = fblock entryType = File // TODO: FIXME for Ex and Sym } var childBps *blockPutState // For an indirect file block, make sure a new // reference is made for every child block. if copyBehavior == prepFolderCopyIndirectFileBlocks && entryType != Dir && fblock.IsInd { childBps = newBlockPutState(1) var infos []BlockInfo var err error // If journaling is enabled, new references aren't // supported. We have to fetch each block and ready // it. TODO: remove this when KBFS-1149 is fixed. if TLFJournalEnabled(fup.config, fup.id()) { infos, err = fup.blocks.UndupChildrenInCopy( ctx, lState, newMD.ReadOnly(), node.mergedPath, childBps, dirtyBcache, fblock) if err != nil { return nil, err } } else { // Ready any mid-level internal children. _, err = fup.blocks.ReadyNonLeafBlocksInCopy( ctx, lState, newMD.ReadOnly(), node.mergedPath, childBps, dirtyBcache, fblock) if err != nil { return nil, err } infos, err = fup.blocks. GetIndirectFileBlockInfosWithTopBlock( ctx, lState, newMD.ReadOnly(), node.mergedPath, fblock) if err != nil { return nil, err } for _, info := range infos { // The indirect blocks were already added to // childBps, so only add the dedup'd leaf blocks. if info.RefNonce != kbfsblock.ZeroRefNonce { childBps.addNewBlock(info.BlockPointer, nil, ReadyBlockData{}, nil) } } } for _, info := range infos { newMD.AddRefBlock(info) } } // TODO: fix mtime and ctime? _, _, bps, err := fup.prepUpdateForPath( ctx, lState, uid, newMD, block, *node.mergedPath.parentPath(), node.mergedPath.tailName(), entryType, false, false, stopAt, lbc) if err != nil { return nil, err } if childBps != nil { bps.mergeOtherBps(childBps) } return bps, nil } // If there is more than one child, use this node as the stopAt // since it is the branch point, except for the last child. bps := newBlockPutState(len(lbc)) count := 0 for _, child := range node.children { localStopAt := node.ptr count++ if count == len(node.children) { localStopAt = stopAt } childBps, err := fup.prepTree( ctx, lState, unmergedChains, newMD, uid, child, localStopAt, lbc, newFileBlocks, dirtyBcache, copyBehavior) if err != nil { return nil, err } bps.mergeOtherBps(childBps) } return bps, nil } // updateResolutionUsage figures out how many bytes are referenced and // unreferenced in the merged branch by this resolution. Only needs // to be called for non-squash resolutions. func (fup folderUpdatePrepper) updateResolutionUsage(ctx context.Context, lState *lockState, md *RootMetadata, bps *blockPutState, unmergedChains, mergedChains *crChains, mostRecentMergedMD ImmutableRootMetadata, refs, unrefs map[BlockPointer]bool) error { md.SetRefBytes(0) md.SetUnrefBytes(0) md.SetMDRefBytes(0) md.SetDiskUsage(mostRecentMergedMD.DiskUsage()) md.SetMDDiskUsage(mostRecentMergedMD.MDDiskUsage()) localBlocks := make(map[BlockPointer]Block) for _, bs := range bps.blockStates { if bs.block != nil { localBlocks[bs.blockPtr] = bs.block } } // Add bytes for every ref'd block. 
refPtrsToFetch := make([]BlockPointer, 0, len(refs)) var refSum uint64 for ptr := range refs { if block, ok := localBlocks[ptr]; ok { refSum += uint64(block.GetEncodedSize()) } else { refPtrsToFetch = append(refPtrsToFetch, ptr) } fup.log.CDebugf(ctx, "Ref'ing block %v", ptr) } // Look up the total sum of the ref blocks in parallel to get // their sizes. // // TODO: If the blocks weren't already in the cache, this call // won't cache them, so it's kind of wasting work. Furthermore, // we might be able to get the encoded size from other sources as // well (such as its directory entry or its indirect file block) // if we happened to have come across it before. refSumFetched, err := fup.blocks.GetCleanEncodedBlocksSizeSum( ctx, lState, md.ReadOnly(), refPtrsToFetch, nil, fup.branch()) if err != nil { return err } refSum += refSumFetched fup.log.CDebugf(ctx, "Ref'ing a total of %d bytes", refSum) md.AddRefBytes(refSum) md.AddDiskUsage(refSum) unrefPtrsToFetch := make([]BlockPointer, 0, len(unrefs)) for ptr := range unrefs { original, ok := unmergedChains.originals[ptr] if !ok { original = ptr } if original != ptr || unmergedChains.isCreated(original) { // Only unref pointers that weren't created as part of the // unmerged branch. Either they existed already or they // were created as part of the merged branch. continue } // Also make sure this wasn't already removed or overwritten // on the merged branch. original, ok = mergedChains.originals[ptr] if !ok { original = ptr } mergedChain, ok := mergedChains.byOriginal[original] if (ok && original != mergedChain.mostRecent && original == ptr) || mergedChains.isDeleted(original) { continue } unrefPtrsToFetch = append(unrefPtrsToFetch, ptr) } // Look up the unref blocks in parallel to get their sizes. Since // we don't know whether these are files or directories, just look // them up generically. Ignore any recoverable errors for unrefs. // Note that we can't combine these with the above ref fetches // since they require a different MD. unrefSum, err := fup.blocks.GetCleanEncodedBlocksSizeSum( ctx, lState, mostRecentMergedMD, unrefPtrsToFetch, unrefs, fup.branch()) if err != nil { return err } // Subtract bytes for every unref'd block that wasn't created in // the unmerged branch. fup.log.CDebugf(ctx, "Unref'ing a total of %d bytes", unrefSum) md.AddUnrefBytes(unrefSum) md.SetDiskUsage(md.DiskUsage() - unrefSum) return nil } // addUnrefToFinalResOp makes a resolutionOp at the end of opsList if // one doesn't exist yet, and then adds the given pointer as an unref // block to it. func addUnrefToFinalResOp(ops opsList, ptr BlockPointer) opsList { resOp, ok := ops[len(ops)-1].(*resolutionOp) if !ok { resOp = newResolutionOp() ops = append(ops, resOp) } resOp.AddUnrefBlock(ptr) return ops } // updateResolutionUsageAndPointers figures out how many bytes are // referenced and unreferenced in the merged branch by this resolution // (if needed), and adds referenced and unreferenced pointers to a // final `resolutionOp` as necessary. It should be called before the // block changes are unembedded in md. It returns the list of blocks // that can be remove from the flushing queue, if any. 
func (fup folderUpdatePrepper) updateResolutionUsageAndPointers( ctx context.Context, lState *lockState, md *RootMetadata, bps *blockPutState, unmergedChains, mergedChains *crChains, mostRecentUnmergedMD, mostRecentMergedMD ImmutableRootMetadata, isLocalSquash bool) ( blocksToDelete []kbfsblock.ID, err error) { // Track the refs and unrefs in a set, to ensure no duplicates refs := make(map[BlockPointer]bool) unrefs := make(map[BlockPointer]bool) for _, op := range md.data.Changes.Ops { // Iterate in reverse since we may be deleting references as we go. for i := len(op.Refs()) - 1; i >= 0; i-- { ptr := op.Refs()[i] // Don't add usage if it's an unembedded block change // pointer. Also, we shouldn't be referencing this // anymore! if unmergedChains.blockChangePointers[ptr] { fup.log.CDebugf(ctx, "Ignoring block change ptr %v", ptr) op.DelRefBlock(ptr) } else { refs[ptr] = true } } // Iterate in reverse since we may be deleting unrefs as we go. for i := len(op.Unrefs()) - 1; i >= 0; i-- { ptr := op.Unrefs()[i] unrefs[ptr] = true delete(refs, ptr) if _, isCreateOp := op.(*createOp); isCreateOp { // The only way a create op should have unref blocks // is if it was created during conflict resolution. // In that case, we should move the unref to a final // resolution op, so it doesn't confuse future // resolutions. op.DelUnrefBlock(ptr) md.data.Changes.Ops = addUnrefToFinalResOp(md.data.Changes.Ops, ptr) } } for _, update := range op.allUpdates() { if update.Unref != update.Ref { unrefs[update.Unref] = true delete(refs, update.Unref) refs[update.Ref] = true } } } if isLocalSquash { unmergedUsage := mostRecentUnmergedMD.DiskUsage() mergedUsage := mostRecentMergedMD.DiskUsage() // Local squashes can just use the bytes and usage from the // latest unmerged MD, and we can avoid all the block fetching // done by `updateResolutionUsage()`. md.SetDiskUsage(unmergedUsage) // TODO: it might be better to add up all the ref bytes, and // all the unref bytes, from all unmerged MDs, instead of just // calculating the difference between the usages. But that's // not quite right either since it counts blocks that are // ref'd and unref'd within the squash. if md.DiskUsage() > mergedUsage { md.SetRefBytes(md.DiskUsage() - mergedUsage) md.SetUnrefBytes(0) } else { md.SetRefBytes(0) md.SetUnrefBytes(mergedUsage - md.DiskUsage()) } mergedMDUsage := mostRecentMergedMD.MDDiskUsage() if md.MDDiskUsage() < mergedMDUsage { return nil, fmt.Errorf("MD disk usage went down on unmerged "+ "branch: %d vs %d", md.MDDiskUsage(), mergedMDUsage) } // Additional MD disk usage will be determined entirely by the // later `unembedBlockChanges()` call. md.SetMDDiskUsage(mergedMDUsage) md.SetMDRefBytes(0) } else { err = fup.updateResolutionUsage( ctx, lState, md, bps, unmergedChains, mergedChains, mostRecentMergedMD, refs, unrefs) if err != nil { return nil, err } } // Any blocks that were created on the unmerged branch and have // been flushed, but didn't survive the resolution, should be // marked as unreferenced in the resolution. 
toUnref := make(map[BlockPointer]bool) for ptr := range unmergedChains.originals { if !refs[ptr] && !unrefs[ptr] { toUnref[ptr] = true } } for ptr := range unmergedChains.createdOriginals { if !refs[ptr] && !unrefs[ptr] && unmergedChains.byOriginal[ptr] != nil { toUnref[ptr] = true } else if unmergedChains.blockChangePointers[ptr] { toUnref[ptr] = true } } for ptr := range unmergedChains.toUnrefPointers { toUnref[ptr] = true } deletedBlocks := make(map[BlockPointer]bool) for ptr := range toUnref { if ptr == zeroPtr || unmergedChains.doNotUnrefPointers[ptr] { // A zero pointer can sneak in from the unrefs field of a // syncOp following a failed syncOp, via // `unmergedChains.toUnrefPointers` after a chain collapse. continue } isUnflushed, err := fup.config.BlockServer().IsUnflushed( ctx, fup.id(), ptr.ID) if err != nil { return nil, err } if isUnflushed { blocksToDelete = append(blocksToDelete, ptr.ID) deletedBlocks[ptr] = true // No need to unreference this since we haven't flushed it yet. continue } // Put the unrefs in a new resOp after the final operation, to // cancel out any stray refs in earlier ops. fup.log.CDebugf(ctx, "Unreferencing dropped block %v", ptr) md.data.Changes.Ops = addUnrefToFinalResOp(md.data.Changes.Ops, ptr) } // Scrub all unrefs of blocks that never made it to the server, // for smaller updates and to make things easier on the // StateChecker. if len(deletedBlocks) > 0 { for _, op := range md.data.Changes.Ops { var toUnref []BlockPointer for _, unref := range op.Unrefs() { if deletedBlocks[unref] { toUnref = append(toUnref, unref) } } for _, unref := range toUnref { op.DelUnrefBlock(unref) } } } fup.log.CDebugf(ctx, "New md byte usage: %d ref, %d unref, %d total usage "+ "(previously %d)", md.RefBytes(), md.UnrefBytes(), md.DiskUsage(), mostRecentMergedMD.DiskUsage()) return blocksToDelete, nil } func (fup folderUpdatePrepper) makeSyncTree(ctx context.Context, resolvedPaths map[BlockPointer]path, lbc localBcache, newFileBlocks fileBlockMap) *pathTreeNode { var root *pathTreeNode for _, p := range resolvedPaths { fup.log.CDebugf(ctx, "Creating tree from merged path: %v", p.path) var parent *pathTreeNode for i, pnode := range p.path { var nextNode *pathTreeNode if parent != nil { nextNode = parent.children[pnode.Name] } else if root != nil { nextNode = root } if nextNode == nil { fup.log.CDebugf(ctx, "Creating node with pointer %v", pnode.BlockPointer) nextNode = &pathTreeNode{ ptr: pnode.BlockPointer, parent: parent, children: make(map[string]*pathTreeNode), // save the full path, since we'll only use this // at the leaves anyway. mergedPath: p, } if parent != nil { parent.children[pnode.Name] = nextNode } } if parent == nil && root == nil { root = nextNode } parent = nextNode // If this node is a directory that has files to sync, // make nodes for them as well. (Because of // collapseActions, these files won't have their own // mergedPath.) blocks, ok := newFileBlocks[pnode.BlockPointer] if !ok { continue } dblock, ok := lbc[pnode.BlockPointer] if !ok { continue } for name := range blocks { if _, ok := nextNode.children[name]; ok { continue } // Try to lookup the block pointer, but this might be // for a new file. 
var filePtr BlockPointer if de, ok := dblock.Children[name]; ok { filePtr = de.BlockPointer } fup.log.CDebugf(ctx, "Creating child node for name %s for "+ "parent %v", name, pnode.BlockPointer) childPath := path{ FolderBranch: p.FolderBranch, path: make([]pathNode, i+2), } copy(childPath.path[0:i+1], p.path[0:i+1]) childPath.path[i+1] = pathNode{Name: name} childNode := &pathTreeNode{ ptr: filePtr, parent: nextNode, children: make(map[string]*pathTreeNode), mergedPath: childPath, } nextNode.children[name] = childNode } } } return root } // fixOpPointersForUpdate takes in a slice of "reverted" ops (all referring // to the original BlockPointers) and a map of BlockPointer updates // (from original to the new most recent pointer), and corrects all // the ops to use the new most recent pointers instead. It returns a // new slice of these operations with room in the first slot for a // dummy operation containing all the updates. func fixOpPointersForUpdate(oldOps []op, updates map[BlockPointer]BlockPointer, chains *crChains) ( []op, error) { newOps := make([]op, 0, len(oldOps)+1) newOps = append(newOps, nil) // placeholder for dummy op for _, op := range oldOps { var updatesToFix []*blockUpdate var ptrsToFix []*BlockPointer switch realOp := op.(type) { case *createOp: updatesToFix = append(updatesToFix, &realOp.Dir) // Since the created node was made exclusively during this // branch, we can use the most recent pointer for that // node as its ref. refs := realOp.Refs() realOp.RefBlocks = make([]BlockPointer, len(refs)) for i, ptr := range refs { mostRecent, err := chains.mostRecentFromOriginalOrSame(ptr) if err != nil { return nil, err } realOp.RefBlocks[i] = mostRecent ptrsToFix = append(ptrsToFix, &realOp.RefBlocks[i]) } // The leading resolutionOp will take care of the updates. realOp.Updates = nil case *rmOp: updatesToFix = append(updatesToFix, &realOp.Dir) // Since the rm'd node was made exclusively during this // branch, we can use the original pointer for that // node as its unref. unrefs := realOp.Unrefs() realOp.UnrefBlocks = make([]BlockPointer, len(unrefs)) for i, ptr := range unrefs { original, err := chains.originalFromMostRecentOrSame(ptr) if err != nil { return nil, err } realOp.UnrefBlocks[i] = original } // The leading resolutionOp will take care of the updates. realOp.Updates = nil case *renameOp: updatesToFix = append(updatesToFix, &realOp.OldDir, &realOp.NewDir) ptrsToFix = append(ptrsToFix, &realOp.Renamed) // Hack: we need to fixup local conflict renames so that the block // update changes to the new block pointer. for i := range realOp.Updates { ptrsToFix = append(ptrsToFix, &realOp.Updates[i].Ref) } // Note: Unrefs from the original renameOp are now in a // separate rm operation. case *syncOp: updatesToFix = append(updatesToFix, &realOp.File) realOp.Updates = nil case *setAttrOp: updatesToFix = append(updatesToFix, &realOp.Dir) ptrsToFix = append(ptrsToFix, &realOp.File) // The leading resolutionOp will take care of the updates. 
realOp.Updates = nil } for _, update := range updatesToFix { newPtr, ok := updates[update.Unref] if !ok { continue } // Since the first op does all the heavy lifting of // updating pointers, we can set these to both just be the // new pointer var err error *update, err = makeBlockUpdate(newPtr, newPtr) if err != nil { return nil, err } } for _, ptr := range ptrsToFix { newPtr, ok := updates[*ptr] if !ok { continue } *ptr = newPtr } newOps = append(newOps, op) } return newOps, nil } // prepUpdateForPaths takes in the complete set of paths affected by a // set of changes, and organizes them into a tree, which it then syncs // using prepTree. It returns a map describing how blocks were // updated in the final update, as well as the complete set of blocks // that need to be put to the server (and cached) to complete this // update and a list of blocks that can be removed from the flushing // queue. func (fup folderUpdatePrepper) prepUpdateForPaths(ctx context.Context, lState *lockState, md *RootMetadata, unmergedChains, mergedChains *crChains, mostRecentUnmergedMD, mostRecentMergedMD ImmutableRootMetadata, resolvedPaths map[BlockPointer]path, lbc localBcache, newFileBlocks fileBlockMap, dirtyBcache DirtyBlockCache, copyBehavior prepFolderCopyBehavior) ( updates map[BlockPointer]BlockPointer, bps *blockPutState, blocksToDelete []kbfsblock.ID, err error) { updates = make(map[BlockPointer]BlockPointer) session, err := fup.config.KBPKI().GetCurrentSession(ctx) if err != nil { return nil, nil, nil, err } oldOps := md.data.Changes.Ops resOp, ok := oldOps[len(oldOps)-1].(*resolutionOp) if !ok { return nil, nil, nil, fmt.Errorf("dummy op is not gc: %s", oldOps[len(oldOps)-1]) } isSquash := mostRecentMergedMD.data.Dir.BlockPointer != mergedChains.mostRecentChainMDInfo.rootInfo.BlockPointer if isSquash { // Squashes don't need to sync anything new. Just set the // root pointer to the most recent root pointer, and fill up // the resolution op with all the known chain updates for this // branch. bps = newBlockPutState(0) md.data.Dir.BlockInfo = unmergedChains.mostRecentChainMDInfo.rootInfo for original, chain := range unmergedChains.byOriginal { if unmergedChains.isCreated(original) || unmergedChains.isDeleted(original) || chain.original == chain.mostRecent { continue } resOp.AddUpdate(original, chain.mostRecent) } } else { // Construct a tree out of the merged paths, and do a sync at each leaf. root := fup.makeSyncTree(ctx, resolvedPaths, lbc, newFileBlocks) if root != nil { bps, err = fup.prepTree(ctx, lState, unmergedChains, md, session.UID, root, BlockPointer{}, lbc, newFileBlocks, dirtyBcache, copyBehavior) if err != nil { return nil, nil, nil, err } } else { bps = newBlockPutState(0) } } // Create an update map, and fix up the gc ops. for i, update := range resOp.Updates { fup.log.CDebugf(ctx, "resOp update: %v -> %v", update.Unref, update.Ref) // The unref should represent the most recent merged pointer // for the block. However, the other ops will be using the // original pointer as the unref, so use that as the key. updates[update.Unref] = update.Ref if chain, ok := mergedChains.byMostRecent[update.Unref]; ok { updates[chain.original] = update.Ref } // Fix the gc updates to make sure they all unref the most // recent block pointer. In cases where the two users create // the same directory independently, the update might // currently unref the unmerged most recent pointer. 
if chain, ok := unmergedChains.byMostRecent[update.Unref]; ok { // In case there was no merged chain above, map the // original to the ref again. updates[chain.original] = update.Ref mergedMostRecent, err := mergedChains.mostRecentFromOriginalOrSame(chain.original) if err != nil { return nil, nil, nil, err } fup.log.CDebugf(ctx, "Fixing resOp update from unmerged most "+ "recent %v to merged most recent %v", update.Unref, mergedMostRecent) err = update.setUnref(mergedMostRecent) if err != nil { return nil, nil, nil, err } resOp.Updates[i] = update updates[update.Unref] = update.Ref } } // Also add in file updates from sync operations, since the // resolutionOp may not include file-specific updates. Start from // the end of the list, so we use the final sync op for each file. for i := len(oldOps) - 1; i >= 0; i-- { op := oldOps[i] so, ok := op.(*syncOp) if !ok { continue } if _, ok := updates[so.File.Unref]; !ok { fup.log.CDebugf(ctx, "Adding sync op update %v -> %v", so.File.Unref, so.File.Ref) updates[so.File.Unref] = so.File.Ref resOp.AddUpdate(so.File.Unref, so.File.Ref) } } // For all chains that were created only in the unmerged branch, // make sure we update all the pointers to their most recent // version. for original, chain := range unmergedChains.byOriginal { if !unmergedChains.isCreated(original) || mergedChains.isCreated(original) { continue } if _, ok := updates[chain.original]; !ok { updates[chain.original] = chain.mostRecent } } // For all chains that were updated in both branches, make sure // the most recent unmerged pointer updates to the most recent // merged pointer. Normally this would get fixed up in the resOp // loop above, but that will miss directories that were not // updated as part of the resolution. (For example, if a file was // moved out of a directory in the merged branch, but an attr was // set on that file in the unmerged branch.) for unmergedOriginal := range unmergedChains.byOriginal { mergedChain, ok := mergedChains.byOriginal[unmergedOriginal] if !ok { continue } if _, ok := updates[unmergedOriginal]; !ok { updates[unmergedOriginal] = mergedChain.mostRecent } } // For all chains that were renamed only in the unmerged branch, // make sure we update all the pointers to their most recent // version. for original := range unmergedChains.renamedOriginals { mergedChain, ok := mergedChains.byOriginal[original] if !ok { continue } updates[original] = mergedChain.mostRecent } // Consolidate any chains of updates for k, v := range updates { if v2, ok := updates[v]; ok { updates[k] = v2 delete(updates, v) } } newOps, err := fixOpPointersForUpdate(oldOps[:len(oldOps)-1], updates, unmergedChains) if err != nil { return nil, nil, nil, err } // Clean up any gc updates that don't refer to blocks that exist // in the merged branch. var newUpdates []blockUpdate for _, update := range resOp.Updates { // Ignore it if it doesn't descend from an original block // pointer or one created in the merged branch. 
if _, ok := unmergedChains.originals[update.Unref]; !ok && (unmergedChains.byOriginal[update.Unref] == nil || unmergedChains.isCreated(update.Unref)) && mergedChains.byMostRecent[update.Unref] == nil { fup.log.CDebugf(ctx, "Turning update from %v into just a ref for %v", update.Unref, update.Ref) resOp.AddRefBlock(update.Ref) continue } newUpdates = append(newUpdates, update) } resOp.Updates = newUpdates // Also include rmop unrefs for chains that were deleted in the // unmerged branch (and so wouldn't be included in the resolved // ops), and not re-created by some action in the merged branch. // These need to be in the resolution for proper block accounting // and invalidation. for original, chain := range unmergedChains.byOriginal { mergedChain := mergedChains.byOriginal[original] if chain.isFile() || !unmergedChains.isDeleted(original) || mergedChains.isDeleted(original) || (mergedChain != nil && len(mergedChain.ops) > 0) { continue } for _, op := range chain.ops { if _, ok := op.(*rmOp); !ok { continue } // TODO: We might need to include these rmOps in the // actual resolved MD, to send the proper invalidations // into the kernel before we rm the parent. for _, ptr := range op.Unrefs() { if unrefOrig, ok := unmergedChains.originals[ptr]; ok { ptr = unrefOrig } newOps = addUnrefToFinalResOp(newOps, ptr) } } } if len(unmergedChains.resOps) > 0 { newBlocks := make(map[BlockPointer]bool) for _, bs := range bps.blockStates { newBlocks[bs.blockPtr] = true } // Look into the previous unmerged resolution ops and decide // which updates we want to keep. We should only keep those // that correspond to uploaded blocks, or ones that are the // most recent block on a chain and haven't yet been involved // in an update during this resolution. Unreference any // blocks that aren't the most recent blocks on their chains. for _, unmergedResOp := range unmergedChains.resOps { // Updates go in the first one. for _, update := range unmergedResOp.allUpdates() { chain, isMostRecent := unmergedChains.byMostRecent[update.Ref] isDeleted := false alreadyUpdated := false if isMostRecent { isDeleted = unmergedChains.isDeleted(chain.original) || unmergedChains.toUnrefPointers[update.Ref] _, alreadyUpdated = updates[chain.original] } if newBlocks[update.Ref] || (isMostRecent && !isDeleted && !alreadyUpdated) { fup.log.CDebugf(ctx, "Including update from old resOp: "+ "%v -> %v", update.Unref, update.Ref) resOp.AddUpdate(update.Unref, update.Ref) } else if !isMostRecent { fup.log.CDebugf(ctx, "Unrefing an update from old resOp: "+ "%v (original=%v)", update.Ref, update.Unref) newOps = addUnrefToFinalResOp(newOps, update.Ref) } } } } newOps[0] = resOp // move the dummy ops to the front md.data.Changes.Ops = newOps // TODO: only perform this loop if debugging is enabled. 
for _, op := range newOps { fup.log.CDebugf(ctx, "remote op %s: refs: %v", op, op.Refs()) fup.log.CDebugf(ctx, "remote op %s: unrefs: %v", op, op.Unrefs()) for _, update := range op.allUpdates() { fup.log.CDebugf(ctx, "remote op %s: update: %v -> %v", op, update.Unref, update.Ref) } } blocksToDelete, err = fup.updateResolutionUsageAndPointers(ctx, lState, md, bps, unmergedChains, mergedChains, mostRecentUnmergedMD, mostRecentMergedMD, isSquash) if err != nil { return nil, nil, nil, err } // Any refs (child block change pointers) and unrefs (dropped // unmerged block pointers) from previous resolutions go in a new // resolutionOp at the end, so we don't attempt to count any of // the bytes in the unref bytes count -- all of these pointers are // guaranteed to have been created purely within the unmerged // branch. if len(unmergedChains.resOps) > 0 { toDeleteMap := make(map[kbfsblock.ID]bool) for _, id := range blocksToDelete { toDeleteMap[id] = true } for _, unmergedResOp := range unmergedChains.resOps { for i := len(unmergedResOp.Refs()) - 1; i >= 0; i-- { ptr := unmergedResOp.Refs()[i] if unmergedChains.blockChangePointers[ptr] && !toDeleteMap[ptr.ID] { fup.log.CDebugf(ctx, "Ignoring block change ptr %v", ptr) unmergedResOp.DelRefBlock(ptr) md.data.Changes.Ops = addUnrefToFinalResOp(md.data.Changes.Ops, ptr) } } for _, ptr := range unmergedResOp.Unrefs() { fup.log.CDebugf(ctx, "Unref pointer from old resOp: %v", ptr) md.data.Changes.Ops = addUnrefToFinalResOp( md.data.Changes.Ops, ptr) } } } // do the block changes need their own blocks? bsplit := fup.config.BlockSplitter() if !bsplit.ShouldEmbedBlockChanges(&md.data.Changes) { // The child blocks should be referenced in the resolution op. _, ok := md.data.Changes.Ops[len(md.data.Changes.Ops)-1].(*resolutionOp) if !ok { // Append directly to the ops list, rather than use AddOp, // because the size estimate was already calculated. md.data.Changes.Ops = append(md.data.Changes.Ops, newResolutionOp()) } err = fup.unembedBlockChanges(ctx, bps, md, &md.data.Changes, session.UID) if err != nil { return nil, nil, nil, err } } return updates, bps, blocksToDelete, nil }
1
16594
Is there a reason not to do something like `if _, ok := doNotUnref[ptr.ID]; ok { return ops }` here?
keybase-kbfs
go
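The review comment above proposes an early-return guard for addUnrefToFinalResOp, whose body is not included in this excerpt. Below is a minimal, self-contained sketch of that guard; blockID, blockPointer, op, and the doNotUnref set are stand-ins assumed for illustration, not the real kbfs types or the function's actual signature.

package main

import "fmt"

// Stand-ins for the kbfs types; assumptions for illustration only.
type blockID string

type blockPointer struct{ ID blockID }

type op string

// addUnref sketches the guard from the review comment: a pointer found in
// doNotUnref is skipped entirely instead of being appended as an unref.
func addUnref(ops []op, ptr blockPointer, doNotUnref map[blockID]bool) []op {
	if _, ok := doNotUnref[ptr.ID]; ok {
		return ops // protected pointer: nothing to unref
	}
	return append(ops, op("unref "+string(ptr.ID)))
}

func main() {
	protected := map[blockID]bool{"abc": true}
	ops := addUnref(nil, blockPointer{ID: "abc"}, protected)
	ops = addUnref(ops, blockPointer{ID: "def"}, protected)
	fmt.Println(ops) // prints [unref def]
}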
@@ -539,7 +539,10 @@ class WebElement(object): @property def rect(self): """A dictionary with the size and location of the element.""" - return self._execute(Command.GET_ELEMENT_RECT)['value'] + if self._w3c: + return self._execute(Command.GET_ELEMENT_RECT)['value'] + else: + return self.size, self.location @property def screenshot_as_base64(self):
1
# Licensed to the Software Freedom Conservancy (SFC) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The SFC licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import base64 import hashlib import os import pkgutil import warnings import zipfile from selenium.common.exceptions import WebDriverException from selenium.webdriver.common.by import By from selenium.webdriver.common.utils import keys_to_typing from .command import Command # Python 3 imports try: str = basestring except NameError: pass try: from StringIO import StringIO as IOStream except ImportError: # 3+ from io import BytesIO as IOStream # not relying on __package__ here as it can be `None` in some situations (see #4558) _pkg = '.'.join(__name__.split('.')[:-1]) getAttribute_js = pkgutil.get_data(_pkg, 'getAttribute.js').decode('utf8') isDisplayed_js = pkgutil.get_data(_pkg, 'isDisplayed.js').decode('utf8') class WebElement(object): """Represents a DOM element. Generally, all interesting operations that interact with a document will be performed through this interface. All method calls will do a freshness check to ensure that the element reference is still valid. This essentially determines whether or not the element is still attached to the DOM. If this test fails, then an ``StaleElementReferenceException`` is thrown, and all future calls to this instance will fail.""" def __init__(self, parent, id_, w3c=False): self._parent = parent self._id = id_ self._w3c = w3c def __repr__(self): return '<{0.__module__}.{0.__name__} (session="{1}", element="{2}")>'.format( type(self), self._parent.session_id, self._id) @property def tag_name(self): """This element's ``tagName`` property.""" return self._execute(Command.GET_ELEMENT_TAG_NAME)['value'] @property def text(self): """The text of the element.""" return self._execute(Command.GET_ELEMENT_TEXT)['value'] def click(self): """Clicks the element.""" self._execute(Command.CLICK_ELEMENT) def submit(self): """Submits a form.""" if self._w3c: form = self.find_element(By.XPATH, "./ancestor-or-self::form") self._parent.execute_script( "var e = arguments[0].ownerDocument.createEvent('Event');" "e.initEvent('submit', true, true);" "if (arguments[0].dispatchEvent(e)) { arguments[0].submit() }", form) else: self._execute(Command.SUBMIT_ELEMENT) def clear(self): """Clears the text if it's a text entry element.""" self._execute(Command.CLEAR_ELEMENT) def get_property(self, name): """ Gets the given property of the element. :Args: - name - Name of the property to retrieve. 
Example:: text_length = target_element.get_property("text_length") """ try: return self._execute(Command.GET_ELEMENT_PROPERTY, {"name": name})["value"] except WebDriverException: # if we hit an end point that doesnt understand getElementProperty lets fake it return self.parent.execute_script('return arguments[0][arguments[1]]', self, name) def get_attribute(self, name): """Gets the given attribute or property of the element. This method will first try to return the value of a property with the given name. If a property with that name doesn't exist, it returns the value of the attribute with the same name. If there's no attribute with that name, ``None`` is returned. Values which are considered truthy, that is equals "true" or "false", are returned as booleans. All other non-``None`` values are returned as strings. For attributes or properties which do not exist, ``None`` is returned. :Args: - name - Name of the attribute/property to retrieve. Example:: # Check if the "active" CSS class is applied to an element. is_active = "active" in target_element.get_attribute("class") """ attributeValue = '' if self._w3c: attributeValue = self.parent.execute_script( "return (%s).apply(null, arguments);" % getAttribute_js, self, name) else: resp = self._execute(Command.GET_ELEMENT_ATTRIBUTE, {'name': name}) attributeValue = resp.get('value') if attributeValue is not None: if name != 'value' and attributeValue.lower() in ('true', 'false'): attributeValue = attributeValue.lower() return attributeValue def is_selected(self): """Returns whether the element is selected. Can be used to check if a checkbox or radio button is selected. """ return self._execute(Command.IS_ELEMENT_SELECTED)['value'] def is_enabled(self): """Returns whether the element is enabled.""" return self._execute(Command.IS_ELEMENT_ENABLED)['value'] def find_element_by_id(self, id_): """Finds element within this element's children by ID. :Args: - id\_ - ID of child element to locate. :Returns: - WebElement - the element if it was found :Raises: - NoSuchElementException - if the element wasn't found :Usage: foo_element = element.find_element_by_id('foo') """ return self.find_element(by=By.ID, value=id_) def find_elements_by_id(self, id_): """Finds a list of elements within this element's children by ID. Will return a list of webelements if found, or an empty list if not. :Args: - id\_ - Id of child element to find. :Returns: - list of WebElement - a list with elements if any was found. An empty list if not :Usage: elements = element.find_elements_by_id('foo') """ return self.find_elements(by=By.ID, value=id_) def find_element_by_name(self, name): """Finds element within this element's children by name. :Args: - name - name property of the element to find. :Returns: - WebElement - the element if it was found :Raises: - NoSuchElementException - if the element wasn't found :Usage: element = element.find_element_by_name('foo') """ return self.find_element(by=By.NAME, value=name) def find_elements_by_name(self, name): """Finds a list of elements within this element's children by name. :Args: - name - name property to search for. :Returns: - list of webelement - a list with elements if any was found. an empty list if not :Usage: elements = element.find_elements_by_name('foo') """ return self.find_elements(by=By.NAME, value=name) def find_element_by_link_text(self, link_text): """Finds element within this element's children by visible link text. :Args: - link_text - Link text string to search for. 
:Returns: - WebElement - the element if it was found :Raises: - NoSuchElementException - if the element wasn't found :Usage: element = element.find_element_by_link_text('Sign In') """ return self.find_element(by=By.LINK_TEXT, value=link_text) def find_elements_by_link_text(self, link_text): """Finds a list of elements within this element's children by visible link text. :Args: - link_text - Link text string to search for. :Returns: - list of webelement - a list with elements if any was found. an empty list if not :Usage: elements = element.find_elements_by_link_text('Sign In') """ return self.find_elements(by=By.LINK_TEXT, value=link_text) def find_element_by_partial_link_text(self, link_text): """Finds element within this element's children by partially visible link text. :Args: - link_text: The text of the element to partially match on. :Returns: - WebElement - the element if it was found :Raises: - NoSuchElementException - if the element wasn't found :Usage: element = element.find_element_by_partial_link_text('Sign') """ return self.find_element(by=By.PARTIAL_LINK_TEXT, value=link_text) def find_elements_by_partial_link_text(self, link_text): """Finds a list of elements within this element's children by link text. :Args: - link_text: The text of the element to partial match on. :Returns: - list of webelement - a list with elements if any was found. an empty list if not :Usage: elements = element.find_elements_by_partial_link_text('Sign') """ return self.find_elements(by=By.PARTIAL_LINK_TEXT, value=link_text) def find_element_by_tag_name(self, name): """Finds element within this element's children by tag name. :Args: - name - name of html tag (eg: h1, a, span) :Returns: - WebElement - the element if it was found :Raises: - NoSuchElementException - if the element wasn't found :Usage: element = element.find_element_by_tag_name('h1') """ return self.find_element(by=By.TAG_NAME, value=name) def find_elements_by_tag_name(self, name): """Finds a list of elements within this element's children by tag name. :Args: - name - name of html tag (eg: h1, a, span) :Returns: - list of WebElement - a list with elements if any was found. An empty list if not :Usage: elements = element.find_elements_by_tag_name('h1') """ return self.find_elements(by=By.TAG_NAME, value=name) def find_element_by_xpath(self, xpath): """Finds element by xpath. :Args: - xpath - xpath of element to locate. "//input[@class='myelement']" Note: The base path will be relative to this element's location. This will select the first link under this element. :: myelement.find_element_by_xpath(".//a") However, this will select the first link on the page. :: myelement.find_element_by_xpath("//a") :Returns: - WebElement - the element if it was found :Raises: - NoSuchElementException - if the element wasn't found :Usage: element = element.find_element_by_xpath('//div/td[1]') """ return self.find_element(by=By.XPATH, value=xpath) def find_elements_by_xpath(self, xpath): """Finds elements within the element by xpath. :Args: - xpath - xpath locator string. Note: The base path will be relative to this element's location. This will select all links under this element. :: myelement.find_elements_by_xpath(".//a") However, this will select all links in the page itself. :: myelement.find_elements_by_xpath("//a") :Returns: - list of WebElement - a list with elements if any was found. 
An empty list if not :Usage: elements = element.find_elements_by_xpath("//div[contains(@class, 'foo')]") """ return self.find_elements(by=By.XPATH, value=xpath) def find_element_by_class_name(self, name): """Finds element within this element's children by class name. :Args: - name: The class name of the element to find. :Returns: - WebElement - the element if it was found :Raises: - NoSuchElementException - if the element wasn't found :Usage: element = element.find_element_by_class_name('foo') """ return self.find_element(by=By.CLASS_NAME, value=name) def find_elements_by_class_name(self, name): """Finds a list of elements within this element's children by class name. :Args: - name: The class name of the elements to find. :Returns: - list of WebElement - a list with elements if any was found. An empty list if not :Usage: elements = element.find_elements_by_class_name('foo') """ return self.find_elements(by=By.CLASS_NAME, value=name) def find_element_by_css_selector(self, css_selector): """Finds element within this element's children by CSS selector. :Args: - css_selector - CSS selector string, ex: 'a.nav#home' :Returns: - WebElement - the element if it was found :Raises: - NoSuchElementException - if the element wasn't found :Usage: element = element.find_element_by_css_selector('#foo') """ return self.find_element(by=By.CSS_SELECTOR, value=css_selector) def find_elements_by_css_selector(self, css_selector): """Finds a list of elements within this element's children by CSS selector. :Args: - css_selector - CSS selector string, ex: 'a.nav#home' :Returns: - list of WebElement - a list with elements if any was found. An empty list if not :Usage: elements = element.find_elements_by_css_selector('.foo') """ return self.find_elements(by=By.CSS_SELECTOR, value=css_selector) def send_keys(self, *value): """Simulates typing into the element. :Args: - value - A string for typing, or setting form fields. For setting file inputs, this could be a local file path. Use this to send simple key events or to fill out form fields:: form_textfield = driver.find_element_by_name('username') form_textfield.send_keys("admin") This can also be used to set file inputs. :: file_input = driver.find_element_by_name('profilePic') file_input.send_keys("path/to/profilepic.gif") # Generally it's better to wrap the file path in one of the methods # in os.path to return the actual path to support cross OS testing. # file_input.send_keys(os.path.abspath("path/to/profilepic.gif")) """ # transfer file to another machine only if remote driver is used # the same behaviour as for java binding if self.parent._is_remote: local_file = self.parent.file_detector.is_local_file(*value) if local_file is not None: value = self._upload(local_file) self._execute(Command.SEND_KEYS_TO_ELEMENT, {'text': "".join(keys_to_typing(value)), 'value': keys_to_typing(value)}) # RenderedWebElement Items def is_displayed(self): """Whether the element is visible to a user.""" # Only go into this conditional for browsers that don't use the atom themselves if self._w3c and self.parent.capabilities['browserName'] == 'safari': return self.parent.execute_script( "return (%s).apply(null, arguments);" % isDisplayed_js, self) else: return self._execute(Command.IS_ELEMENT_DISPLAYED)['value'] @property def location_once_scrolled_into_view(self): """THIS PROPERTY MAY CHANGE WITHOUT WARNING. Use this to discover where on the screen an element is so that we can click it. This method should cause the element to be scrolled into view. 
Returns the top lefthand corner location on the screen, or ``None`` if the element is not visible. """ if self._w3c: old_loc = self._execute(Command.W3C_EXECUTE_SCRIPT, { 'script': "arguments[0].scrollIntoView(true); return arguments[0].getBoundingClientRect()", 'args': [self]})['value'] return {"x": round(old_loc['x']), "y": round(old_loc['y'])} else: return self._execute(Command.GET_ELEMENT_LOCATION_ONCE_SCROLLED_INTO_VIEW)['value'] @property def size(self): """The size of the element.""" size = {} if self._w3c: size = self._execute(Command.GET_ELEMENT_RECT)['value'] else: size = self._execute(Command.GET_ELEMENT_SIZE)['value'] new_size = {"height": size["height"], "width": size["width"]} return new_size def value_of_css_property(self, property_name): """The value of a CSS property.""" return self._execute(Command.GET_ELEMENT_VALUE_OF_CSS_PROPERTY, { 'propertyName': property_name})['value'] @property def location(self): """The location of the element in the renderable canvas.""" if self._w3c: old_loc = self._execute(Command.GET_ELEMENT_RECT)['value'] else: old_loc = self._execute(Command.GET_ELEMENT_LOCATION)['value'] new_loc = {"x": round(old_loc['x']), "y": round(old_loc['y'])} return new_loc @property def rect(self): """A dictionary with the size and location of the element.""" return self._execute(Command.GET_ELEMENT_RECT)['value'] @property def screenshot_as_base64(self): """ Gets the screenshot of the current element as a base64 encoded string. :Usage: img_b64 = element.screenshot_as_base64 """ return self._execute(Command.ELEMENT_SCREENSHOT)['value'] @property def screenshot_as_png(self): """ Gets the screenshot of the current element as a binary data. :Usage: element_png = element.screenshot_as_png """ return base64.b64decode(self.screenshot_as_base64.encode('ascii')) def screenshot(self, filename): """ Saves a screenshot of the current element to a PNG image file. Returns False if there is any IOError, else returns True. Use full paths in your filename. :Args: - filename: The full path you wish to save your screenshot to. This should end with a `.png` extension. :Usage: element.screenshot('/Screenshots/foo.png') """ if not filename.lower().endswith('.png'): warnings.warn("name used for saved screenshot does not match file " "type. It should end with a `.png` extension", UserWarning) png = self.screenshot_as_png try: with open(filename, 'wb') as f: f.write(png) except IOError: return False finally: del png return True @property def parent(self): """Internal reference to the WebDriver instance this element was found from.""" return self._parent @property def id(self): """Internal ID used by selenium. This is mainly for internal use. Simple use cases such as checking if 2 webelements refer to the same element, can be done using ``==``:: if element1 == element2: print("These 2 are equal") """ return self._id def __eq__(self, element): return hasattr(element, 'id') and self._id == element.id def __ne__(self, element): return not self.__eq__(element) # Private Methods def _execute(self, command, params=None): """Executes a command against the underlying HTML element. Args: command: The name of the command to _execute as a string. params: A dictionary of named parameters to send with the command. Returns: The command's JSON response loaded into a dictionary object. """ if not params: params = {} params['id'] = self._id return self._parent.execute(command, params) def find_element(self, by=By.ID, value=None): """ Find an element given a By strategy and locator. 
Prefer the find_element_by_* methods when possible. :Usage: element = element.find_element(By.ID, 'foo') :rtype: WebElement """ if self._w3c: if by == By.ID: by = By.CSS_SELECTOR value = '[id="%s"]' % value elif by == By.TAG_NAME: by = By.CSS_SELECTOR elif by == By.CLASS_NAME: by = By.CSS_SELECTOR value = ".%s" % value elif by == By.NAME: by = By.CSS_SELECTOR value = '[name="%s"]' % value return self._execute(Command.FIND_CHILD_ELEMENT, {"using": by, "value": value})['value'] def find_elements(self, by=By.ID, value=None): """ Find elements given a By strategy and locator. Prefer the find_elements_by_* methods when possible. :Usage: element = element.find_elements(By.CLASS_NAME, 'foo') :rtype: list of WebElement """ if self._w3c: if by == By.ID: by = By.CSS_SELECTOR value = '[id="%s"]' % value elif by == By.TAG_NAME: by = By.CSS_SELECTOR elif by == By.CLASS_NAME: by = By.CSS_SELECTOR value = ".%s" % value elif by == By.NAME: by = By.CSS_SELECTOR value = '[name="%s"]' % value return self._execute(Command.FIND_CHILD_ELEMENTS, {"using": by, "value": value})['value'] def __hash__(self): return int(hashlib.md5(self._id.encode('utf-8')).hexdigest(), 16) def _upload(self, filename): fp = IOStream() zipped = zipfile.ZipFile(fp, 'w', zipfile.ZIP_DEFLATED) zipped.write(filename, os.path.split(filename)[1]) zipped.close() content = base64.encodestring(fp.getvalue()) if not isinstance(content, str): content = content.decode('utf-8') try: return self._execute(Command.UPLOAD_FILE, {'file': content})['value'] except WebDriverException as e: if "Unrecognized command: POST" in e.__str__(): return filename elif "Command not found: POST " in e.__str__(): return filename elif '{"status":405,"value":["GET","HEAD","DELETE"]}' in e.__str__(): return filename else: raise e
1
15812
This would return a tuple of two dictionaries. You need to combine them and return a dictionary.
SeleniumHQ-selenium
py
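The review message above notes that `return self.size, self.location` in the patch yields a tuple of two dictionaries rather than a single dict. A minimal, standalone sketch of the merge the reviewer asks for; `size` and `location` here are plain dicts standing in for the WebElement properties in the file above, and this illustrates the suggestion rather than the change that eventually shipped.

def combined_rect(size, location):
    """Merge a {'height', 'width'} dict and an {'x', 'y'} dict into one."""
    rect = {}
    rect.update(size)
    rect.update(location)
    return rect

# Mirrors what the non-W3C branch of `rect` should return:
print(combined_rect({'height': 10, 'width': 20}, {'x': 1, 'y': 2}))
# {'height': 10, 'width': 20, 'x': 1, 'y': 2}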
@@ -29,10 +29,10 @@ class Node(object): def __repr__(self): return self._class_repr() - def __call__(self): - return self._data() + def __call__(self, asa=None): + raise NotImplementedError - def _data(self): + def _data(self, asa=None): raise NotImplementedError
1
""" Nodes that represent the data in a Quilt package. """ import copy import os import pandas as pd from six import iteritems, string_types from .tools import core from .tools.const import SYSTEM_METADATA from .tools.util import is_nodename class Node(object): """ Abstract class that represents a group or a leaf node in a package. """ def __init__(self, meta): # Can't instantiate it directly assert self.__class__ != Node.__class__ assert meta is not None self._meta = meta def _class_repr(self): """Only exists to make it easier for subclasses to customize `__repr__`.""" return "<%s>" % self.__class__.__name__ def __repr__(self): return self._class_repr() def __call__(self): return self._data() def _data(self): raise NotImplementedError class DataNode(Node): """ Represents a dataframe or a file. Allows accessing the contents using `()`. """ def __init__(self, package, node, data, meta): super(DataNode, self).__init__(meta) self._package = package self._node = node self.__cached_data = data def _data(self): """ Returns the contents of the node: a dataframe or a file path. """ if self.__cached_data is None: # TODO(dima): Temporary code. store = self._package.get_store() if isinstance(self._node, core.TableNode): self.__cached_data = store.load_dataframe(self._node.hashes) elif isinstance(self._node, core.FileNode): self.__cached_data = store.get_file(self._node.hashes) else: assert False return self.__cached_data class GroupNode(Node): """ Represents a group in a package. Allows accessing child objects using the dot notation. Warning: calling _data() on a large dataset may exceed local memory capacity in Python (Only supported for Parquet packages). """ def __init__(self, meta): super(GroupNode, self).__init__(meta) def __setattr__(self, name, value): if name.startswith('_') or isinstance(value, Node): super(Node, self).__setattr__(name, value) else: raise AttributeError("{val} is not a valid package node".format(val=value)) def __repr__(self): pinfo = super(GroupNode, self).__repr__() group_info = '\n'.join(name + '/' for name in sorted(self._group_keys())) if group_info: group_info += '\n' data_info = '\n'.join(sorted(self._data_keys())) return '%s\n%s%s' % (pinfo, group_info, data_info) def _items(self): return ((name, child) for name, child in iteritems(self.__dict__) if not name.startswith('_')) def _data_keys(self): """ every child key referencing a dataframe """ return [name for name, child in self._items() if not isinstance(child, GroupNode)] def _group_keys(self): """ every child key referencing a group that is not a dataframe """ return [name for name, child in self._items() if isinstance(child, GroupNode)] def _keys(self): """ keys directly accessible on this object via getattr or . """ return [name for name in self.__dict__ if not name.startswith('_')] def _add_group(self, groupname): child = GroupNode({}) setattr(self, groupname, child) def _data(self): """ Merges all child dataframes. Only works for dataframes stored on disk - not in memory. """ store = None hash_list = [] stack = [self] while stack: node = stack.pop() if isinstance(node, GroupNode): stack.extend(child for _, child in sorted(node._items(), reverse=True)) else: if not isinstance(node._node, core.TableNode): raise ValueError("Group contains non-dataframe nodes") if not node._node.hashes: raise NotImplementedError("Can only merge built dataframes. 
Build this package and try again.") node_store = node._package.get_store() if store is None: store = node_store elif node_store is not store: raise NotImplementedError("Can only merge dataframes from the same store") hash_list += node._node.hashes if not hash_list: return None return store.load_dataframe(hash_list) def _create_filter_func(filter_dict): filter_name = filter_dict.pop('name', None) if filter_name is not None and not isinstance(filter_name, string_types): raise ValueError("Invalid 'name': %r" % filter_name) filter_meta = filter_dict.pop('meta', None) if filter_meta is not None and not isinstance(filter_meta, dict): raise ValueError("Invalid 'meta': %r" % filter_meta) if filter_dict: raise ValueError("Unexpected data in the filter: %r; only 'name' and 'meta' are supported" % filter_dict) def helper(value, expected): if isinstance(expected, dict): if isinstance(value, dict): for expected_key, expected_value in iteritems(expected): if not helper(value.get(expected_key), expected_value): return False return True else: return False else: return value == expected def func(node, name): if filter_name is not None and filter_name != name: return False if filter_meta is not None and not helper(node._meta, filter_meta): return False return True return func class PackageNode(GroupNode): """ Represents a package. """ def __init__(self, package, meta): super(PackageNode, self).__init__(meta) self._package = package def _class_repr(self): finfo = self._package.get_path() if self._package is not None else '' return "<%s %r>" % (self.__class__.__name__, finfo) def _set(self, path, value, build_dir=''): """Create and set a node by path This creates a node from a filename or pandas DataFrame. If `value` is a filename, it must be relative to `build_dir`. `value` is stored as the export path. `build_dir` defaults to the current directory, but may be any arbitrary directory path, including an absolute path. Example: # Set `pkg.graph_image` to the data in '/home/user/bin/graph.png'. # If exported, it would export to '<export_dir>/bin/graph.png' `pkg._set(['graph_image'], 'bin/fizz.bin', '/home/user')` :param path: Path list -- I.e. ['examples', 'new_node'] :param value: Pandas dataframe, or a filename relative to build_dir :param build_dir: Directory containing `value` if value is a filename. """ assert isinstance(path, list) and len(path) > 0 if isinstance(value, pd.DataFrame): metadata = {} core_node = core.TableNode(hashes=[], format=core.PackageFormat.default.value) elif isinstance(value, string_types + (bytes,)): # bytes -> string for consistency when retrieving metadata value = value.decode() if isinstance(value, bytes) else value if os.path.isabs(value): raise ValueError("Invalid path: expected a relative path, but received {!r}".format(value)) # Security: filepath does not and should not retain the build_dir's location! 
metadata = {SYSTEM_METADATA: {'filepath': value, 'transform': 'id'}} core_node = core.FileNode(hashes=[]) if build_dir: value = os.path.join(build_dir, value) else: accepted_types = tuple(set((pd.DataFrame, bytes) + string_types)) raise TypeError("Bad value type: Expected instance of any type {!r}, but received type {!r}" .format(accepted_types, type(value)), repr(value)[0:100]) for key in path: if not is_nodename(key): raise ValueError("Invalid name for node: {}".format(key)) node = self for key in path[:-1]: child = getattr(node, key, None) if not isinstance(child, GroupNode): child = GroupNode({}) setattr(node, key, child) node = child key = path[-1] data_node = DataNode(self._package, core_node, value, metadata) setattr(node, key, data_node) def _filter(self, lambda_or_dict): if isinstance(lambda_or_dict, dict): func = _create_filter_func(lambda_or_dict) elif callable(lambda_or_dict): func = lambda_or_dict else: raise ValueError def _filter_node(name, node, func): matched = func(node, name) if isinstance(node, GroupNode): if isinstance(node, PackageNode): filtered = PackageNode(None, copy.deepcopy(node._meta)) else: filtered = GroupNode(copy.deepcopy(node._meta)) for child_name, child_node in node._items(): # If the group itself matched, then match all children by using a True filter. child_func = (lambda *args: True) if matched else func filtered_child = _filter_node(child_name, child_node, child_func) if filtered_child is not None: setattr(filtered, child_name, filtered_child) # Return the group if: # 1) It has children, or # 2) Group itself matched the filter, or # 3) It's the package itself. if matched or next(filtered._items(), None) or node == self: return filtered else: if matched: return node return None return _filter_node('', self, func)
1
16721
Why not always forward it to `_data`?
quiltdata-quilt
py
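The question above suggests keeping `__call__` as a thin wrapper instead of raising NotImplementedError. A minimal sketch of that alternative, using the `asa` parameter from the patch; `ConstNode` is a hypothetical subclass added only to exercise the forwarding.

class Node(object):
    def __call__(self, asa=None):
        # Always forward, so subclasses only ever override _data().
        return self._data(asa)

    def _data(self, asa=None):
        raise NotImplementedError


class ConstNode(Node):
    # Hypothetical subclass for illustration only.
    def _data(self, asa=None):
        return 42 if asa is None else asa(42)


print(ConstNode()())         # 42
print(ConstNode()(asa=str))  # '42'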
@@ -263,7 +263,7 @@ def connect_spotify_callback(): return redirect(url_for('profile.connect_spotify')) -@profile_bp.route('/refresh-spotify-token', methods=['GET']) +@profile_bp.route('/refresh-spotify-token', methods=['POST']) @crossdomain() @api_login_required def refresh_spotify_token():
1
import listenbrainz.db.stats as db_stats import listenbrainz.db.user as db_user import listenbrainz.webserver.rabbitmq_connection as rabbitmq_connection from listenbrainz.webserver.decorators import crossdomain import os import re import ujson import zipfile from datetime import datetime from flask import Blueprint, render_template, request, url_for, redirect, current_app, make_response, jsonify from flask_login import current_user, login_required import spotipy.oauth2 from werkzeug.exceptions import NotFound, BadRequest, RequestEntityTooLarge, InternalServerError from listenbrainz.webserver.errors import APIBadRequest, APIServiceUnavailable, APINotFound from werkzeug.utils import secure_filename from listenbrainz import webserver from listenbrainz.db.exceptions import DatabaseException from listenbrainz.domain import spotify from listenbrainz.stats.utils import construct_stats_queue_key from listenbrainz.webserver import flash from listenbrainz.webserver.login import api_login_required from listenbrainz.webserver.redis_connection import _redis from listenbrainz.webserver.influx_connection import _influx from listenbrainz.webserver.utils import sizeof_readable from listenbrainz.webserver.views.user import delete_user, _get_user from listenbrainz.webserver.views.api_tools import insert_payload, validate_listen, \ LISTEN_TYPE_IMPORT, publish_data_to_queue from os import path, makedirs from time import time from werkzeug.exceptions import NotFound, BadRequest, RequestEntityTooLarge, InternalServerError from werkzeug.utils import secure_filename profile_bp = Blueprint("profile", __name__) EXPORT_FETCH_COUNT = 5000 @profile_bp.route("/resettoken", methods=["GET", "POST"]) @login_required def reset_token(): if request.method == "POST": token = request.form.get("token") if token != current_user.auth_token: raise BadRequest("Can only reset token of currently logged in user") reset = request.form.get("reset") if reset == "yes": try: db_user.update_token(current_user.id) flash.info("Access token reset") except DatabaseException: flash.error("Something went wrong! Unable to reset token right now.") return redirect(url_for("profile.info")) else: token = current_user.auth_token return render_template( "user/resettoken.html", token=token, ) @profile_bp.route("/resetlatestimportts", methods=["GET", "POST"]) @login_required def reset_latest_import_timestamp(): if request.method == "POST": token = request.form.get("token") if token != current_user.auth_token: raise BadRequest("Can only reset latest import timestamp of currently logged in user") reset = request.form.get("reset") if reset == "yes": try: db_user.reset_latest_import(current_user.musicbrainz_id) flash.info("Latest import time reset, we'll now import all your data instead of stopping at your last imported listen.") except DatabaseException: flash.error("Something went wrong! 
Unable to reset latest import timestamp right now.") return redirect(url_for("profile.info")) else: token = current_user.auth_token return render_template( "profile/resetlatestimportts.html", token=token, ) @profile_bp.route("/") @login_required def info(): # check if user is in stats calculation queue or if valid stats already exist in_stats_queue = _redis.redis.get(construct_stats_queue_key(current_user.musicbrainz_id)) == 'queued' try: stats_exist = db_stats.valid_stats_exist(current_user.id) except DatabaseException: stats_exist = False return render_template( "profile/info.html", user=current_user, in_stats_queue=in_stats_queue, stats_exist=stats_exist, ) @profile_bp.route("/import") @login_required def import_data(): """ Displays the import page to user, giving various options """ # Return error if LASTFM_API_KEY is not given in config.py if 'LASTFM_API_KEY' not in current_app.config or current_app.config['LASTFM_API_KEY'] == "": return NotFound("LASTFM_API_KEY not specified.") return render_template( "user/import.html", user=current_user, scraper_url=url_for( "user.lastfmscraper", user_name=current_user.musicbrainz_id, _external=True, ), ) @profile_bp.route("/export", methods=["GET", "POST"]) @login_required def export_data(): """ Exporting the data to json """ if request.method == "POST": db_conn = webserver.create_influx(current_app) filename = current_user.musicbrainz_id + "_lb-" + datetime.today().strftime('%Y-%m-%d') + ".json" # fetch all listens for the user from listenstore by making repeated queries to # listenstore until we get all the data to_ts = int(time()) listens = [] while True: batch = db_conn.fetch_listens(current_user.musicbrainz_id, to_ts=to_ts, limit=EXPORT_FETCH_COUNT) if not batch: break listens.extend(batch) to_ts = batch[-1].ts_since_epoch # new to_ts will the the timestamp of the last listen fetched # Fetch output and convert it into dict with keys as indexes output = [] for index, obj in enumerate(listens): dic = obj.data dic['timestamp'] = obj.ts_since_epoch dic['release_msid'] = None if obj.release_msid is None else str(obj.release_msid) dic['artist_msid'] = None if obj.artist_msid is None else str(obj.artist_msid) dic['recording_msid'] = None if obj.recording_msid is None else str(obj.recording_msid) output.append(dic) response = make_response(ujson.dumps(output)) response.headers["Content-Disposition"] = "attachment; filename=" + filename response.headers['Content-Type'] = 'application/json; charset=utf-8' response.mimetype = "text/json" return response else: return render_template("user/export.html", user=current_user) @profile_bp.route('/request-stats', methods=['GET']) @login_required def request_stats(): """ Check if the current user's statistics have been calculated and if not, put them in the stats queue for stats_calculator. """ status = _redis.redis.get(construct_stats_queue_key(current_user.musicbrainz_id)) == 'queued' if status == 'queued': flash.info('You have already been added to the stats calculation queue! Please check back later.') elif db_stats.valid_stats_exist(current_user.id): flash.info('Your stats were calculated in the most recent stats calculation interval,' ' please wait until the next interval! 
We calculate new statistics every Monday at 00:00 UTC.') else: # publish to rabbitmq queue that the stats-calculator consumes data = { 'type': 'user', 'id': current_user.id, 'musicbrainz_id': current_user.musicbrainz_id, } publish_data_to_queue( data=data, exchange=current_app.config['BIGQUERY_EXCHANGE'], queue=current_app.config['BIGQUERY_QUEUE'], error_msg='Could not put user %s into statistics calculation queue, please try again later', ) _redis.redis.set(construct_stats_queue_key(current_user.musicbrainz_id), 'queued') flash.info('You have been added to the stats calculation queue! Please check back later.') return redirect(url_for('profile.info')) @profile_bp.route('/delete', methods=['GET', 'POST']) @login_required def delete(): """ Delete currently logged-in user from ListenBrainz. If POST request, this view checks for the correct authorization token and deletes the user. If deletion is successful, redirects to home page, else flashes an error and redirects to user's info page. If GET request, this view renders a page asking the user to confirm that they wish to delete their ListenBrainz account. """ if request.method == 'POST': if request.form.get('token') == current_user.auth_token: try: delete_user(current_user.musicbrainz_id) except Exception as e: current_app.logger.error('Error while deleting %s: %s', current_user.musicbrainz_id, str(e)) flash.error('Error while deleting user %s, please try again later.' % current_user.musicbrainz_id) return redirect(url_for('profile.info')) return redirect(url_for('index.index')) else: flash.error('Cannot delete user due to error during authentication, please try again later.') return redirect('profile.info') else: return render_template( 'profile/delete.html', user=current_user, ) @profile_bp.route('/connect-spotify', methods=['GET', 'POST']) @login_required def connect_spotify(): if request.method == 'POST' and request.form.get('delete') == 'yes': spotify.remove_user(current_user.id) flash.success('Your Spotify account has been unlinked') user = spotify.get_user(current_user.id) only_listen_sp_oauth = spotify.get_spotify_oauth(spotify.SPOTIFY_LISTEN_PERMISSIONS) only_import_sp_oauth = spotify.get_spotify_oauth(spotify.SPOTIFY_IMPORT_PERMISSIONS) both_sp_oauth = spotify.get_spotify_oauth(spotify.SPOTIFY_LISTEN_PERMISSIONS + spotify.SPOTIFY_IMPORT_PERMISSIONS) return render_template( 'user/spotify.html', account=user, last_updated=user.last_updated_iso if user else None, latest_listened_at=user.latest_listened_at_iso if user else None, only_listen_url=only_listen_sp_oauth.get_authorize_url(), only_import_url=only_import_sp_oauth.get_authorize_url(), both_url=both_sp_oauth.get_authorize_url(), ) @profile_bp.route('/connect-spotify/callback') @login_required def connect_spotify_callback(): code = request.args.get('code') if not code: raise BadRequest('missing code') try: token = spotify.get_access_token(code) spotify.add_new_user(current_user.id, token) flash.success('Successfully authenticated with Spotify!') except spotipy.oauth2.SpotifyOauthError as e: current_app.logger.error('Unable to authenticate with Spotify: %s', str(e), exc_info=True) flash.warn('Unable to authenticate with Spotify (error {})'.format(e.args[0])) return redirect(url_for('profile.connect_spotify')) @profile_bp.route('/refresh-spotify-token', methods=['GET']) @crossdomain() @api_login_required def refresh_spotify_token(): spotify_user = spotify.get_user(current_user.id) if not spotify_user: raise APINotFound("User has not authenticated to Spotify") if 
spotify_user.token_expired: try: spotify_user = spotify.refresh_user_token(spotify_user) except spotify.SpotifyAPIError: raise APIServiceUnavailable("Cannot refresh Spotify token right now") return jsonify({ 'id': current_user.id, 'musicbrainz_id': current_user.musicbrainz_id, 'user_token': spotify_user.user_token, })
1
15338
There are tests that use `client.get`, which will fail now.
metabrainz-listenbrainz-server
py
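The review message above warns that switching the route to POST-only breaks tests that still issue GET requests. A hypothetical sketch of the matching test update; the `client` fixture, the test name, and the assertion are assumptions for illustration, not code from the repository.

from flask import url_for

def test_refresh_spotify_token(client):
    # Hypothetical test, not from the listenbrainz test suite.
    # client.get(...) would now receive 405 Method Not Allowed,
    # so the test must POST to match the new route definition.
    resp = client.post(url_for('profile.refresh_spotify_token'))
    assert resp.status_code == 200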
@@ -371,7 +371,7 @@ func (fs *FS) mkdirAll(filename string, perm os.FileMode) (err error) { switch errors.Cause(err).(type) { case libkbfs.NameExistsError: // The child directory already exists. - case libkbfs.WriteAccessError: + case libkbfs.WriteAccessError, libkbfs.WriteToReadonlyNodeError: // If the child already exists, this doesn't matter. var lookupErr error child, _, lookupErr = fs.config.KBFSOps().Lookup(fs.ctx, n, p)
1
// Copyright 2017 Keybase Inc. All rights reserved. // Use of this source code is governed by a BSD // license that can be found in the LICENSE file. package libfs import ( "bytes" "context" "crypto/rand" "encoding/base64" "net/http" "os" "path" "strings" "sync" "time" "github.com/keybase/client/go/logger" "github.com/keybase/client/go/protocol/keybase1" "github.com/keybase/kbfs/libkbfs" "github.com/pkg/errors" billy "gopkg.in/src-d/go-billy.v4" ) // FSEventType is FS event type. type FSEventType int const ( _ FSEventType = iota // FSEventLock indicates Lock method has been called. FSEventLock // FSEventUnlock indicates Unlock method has been called. FSEventUnlock ) // FSEvent is the type for events sent into the events channel passed into // NewFS. type FSEvent struct { EventType FSEventType File *File Done <-chan struct{} } type fsInner struct { config libkbfs.Config root libkbfs.Node rootInfo libkbfs.EntryInfo h *libkbfs.TlfHandle subdir string uniqID string log logger.Logger deferLog logger.Logger priority keybase1.MDPriority // lockNamespace is the prefix used by any *File generated by this *FS when // they need to generate a lockID. By default, we use a canonical unix path // of the root of this FS as lock namespace. But one can call // SetLockNamespace to set it explicitly, which can be any bytes. When // Chroot is called, a slash ('/') followed by the changed subpath are // appended to the existing lockNamespace to form the new one. Note that // this is a naive append without and path clean. lockNamespace []byte eventsLock sync.RWMutex events map[chan<- FSEvent]bool } // FS is a wrapper around a KBFS subdirectory that implements the // billy.Filesystem interface. It uses forward-slash separated paths. // It may return errors wrapped with the `github.com/pkg/errors` // package. type FS struct { // Yes, storing ctx in a struct is a mortal sin, but the // billy.Filesystem interface doesn't give us a way to accept ctxs // any other way. ctx context.Context *fsInner } var _ billy.Filesystem = (*FS)(nil) const ( maxSymlinkLevels = 40 // same as Linux ) func followSymlink(parentPath, link string) (newPath string, err error) { if path.IsAbs(link) { return "", errors.Errorf("Can't follow absolute link: %s", link) } newPath = path.Clean(path.Join(parentPath, link)) if strings.HasPrefix(newPath, "..") { return "", errors.Errorf( "Cannot follow symlink out of chroot: %s", newPath) } return newPath, nil } // NewFS returns a new FS instance, chroot'd to the given TLF and // subdir within that TLF. `subdir` must exist, and point to a // directory, before this function is called. `uniqID` needs to // uniquely identify this instance among all users of this TLF // globally; for example, a device ID combined with a local tempfile // name is recommended. func NewFS(ctx context.Context, config libkbfs.Config, tlfHandle *libkbfs.TlfHandle, subdir string, uniqID string, priority keybase1.MDPriority) (*FS, error) { rootNode, ei, err := config.KBFSOps().GetOrCreateRootNode( ctx, tlfHandle, libkbfs.MasterBranch) if err != nil { return nil, err } if subdir != "" { subdir = path.Clean(subdir) } // Look up the subdir's root. n := rootNode var parts []string if len(subdir) > 0 { parts = strings.Split(subdir, "/") } // Loop while we follow symlinks. 
outer: for { for i, p := range parts { n, ei, err = config.KBFSOps().Lookup(ctx, n, p) if err != nil { return nil, err } switch ei.Type { case libkbfs.Dir: continue case libkbfs.Sym: parentParts := parts[:i] newPath, err := followSymlink( path.Join(parentParts...), ei.SymPath) if err != nil { return nil, err } newParts := strings.Split(newPath, "/") newParts = append(newParts, parts[i+1:]...) // Fix subdir so we'll get the correct default lock namespace. oldSubdir := subdir subdir = path.Join(newParts...) config.MakeLogger("").CDebugf(ctx, "Expanding symlink: %s->%s", oldSubdir, subdir) parts = newParts n = rootNode continue outer default: return nil, errors.Errorf("%s is not a directory", path.Join(parts[:i]...)) } } // Successfully looked up all directories. break } log := config.MakeLogger("") log.CDebugf(ctx, "Made new FS for TLF=%s, subdir=%s", tlfHandle.GetCanonicalName(), subdir) // Use the canonical unix path for default locking namespace, as this needs // to be the same across all platforms. unixFullPath := path.Join("/keybase", tlfHandle.Type().String(), subdir) return &FS{ ctx: ctx, fsInner: &fsInner{ config: config, root: n, rootInfo: ei, h: tlfHandle, subdir: subdir, uniqID: uniqID, log: log, deferLog: log.CloneWithAddedDepth(1), lockNamespace: []byte(unixFullPath), priority: priority, events: make(map[chan<- FSEvent]bool), }, }, nil } // lookupOrCreateEntryNoFollow looks up the entry for a file in a // given parent node. If the entry is a symlink, it will return a nil // Node and a nil error. If the entry doesn't exist and O_CREATE is // set in `flag`, it will create the entry as a file. func (fs *FS) lookupOrCreateEntryNoFollow( dir libkbfs.Node, filename string, flag int, perm os.FileMode) ( libkbfs.Node, libkbfs.EntryInfo, error) { n, ei, err := fs.config.KBFSOps().Lookup(fs.ctx, dir, filename) switch errors.Cause(err).(type) { case libkbfs.NoSuchNameError: // The file doesn't exist yet; create if requested if flag&os.O_CREATE == 0 { return nil, libkbfs.EntryInfo{}, err } fs.log.CDebugf( fs.ctx, "Creating %s since it doesn't exist yet", filename) excl := libkbfs.NoExcl if flag&os.O_EXCL != 0 { excl = libkbfs.WithExcl } isExec := (perm & 0100) != 0 n, ei, err = fs.config.KBFSOps().CreateFile( fs.ctx, dir, filename, isExec, excl) switch errors.Cause(err).(type) { case libkbfs.NameExistsError: // Someone made it already; recurse to try the lookup again. fs.log.CDebugf( fs.ctx, "Attempting lookup again after failed create") return fs.lookupOrCreateEntryNoFollow(dir, filename, flag, perm) case nil: return n, ei, nil default: return nil, libkbfs.EntryInfo{}, err } case nil: // If we were supposed to have exclusively-created this file, // we must fail. if flag&os.O_CREATE != 0 && flag&os.O_EXCL != 0 { return nil, libkbfs.EntryInfo{}, errors.New("Exclusive create failed because the file exists") } if ei.Type == libkbfs.Sym { // The caller must retry if desired. return nil, ei, nil } return n, ei, nil default: return nil, libkbfs.EntryInfo{}, err } } // lookupParentWithDepth looks up the parent node of the given // filename. It follows symlinks in the path, but doesn't resolve the // final base name. If `exitEarly` is true, it returns on the first // not-found error and `base` will contain the subpath of filename not // yet found. 
func (fs *FS) lookupParentWithDepth( filename string, exitEarly bool, depth int) ( parent libkbfs.Node, parentDir, base string, err error) { parts := strings.Split(filename, "/") n := fs.root // Iterate through each of the parent directories of the file, but // not the file itself. for i := 0; i < len(parts)-1; i++ { p := parts[i] nextNode, ei, err := fs.config.KBFSOps().Lookup(fs.ctx, n, p) switch errors.Cause(err).(type) { case libkbfs.NoSuchNameError: if exitEarly { parentDir = path.Join(parts[:i]...) base = path.Join(parts[i:]...) return n, parentDir, base, nil } return nil, "", "", err case nil: n = nextNode default: return nil, "", "", err } switch ei.Type { case libkbfs.Sym: if depth == maxSymlinkLevels { return nil, "", "", errors.New("Too many levels of symlinks") } parentDir = path.Join(parts[:i]...) newPath, err := followSymlink(parentDir, ei.SymPath) if err != nil { return nil, "", "", err } newPathPlusRemainder := append([]string{newPath}, parts[i+1:]...) return fs.lookupParentWithDepth( path.Join(newPathPlusRemainder...), exitEarly, depth+1) case libkbfs.Dir: continue default: return nil, "", "", errors.Errorf("%s is not a directory", path.Join(parts[:i]...)) } } parentDir = path.Join(parts[:len(parts)-1]...) base = parts[len(parts)-1] return n, parentDir, base, nil } func (fs *FS) lookupParent(filename string) ( parent libkbfs.Node, parentDir, base string, err error) { return fs.lookupParentWithDepth(filename, false, 0) } // lookupOrCreateEntry looks up the entry for a filename, following // symlinks in the path (including if the final entry is a symlink). // If the entry doesn't exist an O_CREATE is set in `flag`, it will // create the entry as a file. func (fs *FS) lookupOrCreateEntry( filename string, flag int, perm os.FileMode) ( n libkbfs.Node, ei libkbfs.EntryInfo, err error) { // Shortcut the case where there's nothing to look up. if filename == "" || filename == "/" || filename == "." { return fs.root, fs.rootInfo, nil } filename = strings.TrimPrefix(filename, "/") for i := 0; i < maxSymlinkLevels; i++ { var parentDir, fName string n, parentDir, fName, err = fs.lookupParent(filename) if err != nil { return nil, libkbfs.EntryInfo{}, err } n, ei, err := fs.lookupOrCreateEntryNoFollow(n, fName, flag, perm) if err != nil { return nil, libkbfs.EntryInfo{}, err } if ei.Type != libkbfs.Sym { return n, ei, nil } fs.log.CDebugf(fs.ctx, "Following symlink=%s from dir=%s", ei.SymPath, parentDir) filename, err = followSymlink(parentDir, ei.SymPath) if err != nil { return nil, libkbfs.EntryInfo{}, err } } return nil, libkbfs.EntryInfo{}, errors.New("Too many levels of symlinks") } func translateErr(err error) error { switch errors.Cause(err).(type) { case libkbfs.NoSuchNameError: return os.ErrNotExist case libkbfs.TlfAccessError, libkbfs.ReadAccessError: return os.ErrPermission case libkbfs.NotDirError, libkbfs.NotFileError: return os.ErrInvalid case libkbfs.NameExistsError: return os.ErrExist default: return err } } func (fs *FS) mkdirAll(filename string, perm os.FileMode) (err error) { defer func() { err = translateErr(err) }() if filename == "/" || filename == "" || filename == "." { return nil } n, _, leftover, err := fs.lookupParentWithDepth(filename, true, 0) if err != nil { return err } parts := strings.Split(leftover, "/") // Make all necessary dirs. for _, p := range parts { child, _, err := fs.config.KBFSOps().CreateDir(fs.ctx, n, p) switch errors.Cause(err).(type) { case libkbfs.NameExistsError: // The child directory already exists. 
case libkbfs.WriteAccessError: // If the child already exists, this doesn't matter. var lookupErr error child, _, lookupErr = fs.config.KBFSOps().Lookup(fs.ctx, n, p) if lookupErr != nil { return err } case nil: default: return err } n = child } return nil } // OpenFile implements the billy.Filesystem interface for FS. func (fs *FS) OpenFile(filename string, flag int, perm os.FileMode) ( f billy.File, err error) { fs.log.CDebugf( fs.ctx, "OpenFile %s, flag=%d, perm=%o", filename, flag, perm) defer func() { fs.deferLog.CDebugf(fs.ctx, "OpenFile done: %+v", err) err = translateErr(err) }() err = fs.mkdirAll(path.Dir(filename), 0755) if err != nil && !os.IsExist(err) { return nil, err } n, ei, err := fs.lookupOrCreateEntry(filename, flag, perm) if err != nil { return nil, err } // Make sure this is a file. if !ei.Type.IsFile() { return nil, errors.Errorf("%s is not a file", filename) } if flag&os.O_TRUNC != 0 { err := fs.config.KBFSOps().Truncate(fs.ctx, n, 0) if err != nil { return nil, err } } offset := int64(0) if flag&os.O_APPEND != 0 { if ei.Size >= uint64(1<<63) { return nil, errors.New("offset too large") } offset = int64(ei.Size) } return &File{ fs: fs, filename: filename, node: n, readOnly: flag == os.O_RDONLY, offset: offset, }, nil } // Create implements the billy.Filesystem interface for FS. func (fs *FS) Create(filename string) (billy.File, error) { return fs.OpenFile(filename, os.O_CREATE, 0600) } // Open implements the billy.Filesystem interface for FS. func (fs *FS) Open(filename string) (billy.File, error) { return fs.OpenFile(filename, os.O_RDONLY, 0600) } // Stat implements the billy.Filesystem interface for FS. func (fs *FS) Stat(filename string) (fi os.FileInfo, err error) { fs.log.CDebugf(fs.ctx, "Stat %s", filename) defer func() { fs.deferLog.CDebugf(fs.ctx, "Stat done: %+v", err) err = translateErr(err) }() n, ei, err := fs.lookupOrCreateEntry(filename, os.O_RDONLY, 0) if err != nil { return nil, err } return &FileInfo{ fs: fs, ei: ei, name: n.GetBasename(), }, nil } // Rename implements the billy.Filesystem interface for FS. func (fs *FS) Rename(oldpath, newpath string) (err error) { fs.log.CDebugf(fs.ctx, "Rename %s -> %s", oldpath, newpath) defer func() { fs.deferLog.CDebugf(fs.ctx, "Rename done: %+v", err) err = translateErr(err) }() err = fs.mkdirAll(path.Dir(newpath), 0755) if err != nil && !os.IsExist(err) { return err } oldParent, _, oldBase, err := fs.lookupParent(oldpath) if err != nil { return err } newParent, _, newBase, err := fs.lookupParent(newpath) if err != nil { return err } return fs.config.KBFSOps().Rename( fs.ctx, oldParent, oldBase, newParent, newBase) } // Remove implements the billy.Filesystem interface for FS. func (fs *FS) Remove(filename string) (err error) { fs.log.CDebugf(fs.ctx, "Remove %s", filename) defer func() { fs.deferLog.CDebugf(fs.ctx, "Remove done: %+v", err) err = translateErr(err) }() parent, _, base, err := fs.lookupParent(filename) if err != nil { return err } _, ei, err := fs.config.KBFSOps().Lookup(fs.ctx, parent, base) if err != nil { return err } if ei.Type == libkbfs.Dir { return fs.config.KBFSOps().RemoveDir(fs.ctx, parent, base) } return fs.config.KBFSOps().RemoveEntry(fs.ctx, parent, base) } // Join implements the billy.Filesystem interface for FS. func (fs *FS) Join(elem ...string) string { return path.Clean(path.Join(elem...)) } // TempFile implements the billy.Filesystem interface for FS. 
func (fs *FS) TempFile(dir, prefix string) (billy.File, error) { // We'd have to turn off journaling to support TempFile perfectly, // but the given uniq ID and a random number should be good // enough. Especially since most users will end up renaming the // temp file before journal flushing even happens. b := make([]byte, 8) _, err := rand.Read(b) if err != nil { return nil, err } suffix := fs.uniqID + "-" + base64.URLEncoding.EncodeToString(b) return fs.OpenFile(path.Join(dir, prefix+suffix), os.O_CREATE|os.O_EXCL, 0600) } func (fs *FS) readDir(n libkbfs.Node) (fis []os.FileInfo, err error) { children, err := fs.config.KBFSOps().GetDirChildren(fs.ctx, n) if err != nil { return nil, err } fis = make([]os.FileInfo, 0, len(children)) for name, ei := range children { fis = append(fis, &FileInfo{ fs: fs, ei: ei, name: name, }) } return fis, nil } // ReadDir implements the billy.Filesystem interface for FS. func (fs *FS) ReadDir(p string) (fis []os.FileInfo, err error) { fs.log.CDebugf(fs.ctx, "ReadDir %s", p) defer func() { fs.deferLog.CDebugf(fs.ctx, "ReadDir done: %+v", err) err = translateErr(err) }() n, _, err := fs.lookupOrCreateEntry(p, os.O_RDONLY, 0) if err != nil { return nil, err } return fs.readDir(n) } // MkdirAll implements the billy.Filesystem interface for FS. func (fs *FS) MkdirAll(filename string, perm os.FileMode) (err error) { fs.log.CDebugf(fs.ctx, "MkdirAll %s", filename) defer func() { fs.deferLog.CDebugf(fs.ctx, "MkdirAll done: %+v", err) }() return fs.mkdirAll(filename, perm) } // Lstat implements the billy.Filesystem interface for FS. func (fs *FS) Lstat(filename string) (fi os.FileInfo, err error) { fs.log.CDebugf(fs.ctx, "Lstat %s", filename) defer func() { fs.deferLog.CDebugf(fs.ctx, "Lstat done: %+v", err) err = translateErr(err) }() n, _, base, err := fs.lookupParent(filename) if err != nil { return nil, err } _, ei, err := fs.config.KBFSOps().Lookup(fs.ctx, n, base) if err != nil { return nil, err } return &FileInfo{ fs: fs, ei: ei, name: base, }, nil } // Symlink implements the billy.Filesystem interface for FS. func (fs *FS) Symlink(target, link string) (err error) { fs.log.CDebugf(fs.ctx, "Symlink target=%s link=%s", target, link) defer func() { fs.deferLog.CDebugf(fs.ctx, "Symlink done: %+v", err) err = translateErr(err) }() n, _, base, err := fs.lookupParent(link) if err != nil { return err } _, err = fs.config.KBFSOps().CreateLink(fs.ctx, n, base, target) return err } // Readlink implements the billy.Filesystem interface for FS. func (fs *FS) Readlink(link string) (target string, err error) { fs.log.CDebugf(fs.ctx, "Readlink %s", link) defer func() { fs.deferLog.CDebugf(fs.ctx, "Readlink done: %+v", err) err = translateErr(err) }() n, _, base, err := fs.lookupParent(link) if err != nil { return "", err } _, ei, err := fs.config.KBFSOps().Lookup(fs.ctx, n, base) if err != nil { return "", err } if ei.Type != libkbfs.Sym { return "", errors.Errorf("%s is not a symlink", link) } return ei.SymPath, nil } // Chmod implements the billy.Filesystem interface for FS. func (fs *FS) Chmod(name string, mode os.FileMode) (err error) { fs.log.CDebugf(fs.ctx, "Chmod %s %s", name, mode) defer func() { fs.deferLog.CDebugf(fs.ctx, "Chmod done: %+v", err) err = translateErr(err) }() n, _, err := fs.lookupOrCreateEntry(name, os.O_RDONLY, 0) if err != nil { return err } isExec := (mode & 0100) != 0 return fs.config.KBFSOps().SetEx(fs.ctx, n, isExec) } // Lchown implements the billy.Filesystem interface for FS. 
func (fs *FS) Lchown(name string, uid, gid int) error { // KBFS doesn't support ownership changes. fs.log.CDebugf(fs.ctx, "Ignoring Lchown %s %d %d", name, uid, gid) return nil } // Chown implements the billy.Filesystem interface for FS. func (fs *FS) Chown(name string, uid, gid int) error { // KBFS doesn't support ownership changes. fs.log.CDebugf(fs.ctx, "Ignoring Chown %s %d %d", name, uid, gid) return nil } // Chtimes implements the billy.Filesystem interface for FS. func (fs *FS) Chtimes(name string, atime time.Time, mtime time.Time) ( err error) { fs.log.CDebugf(fs.ctx, "Chtimes %s mtime=%s; ignoring atime=%s", name, mtime, atime) defer func() { fs.deferLog.CDebugf(fs.ctx, "Chtimes done: %+v", err) err = translateErr(err) }() n, _, err := fs.lookupOrCreateEntry(name, os.O_RDONLY, 0) if err != nil { return err } return fs.config.KBFSOps().SetMtime(fs.ctx, n, &mtime) } // Chroot implements the billy.Filesystem interface for FS. func (fs *FS) Chroot(p string) (newFS billy.Filesystem, err error) { fs.log.CDebugf(fs.ctx, "Chroot %s", p) defer func() { fs.deferLog.CDebugf(fs.ctx, "Chroot done: %+v", err) err = translateErr(err) }() // lookupOrCreateEntry doesn't handle "..", so we don't have to // worry about someone trying to break out of the jail since this // lookup will fail. n, _, err := fs.lookupOrCreateEntry(p, os.O_RDONLY, 0) if err != nil { return nil, err } return &FS{ ctx: fs.ctx, fsInner: &fsInner{ config: fs.config, root: n, h: fs.h, subdir: path.Clean(path.Join(fs.subdir, p)), log: fs.log, deferLog: fs.deferLog, // Original lock namespace plus '/' plus the subdir. lockNamespace: bytes.Join( [][]byte{fs.lockNamespace, []byte(p)}, []byte{'/'}), priority: fs.priority, }, }, nil } // Root implements the billy.Filesystem interface for FS. func (fs *FS) Root() string { return path.Join(fs.h.GetCanonicalPath(), fs.subdir) } // SyncAll syncs any outstanding buffered writes to the KBFS journal. func (fs *FS) SyncAll() error { return fs.config.KBFSOps().SyncAll(fs.ctx, fs.root.GetFolderBranch()) } // Config returns the underlying Config object of this FS. func (fs *FS) Config() libkbfs.Config { return fs.config } // SetLockNamespace sets the namespace used in locking. func (fs *FS) SetLockNamespace(lockNamespace []byte) { fs.lockNamespace = make([]byte, len(lockNamespace)) copy(fs.lockNamespace, lockNamespace) } // GetLockNamespace returns the namespace used in locking. func (fs *FS) GetLockNamespace() (lockNamespace []byte) { return fs.lockNamespace } // SubscribeToEvents causes *File objects constructed from this *FS to send // events to the channel at beginning of Lock and Unlock. The send is done // blockingly so caller needs to drain the channel properly or make it buffered // with enough size. func (fs *FS) SubscribeToEvents(ch chan<- FSEvent) { fs.eventsLock.Lock() defer fs.eventsLock.Unlock() fs.events[ch] = true } // UnsubscribeToEvents stops *File objects constructed from this *FS from // sending events to ch. It also closes ch. func (fs *FS) UnsubscribeToEvents(ch chan<- FSEvent) { fs.eventsLock.Lock() defer fs.eventsLock.Unlock() delete(fs.events, ch) close(ch) } func (fs *FS) sendEvents(e FSEvent) { fs.eventsLock.RLock() defer fs.eventsLock.RUnlock() for ch := range fs.events { ch <- e } } // WithContext returns a *FS based on fs, with its ctx replaced with ctx. 
func (fs *FS) WithContext(ctx context.Context) *FS { return &FS{ ctx: ctx, fsInner: fs.fsInner, } } // ToHTTPFileSystem calls fs.WithCtx with ctx to create a *FS with the new ctx, // and returns a wrapper around it that satisfies the http.FileSystem // interface. func (fs *FS) ToHTTPFileSystem(ctx context.Context) http.FileSystem { return httpFileSystem{fs: fs.WithContext(ctx)} }
1
18,951
Whoa, TIL about this syntax.
keybase-kbfs
go
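The "TIL" comment in the record above doesn't say which syntax surprised the reviewer, and the patch itself isn't shown here. One plausible candidate, visible in `lookupParentWithDepth`, is the `case nil:` arm of a type switch, which matches when the interface value itself is nil (here, a nil error). A standalone sketch of that idiom — my guess, not taken from the kbfs code:

package main

import "fmt"

type myErr struct{}

func (myErr) Error() string { return "boom" }

func classify(err error) string {
	// A type switch can include `case nil:`, which matches when the
	// interface value itself is nil -- i.e. no error at all.
	switch err.(type) {
	case nil:
		return "no error"
	case myErr:
		return "myErr"
	default:
		return "some other error"
	}
}

func main() {
	fmt.Println(classify(nil))     // prints "no error"
	fmt.Println(classify(myErr{})) // prints "myErr"
}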
@@ -34,12 +34,14 @@ type fakeHandler struct { } func (h fakeHandler) Handle(context.Context, *yarpc.Request, *yarpc.Buffer) (*yarpc.Response, *yarpc.Buffer, error) { - errorInfo := yarpcerror.ExtractInfo(h.applicationErr) - res := &yarpc.Response{ApplicationErrorInfo: &errorInfo} if h.applicationErr != nil { + errorInfo := yarpcerror.ExtractInfo(h.applicationErr) + res := &yarpc.Response{ApplicationErrorInfo: &errorInfo} return res, nil, nil + } else if h.err != nil { + return nil, nil, h.err } - return res, nil, h.err + return &yarpc.Response{}, &yarpc.Buffer{}, nil } func (h fakeHandler) HandleStream(*yarpc.ServerStream) error {
1
// Copyright (c) 2018 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. package internalyarpcobservability import ( "context" "time" "go.uber.org/yarpc/v2" "go.uber.org/yarpc/v2/yarpcerror" ) type fakeHandler struct { err error applicationErr error } func (h fakeHandler) Handle(context.Context, *yarpc.Request, *yarpc.Buffer) (*yarpc.Response, *yarpc.Buffer, error) { errorInfo := yarpcerror.ExtractInfo(h.applicationErr) res := &yarpc.Response{ApplicationErrorInfo: &errorInfo} if h.applicationErr != nil { return res, nil, nil } return res, nil, h.err } func (h fakeHandler) HandleStream(*yarpc.ServerStream) error { return h.err } type fakeOutbound struct { err error applicationErr error } func (o fakeOutbound) Call(context.Context, *yarpc.Request, *yarpc.Buffer) (*yarpc.Response, *yarpc.Buffer, error) { if o.err != nil { return nil, nil, o.err } errorInfo := yarpcerror.ExtractInfo(o.applicationErr) return &yarpc.Response{ApplicationErrorInfo: &errorInfo}, nil, nil } func (o fakeOutbound) CallStream(ctx context.Context, request *yarpc.Request) (*yarpc.ClientStream, error) { if o.err != nil { return nil, o.err } return yarpc.NewClientStream(&fakeStream{ ctx: ctx, request: request, }) } var _ yarpc.Stream = (*fakeStream)(nil) type fakeStream struct { ctx context.Context request *yarpc.Request } func (s *fakeStream) Context() context.Context { return s.ctx } func (s *fakeStream) Request() *yarpc.Request { return s.request } func (s *fakeStream) SendMessage(context.Context, *yarpc.Buffer) error { return nil } func (s *fakeStream) ReceiveMessage(context.Context) (*yarpc.Buffer, error) { return nil, nil } func (s *fakeStream) Close(context.Context) error { return nil } func stubTime() func() { prev := _timeNow _timeNow = func() time.Time { return time.Time{} } return func() { _timeNow = prev } }
1
18,358
This is a smell. When is this valid? Perhaps this should be `nil, nil, ErrNoResponse` or some such (sketched after this record). While it is usually invalid to have neither a return value nor an error, we rarely check for that case in other idiomatic Go. Nothing wrong with extra defenses here, but think about it.
yarpc-yarpc-go
go
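A minimal sketch of the sentinel-error approach named in the review comment above. `ErrNoResponse` is the reviewer's hypothetical name, not an existing yarpc API; the handler below is a drop-in variant of the fake in this record, assuming the same imports plus the standard `errors` package.

// Hypothetical sentinel proposed in the review; it does not exist in yarpc.
var ErrNoResponse = errors.New("handler produced neither a response nor an error")

func (h fakeHandler) Handle(context.Context, *yarpc.Request, *yarpc.Buffer) (*yarpc.Response, *yarpc.Buffer, error) {
	if h.applicationErr != nil {
		errorInfo := yarpcerror.ExtractInfo(h.applicationErr)
		return &yarpc.Response{ApplicationErrorInfo: &errorInfo}, nil, nil
	}
	if h.err != nil {
		return nil, nil, h.err
	}
	// Fail loudly instead of fabricating an empty response, so the
	// "no result and no error" case cannot pass silently.
	return nil, nil, ErrNoResponse
}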
@@ -0,0 +1,7 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +classes = ('person', 'bicycle', 'car') +data = dict( + train=dict(classes=classes), + val=dict(classes=classes), + test=dict(classes=classes)) +load_from = 'https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/faster_rcnn_r50_fpn_1x_20181010-3d1b3351.pth' # noqa
1
1
18,919
This URL is outdated; add a `TODO` to fix it.
open-mmlab-mmdetection
py
@@ -23,10 +23,13 @@ type ( Proto() *iotextypes.ActionCore LoadProto(pbAct *iotextypes.ActionCore) error SetNonce(n uint64) + ChainID() uint32 + SetChainID(chainID uint32) } envelope struct { version uint32 + chainID uint32 nonce uint64 gasLimit uint64 gasPrice *big.Int
1
package action import ( "math/big" "github.com/iotexproject/iotex-proto/golang/iotextypes" "github.com/pkg/errors" "github.com/iotexproject/iotex-core/pkg/log" ) type ( // Envelope defines an envelope wrapped on action with some envelope metadata. Envelope interface { Version() uint32 Nonce() uint64 GasLimit() uint64 GasPrice() *big.Int Destination() (string, bool) Cost() (*big.Int, error) IntrinsicGas() (uint64, error) Action() Action Proto() *iotextypes.ActionCore LoadProto(pbAct *iotextypes.ActionCore) error SetNonce(n uint64) } envelope struct { version uint32 nonce uint64 gasLimit uint64 gasPrice *big.Int payload actionPayload } ) // Version returns the version func (elp *envelope) Version() uint32 { return elp.version } // Nonce returns the nonce func (elp *envelope) Nonce() uint64 { return elp.nonce } // Destination returns the destination address func (elp *envelope) Destination() (string, bool) { r, ok := elp.payload.(hasDestination) if !ok { return "", false } return r.Destination(), true } // GasLimit returns the gas limit func (elp *envelope) GasLimit() uint64 { return elp.gasLimit } // GasPrice returns the gas price func (elp *envelope) GasPrice() *big.Int { p := &big.Int{} if elp.gasPrice == nil { return p } return p.Set(elp.gasPrice) } // Cost returns cost of actions func (elp *envelope) Cost() (*big.Int, error) { return elp.payload.Cost() } // IntrinsicGas returns intrinsic gas of action. func (elp *envelope) IntrinsicGas() (uint64, error) { return elp.payload.IntrinsicGas() } // Action returns the action payload. func (elp *envelope) Action() Action { return elp.payload } // Proto convert Envelope to protobuf format. func (elp *envelope) Proto() *iotextypes.ActionCore { actCore := &iotextypes.ActionCore{ Version: elp.version, Nonce: elp.nonce, GasLimit: elp.gasLimit, } if elp.gasPrice != nil { actCore.GasPrice = elp.gasPrice.String() } // TODO assert each action switch act := elp.Action().(type) { case *Transfer: actCore.Action = &iotextypes.ActionCore_Transfer{Transfer: act.Proto()} case *Execution: actCore.Action = &iotextypes.ActionCore_Execution{Execution: act.Proto()} case *GrantReward: actCore.Action = &iotextypes.ActionCore_GrantReward{GrantReward: act.Proto()} case *ClaimFromRewardingFund: actCore.Action = &iotextypes.ActionCore_ClaimFromRewardingFund{ClaimFromRewardingFund: act.Proto()} case *DepositToRewardingFund: actCore.Action = &iotextypes.ActionCore_DepositToRewardingFund{DepositToRewardingFund: act.Proto()} case *PutPollResult: actCore.Action = &iotextypes.ActionCore_PutPollResult{PutPollResult: act.Proto()} case *CreateStake: actCore.Action = &iotextypes.ActionCore_StakeCreate{StakeCreate: act.Proto()} case *Unstake: actCore.Action = &iotextypes.ActionCore_StakeUnstake{StakeUnstake: act.Proto()} case *WithdrawStake: actCore.Action = &iotextypes.ActionCore_StakeWithdraw{StakeWithdraw: act.Proto()} case *DepositToStake: actCore.Action = &iotextypes.ActionCore_StakeAddDeposit{StakeAddDeposit: act.Proto()} case *Restake: actCore.Action = &iotextypes.ActionCore_StakeRestake{StakeRestake: act.Proto()} case *ChangeCandidate: actCore.Action = &iotextypes.ActionCore_StakeChangeCandidate{StakeChangeCandidate: act.Proto()} case *TransferStake: actCore.Action = &iotextypes.ActionCore_StakeTransferOwnership{StakeTransferOwnership: act.Proto()} case *CandidateRegister: actCore.Action = &iotextypes.ActionCore_CandidateRegister{CandidateRegister: act.Proto()} case *CandidateUpdate: actCore.Action = &iotextypes.ActionCore_CandidateUpdate{CandidateUpdate: act.Proto()} 
default: log.S().Panicf("Cannot convert type of action %T.\r\n", act) } return actCore } // LoadProto loads fields from protobuf format. func (elp *envelope) LoadProto(pbAct *iotextypes.ActionCore) error { if pbAct == nil { return errors.New("empty action proto to load") } if elp == nil { return errors.New("nil action to load proto") } *elp = envelope{} elp.version = pbAct.GetVersion() elp.nonce = pbAct.GetNonce() elp.gasLimit = pbAct.GetGasLimit() elp.gasPrice = &big.Int{} elp.gasPrice.SetString(pbAct.GetGasPrice(), 10) switch { case pbAct.GetTransfer() != nil: act := &Transfer{} if err := act.LoadProto(pbAct.GetTransfer()); err != nil { return err } elp.payload = act case pbAct.GetExecution() != nil: act := &Execution{} if err := act.LoadProto(pbAct.GetExecution()); err != nil { return err } elp.payload = act case pbAct.GetGrantReward() != nil: act := &GrantReward{} if err := act.LoadProto(pbAct.GetGrantReward()); err != nil { return err } elp.payload = act case pbAct.GetClaimFromRewardingFund() != nil: act := &ClaimFromRewardingFund{} if err := act.LoadProto(pbAct.GetClaimFromRewardingFund()); err != nil { return err } elp.payload = act case pbAct.GetDepositToRewardingFund() != nil: act := &DepositToRewardingFund{} if err := act.LoadProto(pbAct.GetDepositToRewardingFund()); err != nil { return err } elp.payload = act case pbAct.GetPutPollResult() != nil: act := &PutPollResult{} if err := act.LoadProto(pbAct.GetPutPollResult()); err != nil { return err } elp.payload = act case pbAct.GetStakeCreate() != nil: act := &CreateStake{} if err := act.LoadProto(pbAct.GetStakeCreate()); err != nil { return err } elp.payload = act case pbAct.GetStakeUnstake() != nil: act := &Unstake{} if err := act.LoadProto(pbAct.GetStakeUnstake()); err != nil { return err } elp.payload = act case pbAct.GetStakeWithdraw() != nil: act := &WithdrawStake{} if err := act.LoadProto(pbAct.GetStakeWithdraw()); err != nil { return err } elp.payload = act case pbAct.GetStakeAddDeposit() != nil: act := &DepositToStake{} if err := act.LoadProto(pbAct.GetStakeAddDeposit()); err != nil { return err } elp.payload = act case pbAct.GetStakeRestake() != nil: act := &Restake{} if err := act.LoadProto(pbAct.GetStakeRestake()); err != nil { return err } elp.payload = act case pbAct.GetStakeChangeCandidate() != nil: act := &ChangeCandidate{} if err := act.LoadProto(pbAct.GetStakeChangeCandidate()); err != nil { return err } elp.payload = act case pbAct.GetStakeTransferOwnership() != nil: act := &TransferStake{} if err := act.LoadProto(pbAct.GetStakeTransferOwnership()); err != nil { return err } elp.payload = act case pbAct.GetCandidateRegister() != nil: act := &CandidateRegister{} if err := act.LoadProto(pbAct.GetCandidateRegister()); err != nil { return err } elp.payload = act case pbAct.GetCandidateUpdate() != nil: act := &CandidateUpdate{} if err := act.LoadProto(pbAct.GetCandidateUpdate()); err != nil { return err } elp.payload = act default: return errors.Errorf("no applicable action to handle proto type %T", pbAct.Action) } return nil } // SetNonce sets the nonce value func (elp *envelope) SetNonce(n uint64) { elp.nonce = n }
1
23,636
`ChainID()` is a getter method; move it up next to `Nonce()` with the other getters (as sketched after this record).
iotexproject-iotex-core
go
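A sketch of the ordering the comment above asks for. Declaration order in a Go interface has no semantic effect, so this is purely a readability change: it groups `ChainID()` with the other getters and keeps the setters at the end.

// Envelope with getters grouped first and setters last, per the review.
type Envelope interface {
	Version() uint32
	ChainID() uint32
	Nonce() uint64
	GasLimit() uint64
	GasPrice() *big.Int
	Destination() (string, bool)
	Cost() (*big.Int, error)
	IntrinsicGas() (uint64, error)
	Action() Action
	Proto() *iotextypes.ActionCore
	LoadProto(pbAct *iotextypes.ActionCore) error
	SetNonce(n uint64)
	SetChainID(chainID uint32)
}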
@@ -163,6 +163,10 @@ public class FlowTriggerScheduler { flowId, flowTrigger, submitUser, quartzTriggers.isEmpty() ? null : quartzTriggers.get(0), isPaused, flow.isLocked()); } catch (final Exception ex) { + if (QuartzScheduler.isSerializationBug(ex) && this.scheduler.enableSerializationHack()) { + logger.info("Enable serialization hack"); + return getScheduledFlowTriggerJobs(); + } logger.error("Unable to get flow trigger by job key {}", jobKey, ex); scheduledFlowTrigger = null; }
1
/* * Copyright 2017 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.flowtrigger.quartz; import static java.util.Objects.requireNonNull; import azkaban.flow.Flow; import azkaban.project.FlowLoaderUtils; import azkaban.project.FlowTrigger; import azkaban.project.Project; import azkaban.project.ProjectLoader; import azkaban.project.ProjectManager; import azkaban.project.ProjectManagerException; import azkaban.scheduler.QuartzJobDescription; import azkaban.scheduler.QuartzScheduler; import com.google.common.collect.ImmutableMap; import com.google.common.io.Files; import com.google.gson.GsonBuilder; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.TimeZone; import javax.inject.Inject; import javax.inject.Singleton; import org.quartz.JobDataMap; import org.quartz.JobDetail; import org.quartz.JobKey; import org.quartz.Scheduler; import org.quartz.SchedulerException; import org.quartz.Trigger; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Singleton public class FlowTriggerScheduler { private static final Logger logger = LoggerFactory.getLogger(FlowTriggerScheduler.class); private final ProjectLoader projectLoader; private final QuartzScheduler scheduler; private final ProjectManager projectManager; @Inject public FlowTriggerScheduler(final ProjectLoader projectLoader, final QuartzScheduler scheduler, final ProjectManager projectManager) { this.projectLoader = requireNonNull(projectLoader); this.scheduler = requireNonNull(scheduler); this.projectManager = requireNonNull(projectManager); } /** * Schedule flows containing flow triggers for this project. */ public void schedule(final Project project, final String submitUser) throws ProjectManagerException, IOException, SchedulerException { for (final Flow flow : project.getFlows()) { //todo chengren311: we should validate embedded flow shouldn't have flow trigger defined. 
if (flow.isEmbeddedFlow()) { // skip scheduling embedded flow since embedded flow are not allowed to have flow trigger continue; } final String flowFileName = flow.getId() + ".flow"; final int latestFlowVersion = this.projectLoader .getLatestFlowVersion(flow.getProjectId(), flow .getVersion(), flowFileName); if (latestFlowVersion > 0) { final File tempDir = Files.createTempDir(); final File flowFile; try { flowFile = this.projectLoader .getUploadedFlowFile(project.getId(), project.getVersion(), flowFileName, latestFlowVersion, tempDir); final FlowTrigger flowTrigger = FlowLoaderUtils.getFlowTriggerFromYamlFile(flowFile); if (flowTrigger != null) { final Map<String, Object> contextMap = ImmutableMap .of(FlowTriggerQuartzJob.SUBMIT_USER, submitUser, FlowTriggerQuartzJob.FLOW_TRIGGER, flowTrigger, FlowTriggerQuartzJob.FLOW_ID, flow.getId(), FlowTriggerQuartzJob.FLOW_VERSION, latestFlowVersion, FlowTriggerQuartzJob.PROJECT_ID, project.getId()); final boolean scheduleSuccess = this.scheduler .scheduleJobIfAbsent(flowTrigger.getSchedule().getCronExpression(), TimeZone.getTimeZone(flowTrigger.getSchedule().getTimeZone()), new QuartzJobDescription (FlowTriggerQuartzJob.class, FlowTriggerQuartzJob.JOB_NAME, generateGroupName(flow), contextMap)); if (scheduleSuccess) { logger.info("Successfully registered flow {}.{} to scheduler", project.getName(), flow.getId()); } else { logger.info("Fail to register a duplicate flow {}.{} to scheduler", project.getName(), flow.getId()); } } } catch (final SchedulerException | IOException ex) { logger.error("Error in registering flow {}.{}", project.getName(), flow.getId(), ex); throw ex; } finally { FlowLoaderUtils.cleanUpDir(tempDir); } } } } public boolean pauseFlowTriggerIfPresent(final int projectId, final String flowId) throws SchedulerException { return this.scheduler .pauseJobIfPresent(FlowTriggerQuartzJob.JOB_NAME, generateGroupName(projectId, flowId)); } public boolean resumeFlowTriggerIfPresent(final int projectId, final String flowId) throws SchedulerException { return this.scheduler .resumeJobIfPresent(FlowTriggerQuartzJob.JOB_NAME, generateGroupName(projectId, flowId)); } /** * Retrieve the list of scheduled flow triggers from quartz database */ public List<ScheduledFlowTrigger> getScheduledFlowTriggerJobs() { try { final Scheduler quartzScheduler = this.scheduler.getScheduler(); final List<String> groupNames = quartzScheduler.getJobGroupNames(); final List<ScheduledFlowTrigger> flowTriggerJobDetails = new ArrayList<>(); for (final String groupName : groupNames) { final JobKey jobKey = new JobKey(FlowTriggerQuartzJob.JOB_NAME, groupName); ScheduledFlowTrigger scheduledFlowTrigger = null; try { final JobDetail job = quartzScheduler.getJobDetail(jobKey); final JobDataMap jobDataMap = job.getJobDataMap(); final String flowId = jobDataMap.getString(FlowTriggerQuartzJob.FLOW_ID); final int projectId = jobDataMap.getInt(FlowTriggerQuartzJob.PROJECT_ID); final FlowTrigger flowTrigger = (FlowTrigger) jobDataMap .get(FlowTriggerQuartzJob.FLOW_TRIGGER); final String submitUser = jobDataMap.getString(FlowTriggerQuartzJob.SUBMIT_USER); final List<? 
extends Trigger> quartzTriggers = quartzScheduler.getTriggersOfJob(jobKey); final boolean isPaused = this.scheduler .isJobPaused(FlowTriggerQuartzJob.JOB_NAME, groupName); final Project project = projectManager.getProject(projectId); final Flow flow = project.getFlow(flowId); scheduledFlowTrigger = new ScheduledFlowTrigger(projectId, this.projectManager.getProject(projectId).getName(), flowId, flowTrigger, submitUser, quartzTriggers.isEmpty() ? null : quartzTriggers.get(0), isPaused, flow.isLocked()); } catch (final Exception ex) { logger.error("Unable to get flow trigger by job key {}", jobKey, ex); scheduledFlowTrigger = null; } flowTriggerJobDetails.add(scheduledFlowTrigger); } return flowTriggerJobDetails; } catch (final Exception ex) { logger.error("Unable to get scheduled flow triggers", ex); return new ArrayList<>(); } } /** * Unschedule all possible flows in a project */ public void unschedule(final Project project) throws SchedulerException { for (final Flow flow : project.getFlows()) { if (!flow.isEmbeddedFlow()) { try { if (this.scheduler .unscheduleJob(FlowTriggerQuartzJob.JOB_NAME, generateGroupName(flow))) { logger.info("Flow {}.{} unregistered from scheduler", project.getName(), flow.getId()); } } catch (final SchedulerException e) { logger.error("Fail to unregister flow from scheduler {}.{}", project.getName(), flow.getId(), e); throw e; } } } } private String generateGroupName(final Flow flow) { return generateGroupName(flow.getProjectId(), flow.getId()); } private String generateGroupName(final int projectId, final String flowId) { return String.valueOf(projectId) + "." + flowId; } public void start() throws SchedulerException { this.scheduler.start(); } public void shutdown() throws SchedulerException { this.scheduler.shutdown(); } public static class ScheduledFlowTrigger { private final int projectId; private final String projectName; private final String flowId; private final FlowTrigger flowTrigger; private final Trigger quartzTrigger; private final String submitUser; private final boolean isPaused; private final boolean isLocked; public ScheduledFlowTrigger(final int projectId, final String projectName, final String flowId, final FlowTrigger flowTrigger, final String submitUser, final Trigger quartzTrigger, final boolean isPaused, final boolean isLocked) { this.projectId = projectId; this.projectName = projectName; this.flowId = flowId; this.flowTrigger = flowTrigger; this.submitUser = submitUser; this.quartzTrigger = quartzTrigger; this.isPaused = isPaused; this.isLocked = isLocked; } public boolean isPaused() { return this.isPaused; } public int getProjectId() { return this.projectId; } public String getProjectName() { return this.projectName; } public String getFlowId() { return this.flowId; } public FlowTrigger getFlowTrigger() { return this.flowTrigger; } public String getDependencyListJson() { return new GsonBuilder().setPrettyPrinting().create() .toJson(this.flowTrigger.getDependencies()); } public Trigger getQuartzTrigger() { return this.quartzTrigger; } public String getSubmitUser() { return this.submitUser; } public boolean isLocked() { return this.isLocked; } } }
1
19,004
If this condition is not met, shall we just throw the exception rather than logging it? @li-ygerchikov Your thoughts?
azkaban-azkaban
java
@@ -57,6 +57,7 @@ REQUIRED_PACKAGES = [ 'mock>=2.0.0', 'parameterized>=0.6.1', 'simple-crypt>=4.1.7', + 'ruamel.yaml' ] if sys.version_info < (2, 7):
1
#!/usr/bin/env python # Copyright 2017 The Forseti Security Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Setup script for Forseti Security tools.""" import os import sys from setuptools import find_packages from setuptools import setup from setuptools.command.install import install from setup.util import build_protos import google.cloud.forseti FORSETI_VERSION = google.cloud.forseti.__version__ NAMESPACE_PACKAGES = [ 'google', 'google.cloud', 'google.cloud.forseti' ] REQUIRED_PACKAGES = [ 'anytree>=2.1.4', 'futures>=3.0.5', 'google-api-python-client>=1.6.1', 'Jinja2>=2.9.5', 'MySQL-python>=1.2.5', 'netaddr>=0.7.19', 'PyYAML>=3.12', 'ratelimiter>=1.1.0', 'retrying>=1.3.3', 'requests[security]>=2.18.4', 'sendgrid>=3.6.3', 'SQLAlchemy>=1.1.9', 'protobuf>=3.2.0', 'pygraph>=0.2.1', 'unicodecsv>=0.14.1', 'google-apputils>=0.4.2', 'grpcio', 'grpcio-tools', 'python-gflags>=3.1.1', 'mock>=2.0.0', 'parameterized>=0.6.1', 'simple-crypt>=4.1.7', ] if sys.version_info < (2, 7): sys.exit('Sorry, Python < 2.7 is not supported.') if sys.version_info.major > 2: sys.exit('Sorry, Python 3 is not supported.') def build_forseti_protos(clean_only=False): """Clean and optionally Build protos. Args: clean_only (boolean): Whether to only clean previously built protos. """ abs_path = os.path.abspath(__file__) build_protos.clean(abs_path) if not clean_only: build_protos.make_proto(abs_path) class BuildProtosCommand(install): """A command to build protos in all children directories.""" def run(self): build_forseti_protos() class CleanProtosCommand(install): """A command to clean protos in all children directories.""" def run(self): build_forseti_protos(clean_only=True) class PostInstallCommand(install): """Post installation command.""" def run(self): build_forseti_protos() install.do_egg_install(self) setup( name='forseti-security', version=FORSETI_VERSION, description='Forseti Security tools', author='Google LLC.', author_email='[email protected]', url='https://github.com/GoogleCloudPlatform/forseti-security', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'License :: OSI Approved :: Apache Software License' ], cmdclass={ 'build_protos': BuildProtosCommand, 'clean_protos': CleanProtosCommand, 'install': PostInstallCommand, }, install_requires=REQUIRED_PACKAGES, setup_requires=REQUIRED_PACKAGES, tests_require=REQUIRED_PACKAGES, packages=find_packages(exclude=[ '*.tests', '*.tests.*', 'tests.*', 'tests']), include_package_data=True, package_data={ '': ['cloud/forseti/common/email_templates/*.jinja'] }, namespace_packages=NAMESPACE_PACKAGES, google_test_dir='tests', license='Apache 2.0', keywords='gcp google cloud platform security tools', entry_points={ 'console_scripts': [ 'forseti_enforcer = google.cloud.forseti.stubs:RunForsetiEnforcer', 'forseti_server = google.cloud.forseti.stubs:RunForsetiServer', 'forseti = google.cloud.forseti.stubs:RunForsetiCli', ] }, zip_safe=False, # Set to False: apputils doesn't like zip_safe eggs )
1
28,861
nit: alpha-sort `ruamel.yaml` in this list please
forseti-security-forseti-security
py
@@ -58,6 +58,15 @@ public class TableScanUtil { Function<FileScanTask, Long> weightFunc = file -> Math.max(file.length(), openFileCost); + Preconditions.checkArgument(splitSize > 0, + "Cannot create a plan with a split size that is not positive, splitSize = %s", splitSize); + + Preconditions.checkArgument(lookback > 0, + "Cannot create a plan with a lookback that is not positive, lookback = %s", lookback); + + Preconditions.checkArgument(openFileCost >= 0, + "Cannot create a plan with a openFileCost that is negative, openFileCost = %s", openFileCost); + return CloseableIterable.transform( CloseableIterable.combine( new BinPacking.PackingIterable<>(splitFiles, splitSize, lookback, weightFunc, true),
1
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iceberg.util; import java.util.function.Function; import org.apache.iceberg.BaseCombinedScanTask; import org.apache.iceberg.CombinedScanTask; import org.apache.iceberg.FileScanTask; import org.apache.iceberg.io.CloseableIterable; import org.apache.iceberg.relocated.com.google.common.base.Preconditions; import org.apache.iceberg.relocated.com.google.common.collect.FluentIterable; public class TableScanUtil { private TableScanUtil() { } public static boolean hasDeletes(CombinedScanTask task) { return task.files().stream().anyMatch(TableScanUtil::hasDeletes); } public static boolean hasDeletes(FileScanTask task) { return !task.deletes().isEmpty(); } public static CloseableIterable<FileScanTask> splitFiles(CloseableIterable<FileScanTask> tasks, long splitSize) { Preconditions.checkArgument(splitSize > 0, "Invalid split size (negative or 0): %s", splitSize); Iterable<FileScanTask> splitTasks = FluentIterable .from(tasks) .transformAndConcat(input -> input.split(splitSize)); // Capture manifests which can be closed after scan planning return CloseableIterable.combine(splitTasks, tasks); } public static CloseableIterable<CombinedScanTask> planTasks(CloseableIterable<FileScanTask> splitFiles, long splitSize, int lookback, long openFileCost) { Preconditions.checkArgument(splitSize > 0, "Invalid split size (negative or 0): %s", splitSize); Preconditions.checkArgument(lookback > 0, "Invalid split planning lookback (negative or 0): %s", lookback); Preconditions.checkArgument(openFileCost >= 0, "Invalid file open cost (negative): %s", openFileCost); Function<FileScanTask, Long> weightFunc = file -> Math.max(file.length(), openFileCost); return CloseableIterable.transform( CloseableIterable.combine( new BinPacking.PackingIterable<>(splitFiles, splitSize, lookback, weightFunc, true), splitFiles), BaseCombinedScanTask::new); } }
1
37,513
Remove this because we already added it in another PR.
apache-iceberg
java
@@ -79,7 +79,13 @@ public class ShortcutManager { try { if (shortcutDTO.getIcon() != null) { - File file = new File(shortcutDTO.getIcon()); + File file; + try { + file = new File(shortcutDTO.getIcon()); + } catch (IllegalArgumentException e) { + // fallback to phoenicis.png + file = new File(getClass().getResource("phoenicis.png").toExternalForm()); + } if (file.exists()) { FileUtils.copyFile(file, iconFile); }
1
/* * Copyright (C) 2015-2017 PÂRIS Quentin * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. */ package org.phoenicis.library; import com.fasterxml.jackson.databind.ObjectMapper; import jdk.nashorn.api.scripting.ScriptObjectMirror; import org.apache.commons.io.FileUtils; import org.phoenicis.configuration.security.Safe; import org.phoenicis.library.dto.ShortcutDTO; import org.phoenicis.library.dto.ShortcutInfoDTO; import org.phoenicis.scripts.interpreter.InteractiveScriptSession; import org.phoenicis.scripts.interpreter.ScriptInterpreter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.net.URI; import java.nio.file.Files; import java.util.function.Consumer; @Safe public class ShortcutManager { private static final Logger LOGGER = LoggerFactory.getLogger(ShortcutManager.class); private static final String ENCODING = "UTF-8"; private final String shortcutDirectory; private final LibraryManager libraryManager; private final ScriptInterpreter scriptInterpreter; private ObjectMapper objectMapper; private final String desktopShortcutDirectory; ShortcutManager(String shortcutDirectory, String desktopShortcutDirectory, LibraryManager libraryManager, ScriptInterpreter scriptInterpreter, ObjectMapper objectMapper) { this.shortcutDirectory = shortcutDirectory; this.desktopShortcutDirectory = desktopShortcutDirectory; this.libraryManager = libraryManager; this.scriptInterpreter = scriptInterpreter; this.objectMapper = objectMapper; } public void createShortcut(ShortcutDTO shortcutDTO) { final ShortcutInfoDTO shortcutInfo = shortcutDTO.getInfo(); final String baseName = shortcutDTO.getId(); final File shortcutDirectoryFile = new File(this.shortcutDirectory); final File infoFile = new File(shortcutDirectoryFile, baseName + ".info"); final File scriptFile = new File(shortcutDirectoryFile, baseName + ".shortcut"); final File iconFile = new File(shortcutDirectoryFile, baseName + ".icon"); final File miniatureFile = new File(shortcutDirectoryFile, baseName + ".miniature"); if (!shortcutDirectoryFile.exists()) { shortcutDirectoryFile.mkdirs(); } try { this.objectMapper.writeValue(infoFile, shortcutInfo); FileUtils.writeStringToFile(scriptFile, shortcutDTO.getScript(), ENCODING); try { if (shortcutDTO.getIcon() != null) { File file = new File(shortcutDTO.getIcon()); if (file.exists()) { FileUtils.copyFile(file, iconFile); } } } catch (IOException | IllegalArgumentException e) { LOGGER.warn("Error while creating shortcut icon", e); } try { if (shortcutDTO.getMiniature() != null) { File file = new File(shortcutDTO.getMiniature()); if (file.exists()) { FileUtils.copyFile(file, miniatureFile); } } } catch (IOException | IllegalArgumentException e) { LOGGER.warn("Error while creating miniature", e); } } catch (IOException e) { LOGGER.warn("Error while creating shortcut", e); } finally { 
libraryManager.refresh(); } if (this.desktopShortcutDirectory != null) { final File desktopShortcutDirectoryFile = new File(this.desktopShortcutDirectory); final File desktopShortcutFile = new File(desktopShortcutDirectoryFile, baseName + ".desktop"); try { final String content = "[Desktop Entry]\n" + "Name=" + shortcutInfo.getName() + "\n" + "Type=Application\n" + "Icon=" + miniatureFile.getAbsolutePath() + "\n" + "Exec=phoenicis-cli -run \"" + shortcutInfo.getName() + "\""; FileUtils.writeStringToFile(desktopShortcutFile, content, ENCODING); } catch (IOException e) { LOGGER.warn("Error while creating .desktop", e); } } } public void uninstallFromShortcut(ShortcutDTO shortcutDTO, Consumer<Exception> errorCallback) { final InteractiveScriptSession interactiveScriptSession = scriptInterpreter.createInteractiveSession(); interactiveScriptSession.eval("include([\"engines\", \"wine\", \"shortcuts\", \"reader\"]);", ignored -> interactiveScriptSession.eval("new ShortcutReader()", output -> { final ScriptObjectMirror shortcutReader = (ScriptObjectMirror) output; shortcutReader.callMember("of", shortcutDTO); shortcutReader.callMember("uninstall"); }, errorCallback), errorCallback); } public void deleteShortcut(ShortcutDTO shortcutDTO) { final String baseName = shortcutDTO.getId(); final File shortcutDirectory = new File(this.shortcutDirectory); final File infoFile = new File(shortcutDirectory, baseName + ".info"); final File scriptFile = new File(shortcutDirectory, baseName + ".shortcut"); final File iconFile = new File(shortcutDirectory, baseName + ".icon"); final File miniatureFile = new File(shortcutDirectory, baseName + ".miniature"); if (infoFile.exists()) { infoFile.delete(); } if (scriptFile.exists()) { scriptFile.delete(); } if (iconFile.exists()) { iconFile.delete(); } if (miniatureFile.exists()) { miniatureFile.delete(); } if (this.desktopShortcutDirectory != null) { final File desktopShortcutDirectoryFile = new File(this.desktopShortcutDirectory); final File desktopShortcutFile = new File(desktopShortcutDirectoryFile, baseName + ".desktop"); if (desktopShortcutFile.exists()) { desktopShortcutFile.delete(); } } libraryManager.refresh(); } public void updateShortcut(ShortcutDTO shortcutDTO) { final String baseName = shortcutDTO.getId(); final File shortcutDirectory = new File(this.shortcutDirectory); // backup icon if it didn't change (deleteShortcut will delete it -> icon lost after shortcut update) final File iconFile = new File(shortcutDirectory, baseName + ".icon"); final File iconBackup = new File(shortcutDirectory, baseName + ".icon_backup"); final URI shortcutIcon = shortcutDTO.getIcon(); if (shortcutIcon != null && shortcutIcon.getPath() != null) { final boolean keepIcon = shortcutIcon.getPath().equals(iconFile.getPath()); if (keepIcon) { try { Files.move(iconFile.toPath(), iconBackup.toPath()); shortcutDTO = new ShortcutDTO.Builder(shortcutDTO).withIcon(iconBackup.toURI()).build(); } catch (IOException e) { LOGGER.error("Could not backup icon.", e); } } } // backup miniature if it didn't change (deleteShortcut will delete it -> miniature lost after shortcut update) final File miniatureFile = new File(shortcutDirectory, baseName + ".miniature"); final File miniatureBackup = new File(shortcutDirectory, baseName + ".miniature_backup"); final URI shortcutMiniature = shortcutDTO.getMiniature(); if (shortcutMiniature != null && shortcutMiniature.getPath() != null) { final boolean keepMiniature = shortcutMiniature.getPath().equals(miniatureFile.getPath()); if (keepMiniature) { try { 
Files.move(miniatureFile.toPath(), miniatureBackup.toPath()); shortcutDTO = new ShortcutDTO.Builder(shortcutDTO).withMiniature(miniatureBackup.toURI()).build(); } catch (IOException e) { LOGGER.error("Could not backup miniature.", e); } } } deleteShortcut(shortcutDTO); createShortcut(shortcutDTO); // delete backups if (iconBackup.exists()) { iconBackup.delete(); } if (miniatureBackup.exists()) { miniatureBackup.delete(); } } }
1
12,521
Can we maybe move the `File` creation to a new method? Both changes look identical to me except for two parts, which could be passed in as two input parameters.
PhoenicisOrg-phoenicis
java
@@ -9,6 +9,8 @@ class ApiToken < ActiveRecord::Base # TODO validates :access_token, presence: true validates :approval_id, presence: true + validates :access_token, presence: true, allow_blank: true, length: { minimum: 0, maximum: 255 } + validates :user, presence: true, allow_blank: true scope :unexpired, -> { where('expires_at >= ?', Time.now) } scope :expired, -> { where('expires_at < ?', Time.now) }
1
class ApiToken < ActiveRecord::Base has_paper_trail before_create :generate_token belongs_to :approval, class_name: 'Approvals::Individual' has_one :proposal, through: :approval has_one :user, through: :approval # TODO validates :access_token, presence: true validates :approval_id, presence: true scope :unexpired, -> { where('expires_at >= ?', Time.now) } scope :expired, -> { where('expires_at < ?', Time.now) } scope :unused, -> { where(used_at: nil) } scope :fresh, -> { unused.unexpired } def used? !!self.used_at end # @todo: validate presence of expires_at def expired? self.expires_at && self.expires_at < Time.now end def use! self.update_attributes!(used_at: Time.now) end private def generate_token begin self.access_token = SecureRandom.hex end while self.class.exists?(access_token: access_token) self.expires_at ||= Time.now + 7.days end end
1
13,884
I think we want to require the association with the user...?
18F-C2
rb
@@ -110,8 +110,17 @@ func (smsg *SignedMessage) VerifySignature() error { } // OnChainLen returns the amount of bytes used to represent the message on chain. +// Dragons: verify this is the correct way to determine a signed messages size on chain. func (smsg *SignedMessage) OnChainLen() uint32 { - panic("byteme") + msgBits, err := encoding.Encode(smsg.Message) + if err != nil { + panic(err) + } + sigBits, err := encoding.Encode(smsg.Signature) + if err != nil { + panic(err) + } + return uint32(len(msgBits) + len(sigBits)) } func (smsg *SignedMessage) String() string {
1
package types import ( "bytes" "encoding/json" "fmt" blocks "github.com/ipfs/go-block-format" "github.com/ipfs/go-cid" cbor "github.com/ipfs/go-ipld-cbor" ipld "github.com/ipfs/go-ipld-format" "github.com/pkg/errors" "github.com/filecoin-project/go-filecoin/internal/pkg/constants" "github.com/filecoin-project/go-filecoin/internal/pkg/crypto" "github.com/filecoin-project/go-filecoin/internal/pkg/encoding" ) // SignedMessage contains a message and its signature // TODO do not export these fields as it increases the chances of producing a // `SignedMessage` with an empty signature. type SignedMessage struct { // control field for encoding struct as an array _ struct{} `cbor:",toarray"` Message UnsignedMessage `json:"meteredMessage"` Signature crypto.Signature `json:"signature"` // Pay attention to Equals() if updating this struct. } // NewSignedMessage accepts a message `msg` and a signer `s`. NewSignedMessage returns a `SignedMessage` containing // a signature derived from the serialized `msg` and `msg.From` func NewSignedMessage(msg UnsignedMessage, s Signer) (*SignedMessage, error) { msgData, err := msg.Marshal() if err != nil { return nil, err } sig, err := s.SignBytes(msgData, msg.From) if err != nil { return nil, err } return &SignedMessage{ Message: msg, Signature: sig, }, nil } // UnwrapSigned returns the unsigned messages from a slice of signed messages. func UnwrapSigned(smsgs []*SignedMessage) []*UnsignedMessage { unsigned := make([]*UnsignedMessage, len(smsgs)) for i, sm := range smsgs { unsigned[i] = &sm.Message } return unsigned } // Unmarshal a SignedMessage from the given bytes. func (smsg *SignedMessage) Unmarshal(b []byte) error { return encoding.Decode(b, smsg) } // Marshal the SignedMessage into bytes. func (smsg *SignedMessage) Marshal() ([]byte, error) { return encoding.Encode(smsg) } // Cid returns the canonical CID for the SignedMessage. func (smsg *SignedMessage) Cid() (cid.Cid, error) { obj, err := smsg.ToNode() if err != nil { return cid.Undef, errors.Wrap(err, "failed to marshal to cbor") } return obj.Cid(), nil } // ToNode converts the SignedMessage to an IPLD node. func (smsg *SignedMessage) ToNode() (ipld.Node, error) { data, err := encoding.Encode(smsg) if err != nil { return nil, err } c, err := constants.DefaultCidBuilder.Sum(data) if err != nil { return nil, err } blk, err := blocks.NewBlockWithCid(data, c) if err != nil { return nil, err } obj, err := cbor.DecodeBlock(blk) if err != nil { return nil, err } return obj, nil } // VerifySignature returns true iff the signature is valid for the message content and from address. func (smsg *SignedMessage) VerifySignature() error { bmsg, err := smsg.Message.Marshal() if err != nil { return err } return crypto.ValidateSignature(bmsg, smsg.Message.From, smsg.Signature) } // OnChainLen returns the amount of bytes used to represent the message on chain. func (smsg *SignedMessage) OnChainLen() uint32 { panic("byteme") } func (smsg *SignedMessage) String() string { errStr := "(error encoding SignedMessage)" cid, err := smsg.Cid() if err != nil { return errStr } js, err := json.MarshalIndent(smsg, "", " ") if err != nil { return errStr } return fmt.Sprintf("SignedMessage cid=[%v]: %s", cid, string(js)) } // Equals tests whether two signed messages are equal. func (smsg *SignedMessage) Equals(other *SignedMessage) bool { return smsg.Message.Equals(&other.Message) && smsg.Signature.Type == other.Signature.Type && bytes.Equal(smsg.Signature.Data, other.Signature.Data) }
1
23,004
I am unsure whether the implementation below is correct, but we need this method not to panic, or else it borks the chain-validation tests. (An alternative is sketched after this record.)
filecoin-project-venus
go
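A hedged alternative to the patch above, in case summing the separately encoded fields is wrong: encode the whole `SignedMessage` once, so the result includes whatever framing (e.g. the CBOR array header implied by the `toarray` struct tag) the struct-level encoding adds. This assumes the on-chain form is the struct's own encoding, which still needs verification, and reuses the `encoding.Encode` helper already used by `Marshal` in this file.

// OnChainLen returns the amount of bytes used to represent the message on chain.
// Alternative sketch: measure the encoding of the whole struct instead of
// summing Message and Signature separately. Still unverified against the
// actual chain representation.
func (smsg *SignedMessage) OnChainLen() uint32 {
	bits, err := encoding.Encode(smsg)
	if err != nil {
		panic(err)
	}
	return uint32(len(bits))
}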
@@ -19,7 +19,7 @@ A wrapper class for Spark Column to behave similar to pandas Series. """ import re import inspect -from collections import Iterable +from collections import Iterable, OrderedDict from functools import partial, wraps from typing import Any, Generic, List, Optional, Tuple, TypeVar, Union
1
# # Copyright (C) 2019 Databricks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ A wrapper class for Spark Column to behave similar to pandas Series. """ import re import inspect from collections import Iterable from functools import partial, wraps from typing import Any, Generic, List, Optional, Tuple, TypeVar, Union import numpy as np import pandas as pd from pandas.core.accessor import CachedAccessor from pandas.io.formats.printing import pprint_thing from pyspark import sql as spark from pyspark.sql import functions as F, Column from pyspark.sql.types import BooleanType, StructType from pyspark.sql.window import Window from databricks import koalas as ks # For running doctests and reference resolution in PyCharm. from databricks.koalas.config import get_option from databricks.koalas.base import IndexOpsMixin from databricks.koalas.frame import DataFrame from databricks.koalas.generic import _Frame from databricks.koalas.internal import IndexMap, _InternalFrame from databricks.koalas.missing.series import _MissingPandasLikeSeries from databricks.koalas.plot import KoalasSeriesPlotMethods from databricks.koalas.utils import validate_arguments_and_invoke_function, scol_for from databricks.koalas.datetimes import DatetimeMethods from databricks.koalas.strings import StringMethods # This regular expression pattern is complied and defined here to avoid to compile the same # pattern every time it is used in _repr_ in Series. # This pattern basically seeks the footer string from Pandas' REPR_PATTERN = re.compile(r"Length: (?P<length>[0-9]+)") _flex_doc_SERIES = """ Return {desc} of series and other, element-wise (binary operator `{op_name}`). Equivalent to ``{equiv}`` Parameters ---------- other : Series or scalar value Returns ------- Series The result of the operation. See Also -------- Series.{reverse} {series_examples} """ _add_example_SERIES = """ Examples -------- >>> df = ks.DataFrame({'a': [2, 2, 4, np.nan], ... 'b': [2, np.nan, 2, np.nan]}, ... index=['a', 'b', 'c', 'd'], columns=['a', 'b']) >>> df a b a 2.0 2.0 b 2.0 NaN c 4.0 2.0 d NaN NaN >>> df.a.add(df.b) a 4.0 b NaN c 6.0 d NaN Name: a, dtype: float64 >>> df.a.radd(df.b) a 4.0 b NaN c 6.0 d NaN Name: a, dtype: float64 """ _sub_example_SERIES = """ Examples -------- >>> df = ks.DataFrame({'a': [2, 2, 4, np.nan], ... 'b': [2, np.nan, 2, np.nan]}, ... index=['a', 'b', 'c', 'd'], columns=['a', 'b']) >>> df a b a 2.0 2.0 b 2.0 NaN c 4.0 2.0 d NaN NaN >>> df.a.subtract(df.b) a 0.0 b NaN c 2.0 d NaN Name: a, dtype: float64 >>> df.a.rsub(df.b) a 0.0 b NaN c -2.0 d NaN Name: a, dtype: float64 """ _mul_example_SERIES = """ Examples -------- >>> df = ks.DataFrame({'a': [2, 2, 4, np.nan], ... 'b': [2, np.nan, 2, np.nan]}, ... 
index=['a', 'b', 'c', 'd'], columns=['a', 'b']) >>> df a b a 2.0 2.0 b 2.0 NaN c 4.0 2.0 d NaN NaN >>> df.a.multiply(df.b) a 4.0 b NaN c 8.0 d NaN Name: a, dtype: float64 >>> df.a.rmul(df.b) a 4.0 b NaN c 8.0 d NaN Name: a, dtype: float64 """ _div_example_SERIES = """ Examples -------- >>> df = ks.DataFrame({'a': [2, 2, 4, np.nan], ... 'b': [2, np.nan, 2, np.nan]}, ... index=['a', 'b', 'c', 'd'], columns=['a', 'b']) >>> df a b a 2.0 2.0 b 2.0 NaN c 4.0 2.0 d NaN NaN >>> df.a.divide(df.b) a 1.0 b NaN c 2.0 d NaN Name: a, dtype: float64 >>> df.a.rdiv(df.b) a 1.0 b NaN c 0.5 d NaN Name: a, dtype: float64 """ _pow_example_SERIES = """ Examples -------- >>> df = ks.DataFrame({'a': [2, 2, 4, np.nan], ... 'b': [2, np.nan, 2, np.nan]}, ... index=['a', 'b', 'c', 'd'], columns=['a', 'b']) >>> df a b a 2.0 2.0 b 2.0 NaN c 4.0 2.0 d NaN NaN >>> df.a.pow(df.b) a 4.0 b NaN c 16.0 d NaN Name: a, dtype: float64 >>> df.a.rpow(df.b) a 4.0 b NaN c 16.0 d NaN Name: a, dtype: float64 """ _mod_example_SERIES = """ Examples -------- >>> df = ks.DataFrame({'a': [2, 2, 4, np.nan], ... 'b': [2, np.nan, 2, np.nan]}, ... index=['a', 'b', 'c', 'd'], columns=['a', 'b']) >>> df a b a 2.0 2.0 b 2.0 NaN c 4.0 2.0 d NaN NaN >>> df.a.mod(df.b) a 0.0 b NaN c 0.0 d NaN Name: a, dtype: float64 >>> df.a.rmod(df.b) a 0.0 b NaN c 2.0 d NaN Name: a, dtype: float64 """ _floordiv_example_SERIES = """ Examples -------- >>> df = ks.DataFrame({'a': [2, 2, 4, np.nan], ... 'b': [2, np.nan, 2, np.nan]}, ... index=['a', 'b', 'c', 'd'], columns=['a', 'b']) >>> df a b a 2.0 2.0 b 2.0 NaN c 4.0 2.0 d NaN NaN >>> df.a.floordiv(df.b) a 1.0 b NaN c 2.0 d NaN Name: a, dtype: float64 >>> df.a.rfloordiv(df.b) a 1.0 b NaN c 0.0 d NaN Name: a, dtype: float64 """ T = TypeVar("T") # Needed to disambiguate Series.str and str type str_type = str class Series(_Frame, IndexOpsMixin, Generic[T]): """ Koala Series that corresponds to Pandas Series logically. This holds Spark Column internally. :ivar _internal: an internal immutable Frame to manage metadata. :type _internal: _InternalFrame :ivar _kdf: Parent's Koalas DataFrame :type _kdf: ks.DataFrame Parameters ---------- data : array-like, dict, or scalar value, Pandas Series Contains data stored in Series If data is a dict, argument order is maintained for Python 3.6 and later. Note that if `data` is a Pandas Series, other arguments should not be used. index : array-like or Index (1d) Values must be hashable and have the same length as `data`. Non-unique index values are allowed. Will default to RangeIndex (0, 1, 2, ..., n) if not provided. If both a dict and index sequence are used, the index will override the keys found in the dict. 
dtype : numpy.dtype or None If None, dtype will be inferred copy : boolean, default False Copy input data """ def __init__(self, data=None, index=None, dtype=None, name=None, copy=False, fastpath=False, anchor=None): if isinstance(data, _InternalFrame): assert dtype is None assert name is None assert not copy assert not fastpath IndexOpsMixin.__init__(self, data, anchor) else: if isinstance(data, pd.Series): assert index is None assert dtype is None assert name is None assert not copy assert anchor is None assert not fastpath s = data else: s = pd.Series( data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath) kdf = DataFrame(s) IndexOpsMixin.__init__(self, kdf._internal.copy(scol=kdf._internal.data_scols[0]), kdf) @property def _index_map(self) -> List[IndexMap]: return self._internal.index_map def _with_new_scol(self, scol: spark.Column) -> 'Series': """ Copy Koalas Series with the new Spark Column. :param scol: the new Spark Column :return: the copied Series """ return Series(self._internal.copy(scol=scol), anchor=self._kdf) @property def dtypes(self): """Return the dtype object of the underlying data. >>> s = ks.Series(list('abc')) >>> s.dtype == s.dtypes True """ return self.dtype @property def spark_type(self): """ Returns the data type as defined by Spark, as a Spark DataType object.""" return self.schema.fields[-1].dataType plot = CachedAccessor("plot", KoalasSeriesPlotMethods) # Arithmetic Operators def add(self, other): return (self + other).rename(self.name) add.__doc__ = _flex_doc_SERIES.format( desc='Addition', op_name="+", equiv="series + other", reverse='radd', series_examples=_add_example_SERIES) def radd(self, other): return (other + self).rename(self.name) radd.__doc__ = _flex_doc_SERIES.format( desc='Reverse Addition', op_name="+", equiv="other + series", reverse='add', series_examples=_add_example_SERIES) def div(self, other): return (self / other).rename(self.name) div.__doc__ = _flex_doc_SERIES.format( desc='Floating division', op_name="/", equiv="series / other", reverse='rdiv', series_examples=_div_example_SERIES) divide = div def rdiv(self, other): return (other / self).rename(self.name) rdiv.__doc__ = _flex_doc_SERIES.format( desc='Reverse Floating division', op_name="/", equiv="other / series", reverse='div', series_examples=_div_example_SERIES) def truediv(self, other): return (self / other).rename(self.name) truediv.__doc__ = _flex_doc_SERIES.format( desc='Floating division', op_name="/", equiv="series / other", reverse='rtruediv', series_examples=_div_example_SERIES) def rtruediv(self, other): return (other / self).rename(self.name) rtruediv.__doc__ = _flex_doc_SERIES.format( desc='Reverse Floating division', op_name="/", equiv="other / series", reverse='truediv', series_examples=_div_example_SERIES) def mul(self, other): return (self * other).rename(self.name) mul.__doc__ = _flex_doc_SERIES.format( desc='Multiplication', op_name="*", equiv="series * other", reverse='rmul', series_examples=_mul_example_SERIES) multiply = mul def rmul(self, other): return (other * self).rename(self.name) rmul.__doc__ = _flex_doc_SERIES.format( desc='Reverse Multiplication', op_name="*", equiv="other * series", reverse='mul', series_examples=_mul_example_SERIES) def sub(self, other): return (self - other).rename(self.name) sub.__doc__ = _flex_doc_SERIES.format( desc='Subtraction', op_name="-", equiv="series - other", reverse='rsub', series_examples=_sub_example_SERIES) subtract = sub def rsub(self, other): return (other - self).rename(self.name) 
rsub.__doc__ = _flex_doc_SERIES.format( desc='Reverse Subtraction', op_name="-", equiv="other - series", reverse='sub', series_examples=_sub_example_SERIES) def mod(self, other): return (self % other).rename(self.name) mod.__doc__ = _flex_doc_SERIES.format( desc='Modulo', op_name='%', equiv='series % other', reverse='rmod', series_examples=_mod_example_SERIES) def rmod(self, other): return (other % self).rename(self.name) rmod.__doc__ = _flex_doc_SERIES.format( desc='Reverse Modulo', op_name='%', equiv='other % series', reverse='mod', series_examples=_mod_example_SERIES) def pow(self, other): return (self ** other).rename(self.name) pow.__doc__ = _flex_doc_SERIES.format( desc='Exponential power of series', op_name='**', equiv='series ** other', reverse='rpow', series_examples=_pow_example_SERIES) def rpow(self, other): return (other ** self).rename(self.name) rpow.__doc__ = _flex_doc_SERIES.format( desc='Reverse Exponential power', op_name='**', equiv='other ** series', reverse='pow', series_examples=_pow_example_SERIES) def floordiv(self, other): return (self // other).rename(self.name) floordiv.__doc__ = _flex_doc_SERIES.format( desc='Integer division', op_name='//', equiv='series // other', reverse='rfloordiv', series_examples=_floordiv_example_SERIES) def rfloordiv(self, other): return (other // self).rename(self.name) rfloordiv.__doc__ = _flex_doc_SERIES.format( desc='Reverse Integer division', op_name='//', equiv='other // series', reverse='floordiv', series_examples=_floordiv_example_SERIES) # Comparison Operators def eq(self, other): """ Compare if the current value is equal to the other. >>> df = ks.DataFrame({'a': [1, 2, 3, 4], ... 'b': [1, np.nan, 1, np.nan]}, ... index=['a', 'b', 'c', 'd'], columns=['a', 'b']) >>> df.a == 1 a True b False c False d False Name: a, dtype: bool >>> df.b.eq(1) a True b None c True d None Name: b, dtype: object """ return (self == other).rename(self.name) equals = eq def gt(self, other): """ Compare if the current value is greater than the other. >>> df = ks.DataFrame({'a': [1, 2, 3, 4], ... 'b': [1, np.nan, 1, np.nan]}, ... index=['a', 'b', 'c', 'd'], columns=['a', 'b']) >>> df.a > 1 a False b True c True d True Name: a, dtype: bool >>> df.b.gt(1) a False b None c False d None Name: b, dtype: object """ return (self > other).rename(self.name) def ge(self, other): """ Compare if the current value is greater than or equal to the other. >>> df = ks.DataFrame({'a': [1, 2, 3, 4], ... 'b': [1, np.nan, 1, np.nan]}, ... index=['a', 'b', 'c', 'd'], columns=['a', 'b']) >>> df.a >= 2 a False b True c True d True Name: a, dtype: bool >>> df.b.ge(2) a False b None c False d None Name: b, dtype: object """ return (self >= other).rename(self.name) def lt(self, other): """ Compare if the current value is less than the other. >>> df = ks.DataFrame({'a': [1, 2, 3, 4], ... 'b': [1, np.nan, 1, np.nan]}, ... index=['a', 'b', 'c', 'd'], columns=['a', 'b']) >>> df.a < 1 a False b False c False d False Name: a, dtype: bool >>> df.b.lt(2) a True b None c True d None Name: b, dtype: object """ return (self < other).rename(self.name) def le(self, other): """ Compare if the current value is less than or equal to the other. >>> df = ks.DataFrame({'a': [1, 2, 3, 4], ... 'b': [1, np.nan, 1, np.nan]}, ... 
index=['a', 'b', 'c', 'd'], columns=['a', 'b']) >>> df.a <= 2 a True b True c False d False Name: a, dtype: bool >>> df.b.le(2) a True b None c True d None Name: b, dtype: object """ return (self <= other).rename(self.name) def ne(self, other): """ Compare if the current value is not equal to the other. >>> df = ks.DataFrame({'a': [1, 2, 3, 4], ... 'b': [1, np.nan, 1, np.nan]}, ... index=['a', 'b', 'c', 'd'], columns=['a', 'b']) >>> df.a != 1 a False b True c True d True Name: a, dtype: bool >>> df.b.ne(1) a False b None c False d None Name: b, dtype: object """ return (self != other).rename(self.name) # TODO: arg should support Series # TODO: NaN and None def map(self, arg): """ Map values of Series according to input correspondence. Used for substituting each value in a Series with another value that may be derived from a function or a ``dict``. .. note:: make sure the size of the dictionary is not huge because it could downgrade the performance or throw OutOfMemoryError due to a huge expression within Spark. Consider passing a function as an alternative in this case. Parameters ---------- arg : function or dict Mapping correspondence. Returns ------- Series Same index as caller. See Also -------- Series.apply : For applying more complex functions on a Series. DataFrame.applymap : Apply a function elementwise on a whole DataFrame. Notes ----- When ``arg`` is a dictionary, values in Series that are not in the dictionary (as keys) are converted to ``None``. However, if the dictionary is a ``dict`` subclass that defines ``__missing__`` (i.e. provides a method for default values), then this default is used rather than ``None``. Examples -------- >>> s = ks.Series(['cat', 'dog', None, 'rabbit']) >>> s 0 cat 1 dog 2 None 3 rabbit Name: 0, dtype: object ``map`` accepts a ``dict``. Values that are not found in the ``dict`` are converted to ``None``, unless the dict has a default value (e.g. ``defaultdict``): >>> s.map({'cat': 'kitten', 'dog': 'puppy'}) 0 kitten 1 puppy 2 None 3 None Name: 0, dtype: object It also accepts a function: >>> def format(x) -> str: ... return 'I am a {}'.format(x) >>> s.map(format) 0 I am a cat 1 I am a dog 2 I am a None 3 I am a rabbit Name: 0, dtype: object """ if isinstance(arg, dict): is_start = True # In case dictionary is empty. current = F.when(F.lit(False), F.lit(None).cast(self.spark_type)) for to_replace, value in arg.items(): if is_start: current = F.when(self._scol == F.lit(to_replace), value) is_start = False else: current = current.when(self._scol == F.lit(to_replace), value) if hasattr(arg, "__missing__"): tmp_val = arg[np._NoValue] del arg[np._NoValue] # Remove in case it's set in defaultdict. current = current.otherwise(F.lit(tmp_val)) else: current = current.otherwise(F.lit(None).cast(self.spark_type)) return self._with_new_scol(current).rename(self.name) else: return self.apply(arg) def astype(self, dtype) -> 'Series': """ Cast a Koalas object to a specified dtype ``dtype``. Parameters ---------- dtype : data type Use a numpy.dtype or Python type to cast entire pandas object to the same type. Returns ------- casted : same type as caller See Also -------- to_datetime : Convert argument to datetime.
Examples -------- >>> ser = ks.Series([1, 2], dtype='int32') >>> ser 0 1 1 2 Name: 0, dtype: int32 >>> ser.astype('int64') 0 1 1 2 Name: 0, dtype: int64 """ from databricks.koalas.typedef import as_spark_type spark_type = as_spark_type(dtype) if not spark_type: raise ValueError("Type {} not understood".format(dtype)) return self._with_new_scol(self._scol.cast(spark_type)) def getField(self, name): if not isinstance(self.schema, StructType): raise AttributeError("Not a struct: {}".format(self.schema)) else: fnames = self.schema.fieldNames() if name not in fnames: raise AttributeError( "Field {} not found, possible values are {}".format(name, ", ".join(fnames))) return self._with_new_scol(self._scol.getField(name)) def alias(self, name): """An alias for :meth:`Series.rename`.""" return self.rename(name) @property def schema(self) -> StructType: """Return the underlying Spark DataFrame's schema.""" return self.to_dataframe()._sdf.schema @property def shape(self): """Return a tuple of the shape of the underlying data.""" return len(self), @property def ndim(self): """Returns number of dimensions of the Series.""" return 1 @property def name(self) -> Union[str, Tuple[str, ...]]: """Return name of the Series.""" name = self._internal.column_index[0] if name is not None and len(name) == 1: return name[0] else: return name @name.setter def name(self, name: Union[str, Tuple[str, ...]]): self.rename(name, inplace=True) # TODO: Functionality and documentation should be matched. Currently, changing index labels # taking dictionary and function to change index are not supported. def rename(self, index: Union[str, Tuple[str, ...]] = None, **kwargs): """ Alter Series name. Parameters ---------- index : scalar Scalar will alter the ``Series.name`` attribute. inplace : bool, default False Whether to return a new Series. If True then value of copy is ignored. Returns ------- Series Series with name altered. Examples -------- >>> s = ks.Series([1, 2, 3]) >>> s 0 1 1 2 2 3 Name: 0, dtype: int64 >>> s.rename("my_name") # scalar, changes Series.name 0 1 1 2 2 3 Name: my_name, dtype: int64 """ if index is None: scol = self._scol else: scol = self._scol.alias(str(index)) internal = self._internal.copy( scol=scol, column_index=[index if index is None or isinstance(index, tuple) else (index,)]) if kwargs.get('inplace', False): self._internal = internal return self else: return Series(internal, anchor=self._kdf) @property def index(self): """The index (axis labels) Column of the Series. Currently not supported when the DataFrame has no index. See Also -------- Index """ return self._kdf.index @property def is_unique(self): """ Return boolean if values in the object are unique Returns ------- is_unique : boolean >>> ks.Series([1, 2, 3]).is_unique True >>> ks.Series([1, 2, 2]).is_unique False >>> ks.Series([1, 2, 3, None]).is_unique True """ sdf = self._kdf._sdf.select(self._scol) col = self._scol # Here we check: # 1. the distinct count without nulls and count without nulls for non-null values # 2. count null values and see if null is a distinct value. # # This workaround is in order to calculate the distinct count including nulls in # single pass. Note that COUNT(DISTINCT expr) in Spark is designed to ignore nulls. return sdf.select( (F.count(col) == F.countDistinct(col)) & (F.count(F.when(col.isNull(), 1).otherwise(None)) <= 1) ).collect()[0][0] def reset_index(self, level=None, drop=False, name=None, inplace=False): """ Generate a new DataFrame or Series with the index reset. 
This is useful when the index needs to be treated as a column, or when the index is meaningless and needs to be reset to the default before another operation. Parameters ---------- level : int, str, tuple, or list, default optional For a Series with a MultiIndex, only remove the specified levels from the index. Removes all levels by default. drop : bool, default False Just reset the index, without inserting it as a column in the new DataFrame. name : object, optional The name to use for the column containing the original Series values. Uses self.name by default. This argument is ignored when drop is True. inplace : bool, default False Modify the Series in place (do not create a new object). Returns ------- Series or DataFrame When `drop` is False (the default), a DataFrame is returned. The newly created columns will come first in the DataFrame, followed by the original Series values. When `drop` is True, a `Series` is returned. In either case, if ``inplace=True``, no value is returned. Examples -------- >>> s = ks.Series([1, 2, 3, 4], name='foo', ... index=pd.Index(['a', 'b', 'c', 'd'], name='idx')) Generate a DataFrame with default index. >>> s.reset_index() idx foo 0 a 1 1 b 2 2 c 3 3 d 4 To specify the name of the new column use `name`. >>> s.reset_index(name='values') idx values 0 a 1 1 b 2 2 c 3 3 d 4 To generate a new Series with the default set `drop` to True. >>> s.reset_index(drop=True) 0 1 1 2 2 3 3 4 Name: foo, dtype: int64 To update the Series in place, without generating a new one set `inplace` to True. Note that it also requires ``drop=True``. >>> s.reset_index(inplace=True, drop=True) >>> s 0 1 1 2 2 3 3 4 Name: foo, dtype: int64 """ if inplace and not drop: raise TypeError('Cannot reset_index inplace on a Series to create a DataFrame') if name is not None: kdf = self.rename(name).to_dataframe() else: kdf = self.to_dataframe() kdf = kdf.reset_index(level=level, drop=drop) if drop: kseries = _col(kdf) if inplace: self._internal = kseries._internal self._kdf = kseries._kdf else: return kseries else: return kdf def to_frame(self, name: Union[str, Tuple[str, ...]] = None) -> spark.DataFrame: """ Convert Series to DataFrame. Parameters ---------- name : object, default None The passed name should substitute for the series name (if it has one). Returns ------- DataFrame DataFrame representation of Series. Examples -------- >>> s = ks.Series(["a", "b", "c"]) >>> s.to_frame() 0 0 a 1 b 2 c >>> s = ks.Series(["a", "b", "c"], name="vals") >>> s.to_frame() vals 0 a 1 b 2 c """ if name is not None: renamed = self.rename(name) else: renamed = self sdf = renamed._internal.spark_internal_df column_index = None # type: Optional[List[Tuple[str, ...]]] if renamed._internal.column_index[0] is None: column_index = [('0',)] column_index_names = None else: column_index = renamed._internal.column_index column_index_names = renamed._internal.column_index_names internal = _InternalFrame(sdf=sdf, data_columns=[sdf.schema[-1].name], index_map=renamed._internal.index_map, column_index=column_index, column_index_names=column_index_names) return DataFrame(internal) to_dataframe = to_frame def to_string(self, buf=None, na_rep='NaN', float_format=None, header=True, index=True, length=False, dtype=False, name=False, max_rows=None): """ Render a string representation of the Series. .. note:: This method should only be used if the resulting Pandas object is expected to be small, as all the data is loaded into the driver's memory. If the input is large, set max_rows parameter. 
Parameters ---------- buf : StringIO-like, optional buffer to write to na_rep : string, optional string representation of NAN to use, default 'NaN' float_format : one-parameter function, optional formatter function to apply to columns' elements if they are floats default None header : boolean, default True Add the Series header (index name) index : bool, optional Add index (row) labels, default True length : boolean, default False Add the Series length dtype : boolean, default False Add the Series dtype name : boolean, default False Add the Series name if not None max_rows : int, optional Maximum number of rows to show before truncating. If None, show all. Returns ------- formatted : string (if not buffer passed) Examples -------- >>> df = ks.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)], columns=['dogs', 'cats']) >>> print(df['dogs'].to_string()) 0 0.2 1 0.0 2 0.6 3 0.2 >>> print(df['dogs'].to_string(max_rows=2)) 0 0.2 1 0.0 """ # Make sure locals() call is at the top of the function so we don't capture local variables. args = locals() if max_rows is not None: kseries = self.head(max_rows) else: kseries = self return validate_arguments_and_invoke_function( kseries._to_internal_pandas(), self.to_string, pd.Series.to_string, args) def to_clipboard(self, excel=True, sep=None, **kwargs): # Docstring defined below by reusing DataFrame.to_clipboard's. args = locals() kseries = self return validate_arguments_and_invoke_function( kseries._to_internal_pandas(), self.to_clipboard, pd.Series.to_clipboard, args) to_clipboard.__doc__ = DataFrame.to_clipboard.__doc__ def to_dict(self, into=dict): """ Convert Series to {label -> value} dict or dict-like object. .. note:: This method should only be used if the resulting Pandas DataFrame is expected to be small, as all the data is loaded into the driver's memory. Parameters ---------- into : class, default dict The collections.abc.Mapping subclass to use as the return object. Can be the actual class or an empty instance of the mapping type you want. If you want a collections.defaultdict, you must pass it initialized. Returns ------- collections.abc.Mapping Key-value representation of Series. Examples -------- >>> s = ks.Series([1, 2, 3, 4]) >>> s_dict = s.to_dict() >>> sorted(s_dict.items()) [(0, 1), (1, 2), (2, 3), (3, 4)] >>> from collections import OrderedDict, defaultdict >>> s.to_dict(OrderedDict) OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)]) >>> dd = defaultdict(list) >>> s.to_dict(dd) # doctest: +ELLIPSIS defaultdict(<class 'list'>, {...}) """ # Make sure locals() call is at the top of the function so we don't capture local variables. args = locals() kseries = self return validate_arguments_and_invoke_function( kseries._to_internal_pandas(), self.to_dict, pd.Series.to_dict, args) def to_latex(self, buf=None, columns=None, col_space=None, header=True, index=True, na_rep='NaN', formatters=None, float_format=None, sparsify=None, index_names=True, bold_rows=False, column_format=None, longtable=None, escape=None, encoding=None, decimal='.', multicolumn=None, multicolumn_format=None, multirow=None): args = locals() kseries = self return validate_arguments_and_invoke_function( kseries._to_internal_pandas(), self.to_latex, pd.Series.to_latex, args) to_latex.__doc__ = DataFrame.to_latex.__doc__ def to_pandas(self): """ Return a pandas Series. .. note:: This method should only be used if the resulting Pandas object is expected to be small, as all the data is loaded into the driver's memory. If the input is large, set max_rows parameter. 
Examples -------- >>> df = ks.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)], columns=['dogs', 'cats']) >>> df['dogs'].to_pandas() 0 0.2 1 0.0 2 0.6 3 0.2 Name: dogs, dtype: float64 """ return _col(self._internal.pandas_df.copy()) # Alias to maintain backward compatibility with Spark toPandas = to_pandas def to_list(self): """ Return a list of the values. These are each a scalar type, which is a Python scalar (for str, int, float) or a pandas scalar (for Timestamp/Timedelta/Interval/Period) .. note:: This method should only be used if the resulting list is expected to be small, as all the data is loaded into the driver's memory. """ return self._to_internal_pandas().to_list() tolist = to_list def fillna(self, value=None, method=None, axis=None, inplace=False, limit=None): """Fill NA/NaN values. .. note:: the current implementation of 'method' parameter in fillna uses Spark's Window without specifying partition specification. This leads to move all data into single partition in single machine and could cause serious performance degradation. Avoid this method against very large dataset. Parameters ---------- value : scalar, dict, Series Value to use to fill holes. alternately a dict/Series of values specifying which value to use for each column. DataFrame is not supported. method : {'backfill', 'bfill', 'pad', 'ffill', None}, default None Method to use for filling holes in reindexed Series pad / ffill: propagate last valid observation forward to next valid backfill / bfill: use NEXT valid observation to fill gap axis : {0 or `index`} 1 and `columns` are not supported. inplace : boolean, default False Fill in place (do not create a new object) limit : int, default None If method is specified, this is the maximum number of consecutive NaN values to forward/backward fill. In other words, if there is a gap with more than this number of consecutive NaNs, it will only be partially filled. If method is not specified, this is the maximum number of entries along the entire axis where NaNs will be filled. Must be greater than 0 if not None Returns ------- Series Series with NA entries filled. Examples -------- >>> s = ks.Series([np.nan, 2, 3, 4, np.nan, 6], name='x') >>> s 0 NaN 1 2.0 2 3.0 3 4.0 4 NaN 5 6.0 Name: x, dtype: float64 Replace all NaN elements with 0s. >>> s.fillna(0) 0 0.0 1 2.0 2 3.0 3 4.0 4 0.0 5 6.0 Name: x, dtype: float64 We can also propagate non-null values forward or backward. 
>>> s.fillna(method='ffill') 0 NaN 1 2.0 2 3.0 3 4.0 4 4.0 5 6.0 Name: x, dtype: float64 >>> s = ks.Series([np.nan, 'a', 'b', 'c', np.nan], name='x') >>> s.fillna(method='ffill') 0 None 1 a 2 b 3 c 4 c Name: x, dtype: object """ return self._fillna(value, method, axis, inplace, limit) def _fillna(self, value=None, method=None, axis=None, inplace=False, limit=None, part_cols=()): if axis is None: axis = 0 if not (axis == 0 or axis == "index"): raise NotImplementedError("fillna currently only works for axis=0 or axis='index'") if (value is None) and (method is None): raise ValueError("Must specify a fillna 'value' or 'method' parameter.") if (method is not None) and (method not in ['ffill', 'pad', 'backfill', 'bfill']): raise ValueError("Expecting 'pad', 'ffill', 'backfill' or 'bfill'.") if self.isnull().sum() == 0: if inplace: self._internal = self._internal.copy() self._kdf = self._kdf.copy() else: return self column_name = self.name scol = self._scol if value is not None: if not isinstance(value, (float, int, str, bool)): raise TypeError("Unsupported type %s" % type(value)) if limit is not None: raise ValueError('limit parameter for value is not support now') scol = F.when(scol.isNull(), value).otherwise(scol) else: if method in ['ffill', 'pad']: func = F.last end = (Window.currentRow - 1) if limit is not None: begin = Window.currentRow - limit else: begin = Window.unboundedPreceding elif method in ['bfill', 'backfill']: func = F.first begin = Window.currentRow + 1 if limit is not None: end = Window.currentRow + limit else: end = Window.unboundedFollowing window = Window.partitionBy(*part_cols).orderBy(self._internal.index_scols)\ .rowsBetween(begin, end) scol = F.when(scol.isNull(), func(scol, True).over(window)).otherwise(scol) kseries = self._with_new_scol(scol).rename(column_name) if inplace: self._internal = kseries._internal self._kdf = kseries._kdf else: return kseries def dropna(self, axis=0, inplace=False, **kwargs): """ Return a new Series with missing values removed. Parameters ---------- axis : {0 or 'index'}, default 0 There is only one axis to drop values from. inplace : bool, default False If True, do operation inplace and return None. **kwargs Not in use. Returns ------- Series Series with NA entries dropped from it. Examples -------- >>> ser = ks.Series([1., 2., np.nan]) >>> ser 0 1.0 1 2.0 2 NaN Name: 0, dtype: float64 Drop NA values from a Series. >>> ser.dropna() 0 1.0 1 2.0 Name: 0, dtype: float64 Keep the Series with valid entries in the same variable. >>> ser.dropna(inplace=True) >>> ser 0 1.0 1 2.0 Name: 0, dtype: float64 """ # TODO: last two examples from Pandas produce different results. kseries = _col(self.to_dataframe().dropna(axis=axis, inplace=False)) if inplace: self._internal = kseries._internal self._kdf = kseries._kdf else: return kseries def clip(self, lower: Union[float, int] = None, upper: Union[float, int] = None) -> 'Series': """ Trim values at input threshold(s). Assigns values outside boundary to boundary values. Parameters ---------- lower : float or int, default None Minimum threshold value. All values below this threshold will be set to it. upper : float or int, default None Maximum threshold value. All values above this threshold will be set to it. 
Returns ------- Series Series with the values outside the clip boundaries replaced Examples -------- >>> ks.Series([0, 2, 4]).clip(1, 3) 0 1 1 2 2 3 Name: 0, dtype: int64 Notes ----- One difference between this implementation and pandas is that running `pd.Series(['a', 'b']).clip(0, 1)` will crash with "TypeError: '<=' not supported between instances of 'str' and 'int'" while `ks.Series(['a', 'b']).clip(0, 1)` will output the original Series, simply ignoring the incompatible types. """ return _col(self.to_dataframe().clip(lower, upper)) def head(self, n=5): """ Return the first n rows. This function returns the first n rows for the object based on position. It is useful for quickly testing if your object has the right type of data in it. Parameters ---------- n : Integer, default = 5 Returns ------- The first n rows of the caller object. Examples -------- >>> df = ks.DataFrame({'animal':['alligator', 'bee', 'falcon', 'lion']}) >>> df.animal.head(2) # doctest: +NORMALIZE_WHITESPACE 0 alligator 1 bee Name: animal, dtype: object """ return _col(self.to_dataframe().head(n)) # TODO: Categorical type isn't supported (due to PySpark's limitation) and # some doctests related to timestamps were not added. def unique(self): """ Return unique values of Series object. Uniques are returned in order of appearance. Hash table-based unique, therefore does NOT sort. .. note:: This method returns a newly created Series, whereas Pandas returns the unique values as a NumPy array. Returns ------- Returns the unique values as a Series. See Examples section. Examples -------- >>> kser = ks.Series([2, 1, 3, 3], name='A') >>> kser.unique() 0 1 1 3 2 2 Name: A, dtype: int64 >>> ks.Series([pd.Timestamp('2016-01-01') for _ in range(3)]).unique() 0 2016-01-01 Name: 0, dtype: datetime64[ns] >>> kser.name = ('x', 'a') >>> kser.unique() 0 1 1 3 2 2 Name: (x, a), dtype: int64 """ sdf = self._internal.sdf.select(self._scol).distinct() internal = _InternalFrame(sdf=sdf, data_columns=[self._internal.data_columns[0]], column_index=[self._internal.column_index[0]], column_index_names=self._internal.column_index_names) return _col(DataFrame(internal)) def nunique(self, dropna: bool = True, approx: bool = False, rsd: float = 0.05) -> int: """ Return number of unique elements in the object. Excludes NA values by default. Parameters ---------- dropna : bool, default True Don’t include NaN in the count. approx: bool, default False If False, will use the exact algorithm and return the exact number of unique values. If True, it uses the HyperLogLog approximate algorithm, which is significantly faster for large amounts of data. Note: This parameter is specific to Koalas and is not found in pandas. rsd: float, default 0.05 Maximum estimation error allowed in the HyperLogLog algorithm. Note: Just like ``approx`` this parameter is specific to Koalas. Returns ------- The number of unique values as an int. Examples -------- >>> ks.Series([1, 2, 3, np.nan]).nunique() 3 >>> ks.Series([1, 2, 3, np.nan]).nunique(dropna=False) 4 On big data, we recommend using the approximate algorithm to speed up this function. The result will be very close to the exact unique count.
>>> ks.Series([1, 2, 3, np.nan]).nunique(approx=True) 3 """ res = self._kdf._sdf.select([self._nunique(dropna, approx, rsd)]) return res.collect()[0][0] def _nunique(self, dropna=True, approx=False, rsd=0.05): name = self.name count_fn = partial(F.approx_count_distinct, rsd=rsd) if approx else F.countDistinct if dropna: return count_fn(name).alias(name) else: return (count_fn(name) + F.when(F.count(F.when(self._internal.scol_for(name).isNull(), 1) .otherwise(None)) >= 1, 1).otherwise(0)).alias(name) # TODO: Update Documentation for Bins Parameter when its supported def value_counts(self, normalize=False, sort=True, ascending=False, bins=None, dropna=True): """ Return a Series containing counts of unique values. The resulting object will be in descending order so that the first element is the most frequently-occurring element. Excludes NA values by default. Parameters ---------- normalize : boolean, default False If True then the object returned will contain the relative frequencies of the unique values. sort : boolean, default True Sort by values. ascending : boolean, default False Sort in ascending order. bins : Not Yet Supported dropna : boolean, default True Don't include counts of NaN. Returns ------- counts : Series See Also -------- Series.count: Number of non-NA elements in a Series. Examples -------- >>> df = ks.DataFrame({'x':[0, 0, 1, 1, 1, np.nan]}) >>> df.x.value_counts() # doctest: +NORMALIZE_WHITESPACE 1.0 3 0.0 2 Name: x, dtype: int64 With `normalize` set to `True`, returns the relative frequency by dividing all values by the sum of values. >>> df.x.value_counts(normalize=True) # doctest: +NORMALIZE_WHITESPACE 1.0 0.6 0.0 0.4 Name: x, dtype: float64 **dropna** With `dropna` set to `False` we can also see NaN index values. >>> df.x.value_counts(dropna=False) # doctest: +NORMALIZE_WHITESPACE 1.0 3 0.0 2 NaN 1 Name: x, dtype: int64 """ if bins is not None: raise NotImplementedError("value_counts currently does not support bins") if dropna: sdf_dropna = self._kdf._sdf.filter(self.notna()._scol) else: sdf_dropna = self._kdf._sdf index_name = '__index_level_0__' sdf = sdf_dropna.groupby(self._scol.alias(index_name)).count() if sort: if ascending: sdf = sdf.orderBy(F.col('count')) else: sdf = sdf.orderBy(F.col('count').desc()) if normalize: sum = sdf_dropna.count() sdf = sdf.withColumn('count', F.col('count') / F.lit(sum)) internal = _InternalFrame(sdf=sdf, data_columns=['count'], index_map=[(index_name, None)], column_index=self._internal.column_index, column_index_names=self._internal.column_index_names) return _col(DataFrame(internal)) def sort_values(self, ascending: bool = True, inplace: bool = False, na_position: str = 'last') -> Union['Series', None]: """ Sort by the values. Sort a Series in ascending or descending order by some criterion. Parameters ---------- ascending : bool or list of bool, default True Sort ascending vs. descending. Specify list for multiple sort orders. If this is a list of bools, must match the length of the by. inplace : bool, default False if True, perform operation in-place na_position : {'first', 'last'}, default 'last' `first` puts NaNs at the beginning, `last` puts NaNs at the end Returns ------- sorted_obj : Series ordered by values. 
Examples -------- >>> s = ks.Series([np.nan, 1, 3, 10, 5]) >>> s 0 NaN 1 1.0 2 3.0 3 10.0 4 5.0 Name: 0, dtype: float64 Sort values ascending order (default behaviour) >>> s.sort_values(ascending=True) 1 1.0 2 3.0 4 5.0 3 10.0 0 NaN Name: 0, dtype: float64 Sort values descending order >>> s.sort_values(ascending=False) 3 10.0 4 5.0 2 3.0 1 1.0 0 NaN Name: 0, dtype: float64 Sort values inplace >>> s.sort_values(ascending=False, inplace=True) >>> s 3 10.0 4 5.0 2 3.0 1 1.0 0 NaN Name: 0, dtype: float64 Sort values putting NAs first >>> s.sort_values(na_position='first') 0 NaN 1 1.0 2 3.0 4 5.0 3 10.0 Name: 0, dtype: float64 Sort a series of strings >>> s = ks.Series(['z', 'b', 'd', 'a', 'c']) >>> s 0 z 1 b 2 d 3 a 4 c Name: 0, dtype: object >>> s.sort_values() 3 a 1 b 4 c 2 d 0 z Name: 0, dtype: object """ kseries = _col(self.to_dataframe().sort_values(by=self.name, ascending=ascending, na_position=na_position)) if inplace: self._internal = kseries._internal self._kdf = kseries._kdf return None else: return kseries def sort_index(self, axis: int = 0, level: Optional[Union[int, List[int]]] = None, ascending: bool = True, inplace: bool = False, kind: str = None, na_position: str = 'last') \ -> Optional['Series']: """ Sort object by labels (along an axis) Parameters ---------- axis : index, columns to direct sorting. Currently, only axis = 0 is supported. level : int or level name or list of ints or list of level names if not None, sort on values in specified index level(s) ascending : boolean, default True Sort ascending vs. descending inplace : bool, default False if True, perform operation in-place kind : str, default None Koalas does not allow specifying the sorting algorithm at the moment, default None na_position : {‘first’, ‘last’}, default ‘last’ first puts NaNs at the beginning, last puts NaNs at the end. Not implemented for MultiIndex. Returns ------- sorted_obj : Series Examples -------- >>> df = ks.Series([2, 1, np.nan], index=['b', 'a', np.nan]) >>> df.sort_index() a 1.0 b 2.0 NaN NaN Name: 0, dtype: float64 >>> df.sort_index(ascending=False) b 2.0 a 1.0 NaN NaN Name: 0, dtype: float64 >>> df.sort_index(na_position='first') NaN NaN a 1.0 b 2.0 Name: 0, dtype: float64 >>> df.sort_index(inplace=True) >>> df a 1.0 b 2.0 NaN NaN Name: 0, dtype: float64 >>> df = ks.Series(range(4), index=[['b', 'b', 'a', 'a'], [1, 0, 1, 0]], name='0') >>> df.sort_index() a 0 3 1 2 b 0 1 1 0 Name: 0, dtype: int64 >>> df.sort_index(level=1) # doctest: +SKIP a 0 3 b 0 1 a 1 2 b 1 0 Name: 0, dtype: int64 >>> df.sort_index(level=[1, 0]) a 0 3 b 0 1 a 1 2 b 1 0 Name: 0, dtype: int64 """ kseries = _col(self.to_dataframe().sort_index(axis=axis, level=level, ascending=ascending, kind=kind, na_position=na_position)) if inplace: self._internal = kseries._internal self._kdf = kseries._kdf return None else: return kseries def add_prefix(self, prefix): """ Prefix labels with string `prefix`. For Series, the row labels are prefixed. For DataFrame, the column labels are prefixed. Parameters ---------- prefix : str The string to add before each label. Returns ------- Series New Series with updated labels. See Also -------- Series.add_suffix: Suffix column labels with string `suffix`. DataFrame.add_suffix: Suffix column labels with string `suffix`. DataFrame.add_prefix: Prefix column labels with string `prefix`. 
Examples -------- >>> s = ks.Series([1, 2, 3, 4]) >>> s 0 1 1 2 2 3 3 4 Name: 0, dtype: int64 >>> s.add_prefix('item_') item_0 1 item_1 2 item_2 3 item_3 4 Name: 0, dtype: int64 """ assert isinstance(prefix, str) kdf = self.to_dataframe() internal = kdf._internal sdf = internal.sdf sdf = sdf.select([F.concat(F.lit(prefix), scol_for(sdf, index_column)).alias(index_column) for index_column in internal.index_columns] + internal.data_scols) kdf._internal = internal.copy(sdf=sdf) return _col(kdf) def add_suffix(self, suffix): """ Suffix labels with string suffix. For Series, the row labels are suffixed. For DataFrame, the column labels are suffixed. Parameters ---------- suffix : str The string to add after each label. Returns ------- Series New Series with updated labels. See Also -------- Series.add_prefix: Prefix row labels with string `prefix`. DataFrame.add_prefix: Prefix column labels with string `prefix`. DataFrame.add_suffix: Suffix column labels with string `suffix`. Examples -------- >>> s = ks.Series([1, 2, 3, 4]) >>> s 0 1 1 2 2 3 3 4 Name: 0, dtype: int64 >>> s.add_suffix('_item') 0_item 1 1_item 2 2_item 3 3_item 4 Name: 0, dtype: int64 """ assert isinstance(suffix, str) kdf = self.to_dataframe() internal = kdf._internal sdf = internal.sdf sdf = sdf.select([F.concat(scol_for(sdf, index_column), F.lit(suffix)).alias(index_column) for index_column in internal.index_columns] + internal.data_scols) kdf._internal = internal.copy(sdf=sdf) return _col(kdf) def corr(self, other, method='pearson'): """ Compute correlation with `other` Series, excluding missing values. Parameters ---------- other : Series method : {'pearson', 'spearman'} * pearson : standard correlation coefficient * spearman : Spearman rank correlation Returns ------- correlation : float Examples -------- >>> df = ks.DataFrame({'s1': [.2, .0, .6, .2], ... 's2': [.3, .6, .0, .1]}) >>> s1 = df.s1 >>> s2 = df.s2 >>> s1.corr(s2, method='pearson') # doctest: +ELLIPSIS -0.851064... >>> s1.corr(s2, method='spearman') # doctest: +ELLIPSIS -0.948683... Notes ----- There are behavior differences between Koalas and pandas. * the `method` argument only accepts 'pearson', 'spearman' * the data should not contain NaNs. Koalas will return an error. * Koalas doesn't support the following argument(s). * `min_periods` argument is not supported """ # This implementation is suboptimal because it computes more than necessary, # but it should be a start df = self._kdf.assign(corr_arg1=self, corr_arg2=other)[["corr_arg1", "corr_arg2"]] c = df.corr(method=method) return c.loc["corr_arg1", "corr_arg2"] def nsmallest(self, n: int = 5) -> 'Series': """ Return the smallest `n` elements. Parameters ---------- n : int, default 5 Return this many ascending sorted values. Returns ------- Series The `n` smallest values in the Series, sorted in increasing order. See Also -------- Series.nlargest: Get the `n` largest elements. Series.sort_values: Sort Series by values. Series.head: Return the first `n` rows. Notes ----- Faster than ``.sort_values().head(n)`` for small `n` relative to the size of the ``Series`` object. In Koalas, thanks to Spark's lazy execution and query optimizer, the two would have same performance. Examples -------- >>> data = [1, 2, 3, 4, np.nan ,6, 7, 8] >>> s = ks.Series(data) >>> s 0 1.0 1 2.0 2 3.0 3 4.0 4 NaN 5 6.0 6 7.0 7 8.0 Name: 0, dtype: float64 The `n` largest elements where ``n=5`` by default. 
>>> s.nsmallest() 0 1.0 1 2.0 2 3.0 3 4.0 5 6.0 Name: 0, dtype: float64 >>> s.nsmallest(3) 0 1.0 1 2.0 2 3.0 Name: 0, dtype: float64 """ return _col(self._kdf.nsmallest(n=n, columns=self.name)) def nlargest(self, n: int = 5) -> 'Series': """ Return the largest `n` elements. Parameters ---------- n : int, default 5 Returns ------- Series The `n` largest values in the Series, sorted in decreasing order. See Also -------- Series.nsmallest: Get the `n` smallest elements. Series.sort_values: Sort Series by values. Series.head: Return the first `n` rows. Notes ----- Faster than ``.sort_values(ascending=False).head(n)`` for small `n` relative to the size of the ``Series`` object. In Koalas, thanks to Spark's lazy execution and query optimizer, the two would have same performance. Examples -------- >>> data = [1, 2, 3, 4, np.nan ,6, 7, 8] >>> s = ks.Series(data) >>> s 0 1.0 1 2.0 2 3.0 3 4.0 4 NaN 5 6.0 6 7.0 7 8.0 Name: 0, dtype: float64 The `n` largest elements where ``n=5`` by default. >>> s.nlargest() 7 8.0 6 7.0 5 6.0 3 4.0 2 3.0 Name: 0, dtype: float64 >>> s.nlargest(n=3) 7 8.0 6 7.0 5 6.0 Name: 0, dtype: float64 """ return _col(self._kdf.nlargest(n=n, columns=self.name)) def count(self): """ Return number of non-NA/null observations in the Series. Returns ------- nobs : int Examples -------- Constructing DataFrame from a dictionary: >>> df = ks.DataFrame({"Person": ... ["John", "Myla", "Lewis", "John", "Myla"], ... "Age": [24., np.nan, 21., 33, 26]}) Notice the uncounted NA values: >>> df['Person'].count() 5 >>> df['Age'].count() 4 """ return self._reduce_for_stat_function(_Frame._count_expr, name="count") def append(self, to_append: 'Series', ignore_index: bool = False, verify_integrity: bool = False) -> 'Series': """ Concatenate two or more Series. Parameters ---------- to_append : Series or list/tuple of Series ignore_index : boolean, default False If True, do not use the index labels. verify_integrity : boolean, default False If True, raise Exception on creating index with duplicates Returns ------- appended : Series Examples -------- >>> s1 = ks.Series([1, 2, 3]) >>> s2 = ks.Series([4, 5, 6]) >>> s3 = ks.Series([4, 5, 6], index=[3,4,5]) >>> s1.append(s2) 0 1 1 2 2 3 0 4 1 5 2 6 Name: 0, dtype: int64 >>> s1.append(s3) 0 1 1 2 2 3 3 4 4 5 5 6 Name: 0, dtype: int64 With ignore_index set to True: >>> s1.append(s2, ignore_index=True) 0 1 1 2 2 3 3 4 4 5 5 6 Name: 0, dtype: int64 """ return _col(self.to_dataframe().append(to_append.to_dataframe(), ignore_index, verify_integrity)) def sample(self, n: Optional[int] = None, frac: Optional[float] = None, replace: bool = False, random_state: Optional[int] = None) -> 'Series': return _col(self.to_dataframe().sample( n=n, frac=frac, replace=replace, random_state=random_state)) sample.__doc__ = DataFrame.sample.__doc__ def hist(self, bins=10, **kwds): return self.plot.hist(bins, **kwds) hist.__doc__ = KoalasSeriesPlotMethods.hist.__doc__ def apply(self, func, args=(), **kwds): """ Invoke function on values of Series. Can be a Python function that only works on the Series. .. note:: unlike pandas, it is required for `func` to specify return type hint. Parameters ---------- func : function Python function to apply. Note that type hint for return type is required. args : tuple Positional arguments passed to func after the series value. **kwds Additional keyword arguments passed to func. Returns ------- Series Examples -------- Create a Series with typical summer temperatures for each city. >>> s = ks.Series([20, 21, 12], ... 
index=['London', 'New York', 'Helsinki']) >>> s London 20 New York 21 Helsinki 12 Name: 0, dtype: int64 Square the values by defining a function and passing it as an argument to ``apply()``. >>> def square(x) -> np.int64: ... return x ** 2 >>> s.apply(square) London 400 New York 441 Helsinki 144 Name: 0, dtype: int64 Define a custom function that needs additional positional arguments and pass these additional arguments using the ``args`` keyword >>> def subtract_custom_value(x, custom_value) -> np.int64: ... return x - custom_value >>> s.apply(subtract_custom_value, args=(5,)) London 15 New York 16 Helsinki 7 Name: 0, dtype: int64 Define a custom function that takes keyword arguments and pass these arguments to ``apply`` >>> def add_custom_values(x, **kwargs) -> np.int64: ... for month in kwargs: ... x += kwargs[month] ... return x >>> s.apply(add_custom_values, june=30, july=20, august=25) London 95 New York 96 Helsinki 87 Name: 0, dtype: int64 Use a function from the Numpy library >>> def numpy_log(col) -> np.float64: ... return np.log(col) >>> s.apply(numpy_log) London 2.995732 New York 3.044522 Helsinki 2.484907 Name: 0, dtype: float64 """ assert callable(func), "the first argument should be a callable function." spec = inspect.getfullargspec(func) return_sig = spec.annotations.get("return", None) if return_sig is None: raise ValueError("Given function must have return type hint; however, not found.") apply_each = wraps(func)(lambda s, *a, **k: s.apply(func, args=a, **k)) wrapped = ks.pandas_wraps(return_col=return_sig)(apply_each) return wrapped(self, *args, **kwds).rename(self.name) def transpose(self, *args, **kwargs): """ Return the transpose, which is by definition self. Examples -------- It returns the same object as the transpose of the given series object, which is by definition self. >>> s = ks.Series([1, 2, 3]) >>> s 0 1 1 2 2 3 Name: 0, dtype: int64 >>> s.transpose() 0 1 1 2 2 3 Name: 0, dtype: int64 """ return Series(self._internal.copy(), anchor=self._kdf) T = property(transpose) def transform(self, func, *args, **kwargs): """ Call ``func`` producing the same type as `self` with transformed values and that has the same axis length as input. .. note:: unlike pandas, it is required for `func` to specify return type hint. Parameters ---------- func : function or list A function or a list of functions to use for transforming the data. *args Positional arguments to pass to `func`. **kwargs Keyword arguments to pass to `func`. Returns ------- An instance of the same type with `self` that must have the same length as input. See Also -------- Series.apply : Invoke function on Series. Examples -------- >>> s = ks.Series(range(3)) >>> s 0 0 1 1 2 2 Name: 0, dtype: int64 >>> def sqrt(x) -> float: ... return np.sqrt(x) >>> s.transform(sqrt) 0 0.000000 1 1.000000 2 1.414214 Name: 0, dtype: float32 Even though the resulting instance must have the same length as the input, it is possible to provide several input functions: >>> def exp(x) -> float: ... 
return np.exp(x) >>> s.transform([sqrt, exp]) sqrt exp 0 0.000000 1.000000 1 1.000000 2.718282 2 1.414214 7.389056 """ if isinstance(func, list): applied = [] for f in func: applied.append(self.apply(f).rename(f.__name__)) sdf = self._kdf._sdf.select( self._internal.index_scols + [c._scol for c in applied]) internal = self.to_dataframe()._internal.copy( sdf=sdf, data_columns=[c._internal.data_columns[0] for c in applied], column_index=[c._internal.column_index[0] for c in applied], column_index_names=None) return DataFrame(internal) else: return self.apply(func, args=args, **kwargs) def round(self, decimals=0): """ Round each value in a Series to the given number of decimals. Parameters ---------- decimals : int Number of decimal places to round to (default: 0). If decimals is negative, it specifies the number of positions to the left of the decimal point. Returns ------- Series object See Also -------- DataFrame.round Examples -------- >>> df = ks.Series([0.028208, 0.038683, 0.877076], name='x') >>> df 0 0.028208 1 0.038683 2 0.877076 Name: x, dtype: float64 >>> df.round(2) 0 0.03 1 0.04 2 0.88 Name: x, dtype: float64 """ if not isinstance(decimals, int): raise ValueError("decimals must be an integer") column_name = self.name scol = F.round(self._scol, decimals) return self._with_new_scol(scol).rename(column_name) # TODO: add 'interpolation' parameter. def quantile(self, q=0.5, accuracy=10000): """ Return value at the given quantile. .. note:: Unlike pandas', the quantile in Koalas is an approximated quantile based upon approximate percentile computation because computing quantile across a large dataset is extremely expensive. Parameters ---------- q : float or array-like, default 0.5 (50% quantile) 0 <= q <= 1, the quantile(s) to compute. accuracy : int, optional Default accuracy of approximation. Larger value means better accuracy. The relative error can be deduced by 1.0 / accuracy. Returns ------- float or Series If the current object is a Series and ``q`` is an array, a Series will be returned where the index is ``q`` and the values are the quantiles, otherwise a float will be returned. Examples -------- >>> s = ks.Series([1, 2, 3, 4, 5]) >>> s.quantile(.5) 3 >>> s.quantile([.25, .5, .75]) 0.25 2 0.5 3 0.75 4 Name: 0, dtype: int64 """ if not isinstance(accuracy, int): raise ValueError("accuracy must be an integer; however, got [%s]" % type(accuracy)) if isinstance(q, Iterable): q = list(q) for v in q if isinstance(q, list) else [q]: if not isinstance(v, float): raise ValueError( "q must be a float of an array of floats; however, [%s] found." % type(v)) if v < 0.0 or v > 1.0: raise ValueError( "percentiles should all be in the interval [0, 1].") if isinstance(q, list): quantiles = q # TODO: avoid to use dataframe. After this, anchor will be lost. # First calculate the percentiles and map it to each `quantiles` # by creating each entry as a struct. 
So, it becomes an array of # structs as below: # # +--------------------------------+ # | arrays | # +--------------------------------+ # |[[0.25, 2], [0.5, 3], [0.75, 4]]| # +--------------------------------+ sdf = self._kdf._sdf args = ", ".join(map(str, quantiles)) percentile_col = F.expr( "approx_percentile(`%s`, array(%s), %s)" % (self.name, args, accuracy)) sdf = sdf.select(percentile_col.alias("percentiles")) internal_index_column = "__index_level_0__" value_column = "value" cols = [] for i, quantile in enumerate(quantiles): cols.append(F.struct( F.lit("%s" % quantile).alias(internal_index_column), F.expr("percentiles[%s]" % i).alias(value_column))) sdf = sdf.select(F.array(*cols).alias("arrays")) # And then, explode it and manually set the index. # # +-----------------+-----+ # |__index_level_0__|value| # +-----------------+-----+ # | 0.25 | 2| # | 0.5 | 3| # | 0.75 | 4| # +-----------------+-----+ sdf = sdf.select(F.explode(F.col("arrays"))).selectExpr("col.*") internal = self._kdf._internal.copy( sdf=sdf, data_columns=[value_column], index_map=[(internal_index_column, None)], column_index=None, column_index_names=None) return DataFrame(internal)[value_column].rename(self.name) else: return self._reduce_for_stat_function( lambda _: F.expr("approx_percentile(`%s`, %s, %s)" % (self.name, q, accuracy)), name="median") # TODO: add axis, numeric_only, pct, na_option parameter def rank(self, method='average', ascending=True): """ Compute numerical data ranks (1 through n) along axis. Equal values are assigned a rank that is the average of the ranks of those values. .. note:: the current implementation of rank uses Spark's Window without specifying partition specification. This leads to move all data into single partition in single machine and could cause serious performance degradation. Avoid this method against very large dataset. Parameters ---------- method : {'average', 'min', 'max', 'first', 'dense'} * average: average rank of group * min: lowest rank in group * max: highest rank in group * first: ranks assigned in order they appear in the array * dense: like 'min', but rank always increases by 1 between groups ascending : boolean, default True False for ranks by high (1) to low (N) Returns ------- ranks : same type as caller Examples -------- >>> df = ks.DataFrame({'A': [1, 2, 2, 3], 'B': [4, 3, 2, 1]}, columns= ['A', 'B']) >>> df A B 0 1 4 1 2 3 2 2 2 3 3 1 >>> df.rank().sort_index() A B 0 1.0 4.0 1 2.5 3.0 2 2.5 2.0 3 4.0 1.0 If method is set to 'min', it uses the lowest rank in the group. >>> df.rank(method='min').sort_index() A B 0 1.0 4.0 1 2.0 3.0 2 2.0 2.0 3 4.0 1.0 If method is set to 'max', it uses the highest rank in the group. >>> df.rank(method='max').sort_index() A B 0 1.0 4.0 1 3.0 3.0 2 3.0 2.0 3 4.0 1.0 If method is set to 'dense', it leaves no gaps in the group.
>>> df.rank(method='dense').sort_index() A B 0 1.0 4.0 1 2.0 3.0 2 2.0 2.0 3 3.0 1.0 """ return self._rank(method, ascending) def _rank(self, method='average', ascending=True, part_cols=()): if method not in ['average', 'min', 'max', 'first', 'dense']: msg = "method must be one of 'average', 'min', 'max', 'first', 'dense'" raise ValueError(msg) if len(self._internal.index_columns) > 1: raise ValueError('rank do not support index now') if ascending: asc_func = spark.functions.asc else: asc_func = spark.functions.desc index_column = self._internal.index_columns[0] column_name = self._internal.data_columns[0] if method == 'first': window = Window.orderBy( asc_func(column_name), asc_func(index_column) ).partitionBy(*part_cols).rowsBetween(Window.unboundedPreceding, Window.currentRow) scol = F.row_number().over(window) elif method == 'dense': window = Window.orderBy(asc_func(column_name)).partitionBy(*part_cols) \ .rowsBetween(Window.unboundedPreceding, Window.currentRow) scol = F.dense_rank().over(window) else: if method == 'average': stat_func = F.mean elif method == 'min': stat_func = F.min elif method == 'max': stat_func = F.max window1 = Window.orderBy( asc_func(column_name) ).partitionBy(*part_cols).rowsBetween(Window.unboundedPreceding, Window.currentRow) window2 = Window.partitionBy( *[column_name] + list(part_cols) ).rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing) scol = stat_func(F.row_number().over(window1)).over(window2) kser = self._with_new_scol(scol).rename(self.name) return kser.astype(np.float64) def describe(self, percentiles: Optional[List[float]] = None) -> 'Series': return _col(self.to_dataframe().describe(percentiles)) describe.__doc__ = DataFrame.describe.__doc__ def diff(self, periods=1): """ First discrete difference of element. Calculates the difference of a Series element compared with another element in the DataFrame (default is the element in the same column of the previous row). .. note:: the current implementation of diff uses Spark's Window without specifying partition specification. This leads to move all data into single partition in single machine and could cause serious performance degradation. Avoid this method against very large dataset. Parameters ---------- periods : int, default 1 Periods to shift for calculating difference, accepts negative values. Returns ------- diffed : DataFrame Examples -------- >>> df = ks.DataFrame({'a': [1, 2, 3, 4, 5, 6], ... 'b': [1, 1, 2, 3, 5, 8], ... 'c': [1, 4, 9, 16, 25, 36]}, columns=['a', 'b', 'c']) >>> df a b c 0 1 1 1 1 2 1 4 2 3 2 9 3 4 3 16 4 5 5 25 5 6 8 36 >>> df.b.diff() 0 NaN 1 0.0 2 1.0 3 1.0 4 2.0 5 3.0 Name: b, dtype: float64 Difference with previous value >>> df.c.diff(periods=3) 0 NaN 1 NaN 2 NaN 3 15.0 4 21.0 5 27.0 Name: c, dtype: float64 Difference with following value >>> df.c.diff(periods=-1) 0 -3.0 1 -5.0 2 -7.0 3 -9.0 4 -11.0 5 NaN Name: c, dtype: float64 """ return self._diff(periods) def _diff(self, periods, part_cols=()): if not isinstance(periods, int): raise ValueError('periods should be an int; however, got [%s]' % type(periods)) window = Window.partitionBy(*part_cols).orderBy(self._internal.index_scols)\ .rowsBetween(-periods, -periods) scol = self._scol - F.lag(self._scol, periods).over(window) return self._with_new_scol(scol).rename(self.name) def idxmax(self, skipna=True): """ Return the row label of the maximum value. If multiple values equal the maximum, the row label with that value is returned. Parameters ---------- skipna : bool, default True Exclude NA/null values. 
If the entire Series is NA, the result will be NA. Returns ------- Index Label of the maximum value. Raises ------ ValueError If the Series is empty. See Also -------- Series.idxmin : Return index *label* of the first occurrence of minimum of values. Examples -------- >>> s = ks.Series(data=[1, None, 4, 3, 5], ... index=['A', 'B', 'C', 'D', 'E']) >>> s A 1.0 B NaN C 4.0 D 3.0 E 5.0 Name: 0, dtype: float64 >>> s.idxmax() 'E' If `skipna` is False and there is an NA value in the data, the function returns ``nan``. >>> s.idxmax(skipna=False) nan In case of multi-index, you get a tuple: >>> index = pd.MultiIndex.from_arrays([ ... ['a', 'a', 'b', 'b'], ['c', 'd', 'e', 'f']], names=('first', 'second')) >>> s = ks.Series(data=[1, None, 4, 5], index=index) >>> s first second a c 1.0 d NaN b e 4.0 f 5.0 Name: 0, dtype: float64 >>> s.idxmax() ('b', 'f') """ sdf = self._kdf._sdf scol = self._scol index_scols = self._kdf._internal.index_scols # desc_nulls_(last|first) is used via Py4J directly because # it's not supported in Spark 2.3. if skipna: sdf = sdf.orderBy(Column(scol._jc.desc_nulls_last())) else: sdf = sdf.orderBy(Column(scol._jc.desc_nulls_first())) results = sdf.select([scol] + index_scols).take(1) if len(results) == 0: raise ValueError("attempt to get idxmax of an empty sequence") if results[0][0] is None: # This will only happen when skipna is False because we will # place nulls first. return np.nan values = list(results[0][1:]) if len(values) == 1: return values[0] else: return tuple(values) def idxmin(self, skipna=True): """ Return the row label of the minimum value. If multiple values equal the minimum, the row label with that value is returned. Parameters ---------- skipna : bool, default True Exclude NA/null values. If the entire Series is NA, the result will be NA. Returns ------- Index Label of the minimum value. Raises ------ ValueError If the Series is empty. See Also -------- Series.idxmax : Return index *label* of the first occurrence of maximum of values. Notes ----- This method is the Series version of ``ndarray.argmin``. This method returns the label of the minimum, while ``ndarray.argmin`` returns the position. To get the position, use ``series.values.argmin()``. Examples -------- >>> s = ks.Series(data=[1, None, 4, 0], ... index=['A', 'B', 'C', 'D']) >>> s A 1.0 B NaN C 4.0 D 0.0 Name: 0, dtype: float64 >>> s.idxmin() 'D' If `skipna` is False and there is an NA value in the data, the function returns ``nan``. >>> s.idxmin(skipna=False) nan In case of multi-index, you get a tuple: >>> index = pd.MultiIndex.from_arrays([ ... ['a', 'a', 'b', 'b'], ['c', 'd', 'e', 'f']], names=('first', 'second')) >>> s = ks.Series(data=[1, None, 4, 0], index=index) >>> s first second a c 1.0 d NaN b e 4.0 f 0.0 Name: 0, dtype: float64 >>> s.idxmin() ('b', 'f') """ sdf = self._kdf._sdf scol = self._scol index_scols = self._kdf._internal.index_scols # asc_nulls_(last|first) is used via Py4J directly because # it's not supported in Spark 2.3. if skipna: sdf = sdf.orderBy(Column(scol._jc.asc_nulls_last())) else: sdf = sdf.orderBy(Column(scol._jc.asc_nulls_first())) results = sdf.select([scol] + index_scols).take(1) if len(results) == 0: raise ValueError("attempt to get idxmin of an empty sequence") if results[0][0] is None: # This will only happen when skipna is False because we will # place nulls first.
return np.nan values = list(results[0][1:]) if len(values) == 1: return values[0] else: return tuple(values) def _cum(self, func, skipna, part_cols=()): # This is used to cummin, cummax, cumsum, etc. index_columns = self._internal.index_columns window = Window.orderBy( index_columns).partitionBy(*part_cols).rowsBetween( Window.unboundedPreceding, Window.currentRow) if skipna: # There is a behavior difference between pandas and PySpark. In case of cummax, # # Input: # A B # 0 2.0 1.0 # 1 5.0 NaN # 2 1.0 0.0 # 3 2.0 4.0 # 4 4.0 9.0 # # pandas: # A B # 0 2.0 1.0 # 1 5.0 NaN # 2 5.0 1.0 # 3 5.0 4.0 # 4 5.0 9.0 # # PySpark: # A B # 0 2.0 1.0 # 1 5.0 1.0 # 2 5.0 1.0 # 3 5.0 4.0 # 4 5.0 9.0 scol = F.when( # Manually sets nulls given the column defined above. self._scol.isNull(), F.lit(None) ).otherwise(func(self._scol).over(window)) else: # Here, we use two Windows. # One for real data. # The other one for setting nulls after the first null it meets. # # There is a behavior difference between pandas and PySpark. In case of cummax, # # Input: # A B # 0 2.0 1.0 # 1 5.0 NaN # 2 1.0 0.0 # 3 2.0 4.0 # 4 4.0 9.0 # # pandas: # A B # 0 2.0 1.0 # 1 5.0 NaN # 2 5.0 NaN # 3 5.0 NaN # 4 5.0 NaN # # PySpark: # A B # 0 2.0 1.0 # 1 5.0 1.0 # 2 5.0 1.0 # 3 5.0 4.0 # 4 5.0 9.0 scol = F.when( # By going through with max, it sets True after the first time it meets null. F.max(self._scol.isNull()).over(window), # Manually sets nulls given the column defined above. F.lit(None) ).otherwise(func(self._scol).over(window)) # cumprod uses exp(sum(log(...))) trick. if func.__name__ == "cumprod": scol = F.exp(scol) return self._with_new_scol(scol).rename(self.name) # ---------------------------------------------------------------------- # Accessor Methods # ---------------------------------------------------------------------- dt = CachedAccessor("dt", DatetimeMethods) str = CachedAccessor("str", StringMethods) # ---------------------------------------------------------------------- def _reduce_for_stat_function(self, sfun, name, axis=None, numeric_only=None): """ Applies sfun to the column and returns a scalar Parameters ---------- sfun : the stats function to be used for aggregation name : original pandas API name. axis : used only for sanity check because series only support index axis. 
numeric_only : not used by this implementation, but passed down by stats functions """ from inspect import signature if axis in ('columns', 1): raise ValueError("Series does not support columns axis.") num_args = len(signature(sfun).parameters) col_sdf = self._scol col_type = self.spark_type if isinstance(col_type, BooleanType) and sfun.__name__ not in ('min', 'max'): # Stat functions cannot be used with boolean values by default # Thus, cast to integer (true to 1 and false to 0) # Exclude the min and max methods though since those work with booleans col_sdf = col_sdf.cast('integer') if num_args == 1: # Only pass in the column if sfun accepts only one arg col_sdf = sfun(col_sdf) else: # must be 2 assert num_args == 2 # Pass in both the column and its data type if sfun accepts two args col_sdf = sfun(col_sdf, col_type) return _unpack_scalar(self._kdf._sdf.select(col_sdf)) def __len__(self): return len(self.to_dataframe()) def __getitem__(self, key): return Series(self._scol.__getitem__(key), anchor=self._kdf, index=self._index_map) def __getattr__(self, item: str_type) -> Any: if item.startswith("__") or item.startswith("_pandas_") or item.startswith("_spark_"): raise AttributeError(item) if hasattr(_MissingPandasLikeSeries, item): property_or_func = getattr(_MissingPandasLikeSeries, item) if isinstance(property_or_func, property): return property_or_func.fget(self) # type: ignore else: return partial(property_or_func, self) return self.getField(item) def __str__(self): return self._pandas_orig_repr() def _to_internal_pandas(self): """ Return a pandas Series directly from _internal to avoid overhead of copy. This method is for internal use only. """ return _col(self._internal.pandas_df) def __repr__(self): max_display_count = get_option("display.max_rows") if max_display_count is None: return self._to_internal_pandas().to_string(name=self.name, dtype=self.dtype) pser = self.head(max_display_count + 1)._to_internal_pandas() pser_length = len(pser) pser = pser.iloc[:max_display_count] if pser_length > max_display_count: repr_string = pser.to_string(length=True) rest, prev_footer = repr_string.rsplit("\n", 1) match = REPR_PATTERN.search(prev_footer) if match is not None: length = match.group("length") name = str(self.dtype.name) footer = ("\nName: {name}, dtype: {dtype}\nShowing only the first {length}" .format(length=length, name=self.name, dtype=pprint_thing(name))) return rest + footer return pser.to_string(name=self.name, dtype=self.dtype) def __dir__(self): if not isinstance(self.schema, StructType): fields = [] else: fields = [f for f in self.schema.fieldNames() if ' ' not in f] return super(Series, self).__dir__() + fields def _pandas_orig_repr(self): # TODO: figure out how to reuse the original one. return 'Column<%s>' % self._scol._jc.toString().encode('utf8') def _equals(self, other: 'Series') -> bool: return self._scol._jc.equals(other._scol._jc) def _unpack_scalar(sdf): """ Takes a dataframe that is supposed to contain a single row with a single scalar value, and returns this value. """ l = sdf.head(2) assert len(l) == 1, (sdf, l) row = l[0] l2 = list(row.asDict().values()) assert len(l2) == 1, (row, l2) return l2[0] def _col(df): assert isinstance(df, (DataFrame, pd.DataFrame)), type(df) return df[df.columns[0]]
1
11697
nit: this change is no longer needed.
databricks-koalas
py
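The koalas idxmax/idxmin code above pushes the work to Spark: order the frame by the value column with nulls last (skipna=True) or nulls first (skipna=False), take the first row, and treat a null in that row as a NaN answer. A minimal plain-Python sketch of that semantics (the dict, labels, and helper below are illustrative stand-ins, not koalas code):

import math

# Toy model of the "order by value, nulls last/first, take the first row"
# strategy; `data` and `idxmax` here are made-up names for illustration.
data = {"A": 1.0, "B": float("nan"), "C": 4.0, "D": 3.0, "E": 5.0}

def idxmax(d, skipna=True):
    has_nan = any(isinstance(v, float) and math.isnan(v) for v in d.values())
    if not skipna and has_nan:
        # nulls-first ordering: a NaN row surfaces first, so the result is NaN
        return float("nan")
    # nulls-last ordering: the label of the largest non-null value wins
    candidates = {k: v for k, v in d.items()
                  if not (isinstance(v, float) and math.isnan(v))}
    return max(candidates, key=candidates.get)

print(idxmax(data))                # 'E'
print(idxmax(data, skipna=False))  # nan

The same ordering trick is why idxmax with skipna=False returns nan as soon as any null is present.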
@@ -1967,9 +1967,7 @@ class Codebase $this->taint_flow_graph->addSource($source); - $expr_type->parent_nodes = [ - $source->id => $source, - ]; + $expr_type->parent_nodes[$source->id] = $source; } /**
1
<?php namespace Psalm; use Exception; use InvalidArgumentException; use LanguageServerProtocol\Command; use LanguageServerProtocol\CompletionItem; use LanguageServerProtocol\CompletionItemKind; use LanguageServerProtocol\InsertTextFormat; use LanguageServerProtocol\ParameterInformation; use LanguageServerProtocol\Position; use LanguageServerProtocol\Range; use LanguageServerProtocol\SignatureInformation; use LanguageServerProtocol\TextEdit; use PhpParser; use PhpParser\Node\Arg; use Psalm\CodeLocation; use Psalm\CodeLocation\Raw; use Psalm\Exception\UnanalyzedFileException; use Psalm\Exception\UnpopulatedClasslikeException; use Psalm\Internal\Analyzer\FunctionLikeAnalyzer; use Psalm\Internal\Analyzer\NamespaceAnalyzer; use Psalm\Internal\Analyzer\ProjectAnalyzer; use Psalm\Internal\Analyzer\Statements\Block\ForeachAnalyzer; use Psalm\Internal\Analyzer\Statements\Expression\Fetch\ConstFetchAnalyzer; use Psalm\Internal\Analyzer\Statements\Expression\Fetch\VariableFetchAnalyzer; use Psalm\Internal\Analyzer\StatementsAnalyzer; use Psalm\Internal\Codebase\Analyzer; use Psalm\Internal\Codebase\ClassLikes; use Psalm\Internal\Codebase\Functions; use Psalm\Internal\Codebase\InternalCallMapHandler; use Psalm\Internal\Codebase\Methods; use Psalm\Internal\Codebase\Populator; use Psalm\Internal\Codebase\Properties; use Psalm\Internal\Codebase\Reflection; use Psalm\Internal\Codebase\Scanner; use Psalm\Internal\Codebase\TaintFlowGraph; use Psalm\Internal\DataFlow\TaintSink; use Psalm\Internal\DataFlow\TaintSource; use Psalm\Internal\MethodIdentifier; use Psalm\Internal\Provider\ClassLikeStorageProvider; use Psalm\Internal\Provider\FileProvider; use Psalm\Internal\Provider\FileReferenceProvider; use Psalm\Internal\Provider\FileStorageProvider; use Psalm\Internal\Provider\Providers; use Psalm\Internal\Provider\StatementsProvider; use Psalm\Internal\Type\Comparator\UnionTypeComparator; use Psalm\Progress\Progress; use Psalm\Progress\VoidProgress; use Psalm\Storage\ClassLikeStorage; use Psalm\Storage\FileStorage; use Psalm\Storage\FunctionLikeParameter; use Psalm\Storage\FunctionLikeStorage; use Psalm\Storage\FunctionStorage; use Psalm\Storage\MethodStorage; use Psalm\Type\Atomic; use Psalm\Type\Atomic\TBool; use Psalm\Type\Atomic\TClassConstant; use Psalm\Type\Atomic\TKeyedArray; use Psalm\Type\Atomic\TLiteralInt; use Psalm\Type\Atomic\TLiteralString; use Psalm\Type\Atomic\TNamedObject; use Psalm\Type\TaintKindGroup; use Psalm\Type\Union; use ReflectionProperty; use ReflectionType; use UnexpectedValueException; use function array_combine; use function array_merge; use function array_pop; use function array_reverse; use function count; use function dirname; use function error_log; use function explode; use function implode; use function in_array; use function intdiv; use function is_numeric; use function is_string; use function krsort; use function ksort; use function preg_match; use function preg_replace; use function strlen; use function strpos; use function strrpos; use function strtolower; use function substr; use function substr_count; use const PHP_VERSION_ID; class Codebase { /** * @var Config */ public $config; /** * A map of fully-qualified use declarations to the files * that reference them (keyed by filename) * * @var array<lowercase-string, array<int, CodeLocation>> */ public $use_referencing_locations = []; /** * @var FileStorageProvider */ public $file_storage_provider; /** * @var ClassLikeStorageProvider */ public $classlike_storage_provider; /** * @var bool */ public $collect_references = 
false; /** * @var bool */ public $collect_locations = false; /** * @var null|'always'|'auto' */ public $find_unused_code; /** * @var FileProvider */ public $file_provider; /** * @var FileReferenceProvider */ public $file_reference_provider; /** * @var StatementsProvider */ public $statements_provider; /** * @var Progress */ private $progress; /** * @var array<string, Union> */ private static $stubbed_constants = []; /** * Whether to register autoloaded information * * @var bool */ public $register_autoload_files = false; /** * Whether to log functions just at the file level or globally (for stubs) * * @var bool */ public $register_stub_files = false; /** * @var bool */ public $find_unused_variables = false; /** * @var Scanner */ public $scanner; /** * @var Analyzer */ public $analyzer; /** * @var Functions */ public $functions; /** * @var ClassLikes */ public $classlikes; /** * @var Methods */ public $methods; /** * @var Properties */ public $properties; /** * @var Populator */ public $populator; /** * @var ?TaintFlowGraph */ public $taint_flow_graph; /** * @var bool */ public $server_mode = false; /** * @var bool */ public $store_node_types = false; /** * Whether or not to infer types from usage. Computationally expensive, so turned off by default * * @var bool */ public $infer_types_from_usage = false; /** * @var bool */ public $alter_code = false; /** * @var bool */ public $diff_methods = false; /** * @var array<lowercase-string, string> */ public $methods_to_move = []; /** * @var array<lowercase-string, string> */ public $methods_to_rename = []; /** * @var array<string, string> */ public $properties_to_move = []; /** * @var array<string, string> */ public $properties_to_rename = []; /** * @var array<string, string> */ public $class_constants_to_move = []; /** * @var array<string, string> */ public $class_constants_to_rename = []; /** * @var array<lowercase-string, string> */ public $classes_to_move = []; /** * @var array<lowercase-string, string> */ public $call_transforms = []; /** * @var array<string, string> */ public $property_transforms = []; /** * @var array<string, string> */ public $class_constant_transforms = []; /** * @var array<lowercase-string, string> */ public $class_transforms = []; /** * @var bool */ public $allow_backwards_incompatible_changes = true; /** @var int */ public $analysis_php_version_id = PHP_VERSION_ID; /** @var 'cli'|'config'|'composer'|'tests'|'runtime' */ public $php_version_source = 'runtime'; /** * @var bool */ public $track_unused_suppressions = false; public function __construct( Config $config, Providers $providers, ?Progress $progress = null ) { if ($progress === null) { $progress = new VoidProgress(); } $this->config = $config; $this->file_storage_provider = $providers->file_storage_provider; $this->classlike_storage_provider = $providers->classlike_storage_provider; $this->progress = $progress; $this->file_provider = $providers->file_provider; $this->file_reference_provider = $providers->file_reference_provider; $this->statements_provider = $providers->statements_provider; self::$stubbed_constants = []; $reflection = new Reflection($providers->classlike_storage_provider, $this); $this->scanner = new Scanner( $this, $config, $providers->file_storage_provider, $providers->file_provider, $reflection, $providers->file_reference_provider, $progress ); $this->loadAnalyzer(); $this->functions = new Functions($providers->file_storage_provider, $reflection); $this->classlikes = new ClassLikes( $this->config, $providers->classlike_storage_provider, 
$providers->file_reference_provider, $providers->statements_provider, $this->scanner ); $this->properties = new Properties( $providers->classlike_storage_provider, $providers->file_reference_provider, $this->classlikes ); $this->methods = new Methods( $providers->classlike_storage_provider, $providers->file_reference_provider, $this->classlikes ); $this->populator = new Populator( $providers->classlike_storage_provider, $providers->file_storage_provider, $this->classlikes, $providers->file_reference_provider, $progress ); $this->loadAnalyzer(); } private function loadAnalyzer(): void { $this->analyzer = new Analyzer( $this->config, $this->file_provider, $this->file_storage_provider, $this->progress ); } /** * @param array<string> $candidate_files * */ public function reloadFiles(ProjectAnalyzer $project_analyzer, array $candidate_files): void { $this->loadAnalyzer(); $this->file_reference_provider->loadReferenceCache(false); FunctionLikeAnalyzer::clearCache(); if (!$this->statements_provider->parser_cache_provider) { $diff_files = $candidate_files; } else { $diff_files = []; $parser_cache_provider = $this->statements_provider->parser_cache_provider; foreach ($candidate_files as $candidate_file_path) { if ($parser_cache_provider->loadExistingFileContentsFromCache($candidate_file_path) !== $this->file_provider->getContents($candidate_file_path) ) { $diff_files[] = $candidate_file_path; } } } $referenced_files = $project_analyzer->getReferencedFilesFromDiff($diff_files, false); foreach ($diff_files as $diff_file_path) { $this->invalidateInformationForFile($diff_file_path); } foreach ($referenced_files as $referenced_file_path) { if (in_array($referenced_file_path, $diff_files, true)) { continue; } $file_storage = $this->file_storage_provider->get($referenced_file_path); foreach ($file_storage->classlikes_in_file as $fq_classlike_name) { $this->classlike_storage_provider->remove($fq_classlike_name); $this->classlikes->removeClassLike($fq_classlike_name); } $this->file_storage_provider->remove($referenced_file_path); $this->scanner->removeFile($referenced_file_path); } $referenced_files = array_combine($referenced_files, $referenced_files); $this->scanner->addFilesToDeepScan($referenced_files); $this->addFilesToAnalyze(array_combine($candidate_files, $candidate_files)); $this->scanner->scanFiles($this->classlikes); $this->file_reference_provider->updateReferenceCache($this, $referenced_files); $this->populator->populateCodebase(); } public function enterServerMode(): void { $this->server_mode = true; $this->store_node_types = true; } public function collectLocations(): void { $this->collect_locations = true; $this->classlikes->collect_locations = true; $this->methods->collect_locations = true; $this->properties->collect_locations = true; } /** * @param 'always'|'auto' $find_unused_code * */ public function reportUnusedCode(string $find_unused_code = 'auto'): void { $this->collect_references = true; $this->classlikes->collect_references = true; $this->find_unused_code = $find_unused_code; $this->find_unused_variables = true; } public function reportUnusedVariables(): void { $this->collect_references = true; $this->find_unused_variables = true; } /** * @param array<string, string> $files_to_analyze * */ public function addFilesToAnalyze(array $files_to_analyze): void { $this->scanner->addFilesToDeepScan($files_to_analyze); $this->analyzer->addFilesToAnalyze($files_to_analyze); } /** * Scans all files their related files * */ public function scanFiles(int $threads = 1): void { $has_changes = 
$this->scanner->scanFiles($this->classlikes, $threads); if ($has_changes) { $this->populator->populateCodebase(); } } public function getFileContents(string $file_path): string { return $this->file_provider->getContents($file_path); } /** * @return list<PhpParser\Node\Stmt> */ public function getStatementsForFile(string $file_path): array { return $this->statements_provider->getStatementsForFile( $file_path, $this->analysis_php_version_id, $this->progress ); } public function createClassLikeStorage(string $fq_classlike_name): ClassLikeStorage { return $this->classlike_storage_provider->create($fq_classlike_name); } public function cacheClassLikeStorage(ClassLikeStorage $classlike_storage, string $file_path): void { $file_contents = $this->file_provider->getContents($file_path); if ($this->classlike_storage_provider->cache) { $this->classlike_storage_provider->cache->writeToCache($classlike_storage, $file_path, $file_contents); } } public function exhumeClassLikeStorage(string $fq_classlike_name, string $file_path): void { $file_contents = $this->file_provider->getContents($file_path); $storage = $this->classlike_storage_provider->exhume( $fq_classlike_name, $file_path, $file_contents ); if ($storage->is_trait) { $this->classlikes->addFullyQualifiedTraitName($storage->name, $file_path); } elseif ($storage->is_interface) { $this->classlikes->addFullyQualifiedInterfaceName($storage->name, $file_path); } else { $this->classlikes->addFullyQualifiedClassName($storage->name, $file_path); } } public static function getPsalmTypeFromReflection(?ReflectionType $type): Union { return Reflection::getPsalmTypeFromReflectionType($type); } public function createFileStorageForPath(string $file_path): FileStorage { return $this->file_storage_provider->create($file_path); } /** * @return array<int, CodeLocation> */ public function findReferencesToSymbol(string $symbol): array { if (!$this->collect_locations) { throw new UnexpectedValueException('Should not be checking references'); } if (strpos($symbol, '::$') !== false) { return $this->findReferencesToProperty($symbol); } if (strpos($symbol, '::') !== false) { return $this->findReferencesToMethod($symbol); } return $this->findReferencesToClassLike($symbol); } /** * @return array<int, CodeLocation> */ public function findReferencesToMethod(string $method_id): array { return $this->file_reference_provider->getClassMethodLocations(strtolower($method_id)); } /** * @return array<int, CodeLocation> */ public function findReferencesToProperty(string $property_id): array { [$fq_class_name, $property_name] = explode('::', $property_id); return $this->file_reference_provider->getClassPropertyLocations( strtolower($fq_class_name) . '::' . $property_name ); } /** * @return CodeLocation[] * * @psalm-return array<int, CodeLocation> */ public function findReferencesToClassLike(string $fq_class_name): array { $fq_class_name_lc = strtolower($fq_class_name); $locations = $this->file_reference_provider->getClassLocations($fq_class_name_lc); if (isset($this->use_referencing_locations[$fq_class_name_lc])) { $locations = array_merge($locations, $this->use_referencing_locations[$fq_class_name_lc]); } return $locations; } public function getClosureStorage(string $file_path, string $closure_id): FunctionStorage { $file_storage = $this->file_storage_provider->get($file_path); // closures can be returned here if (isset($file_storage->functions[$closure_id])) { return $file_storage->functions[$closure_id]; } throw new UnexpectedValueException( 'Expecting ' . $closure_id . 
' to have storage in ' . $file_path ); } public function addGlobalConstantType(string $const_id, Union $type): void { self::$stubbed_constants[$const_id] = $type; } public function getStubbedConstantType(string $const_id): ?Union { return self::$stubbed_constants[$const_id] ?? null; } /** * @return array<string, Union> */ public function getAllStubbedConstants(): array { return self::$stubbed_constants; } public function fileExists(string $file_path): bool { return $this->file_provider->fileExists($file_path); } /** * Check whether a class/interface exists */ public function classOrInterfaceExists( string $fq_class_name, ?CodeLocation $code_location = null, ?string $calling_fq_class_name = null, ?string $calling_method_id = null ): bool { return $this->classlikes->classOrInterfaceExists( $fq_class_name, $code_location, $calling_fq_class_name, $calling_method_id ); } /** * Check whether a class/interface exists */ public function classOrInterfaceOrEnumExists( string $fq_class_name, ?CodeLocation $code_location = null, ?string $calling_fq_class_name = null, ?string $calling_method_id = null ): bool { return $this->classlikes->classOrInterfaceOrEnumExists( $fq_class_name, $code_location, $calling_fq_class_name, $calling_method_id ); } public function classExtendsOrImplements(string $fq_class_name, string $possible_parent): bool { return $this->classlikes->classExtends($fq_class_name, $possible_parent) || $this->classlikes->classImplements($fq_class_name, $possible_parent); } /** * Determine whether or not a given class exists */ public function classExists( string $fq_class_name, ?CodeLocation $code_location = null, ?string $calling_fq_class_name = null, ?string $calling_method_id = null ): bool { return $this->classlikes->classExists( $fq_class_name, $code_location, $calling_fq_class_name, $calling_method_id ); } /** * Determine whether or not a class extends a parent * * @throws UnpopulatedClasslikeException when called on unpopulated class * @throws InvalidArgumentException when class does not exist */ public function classExtends(string $fq_class_name, string $possible_parent): bool { return $this->classlikes->classExtends($fq_class_name, $possible_parent, true); } /** * Check whether a class implements an interface */ public function classImplements(string $fq_class_name, string $interface): bool { return $this->classlikes->classImplements($fq_class_name, $interface); } public function interfaceExists( string $fq_interface_name, ?CodeLocation $code_location = null, ?string $calling_fq_class_name = null, ?string $calling_method_id = null ): bool { return $this->classlikes->interfaceExists( $fq_interface_name, $code_location, $calling_fq_class_name, $calling_method_id ); } public function interfaceExtends(string $interface_name, string $possible_parent): bool { return $this->classlikes->interfaceExtends($interface_name, $possible_parent); } /** * @return array<string, string> all interfaces extended by $interface_name */ public function getParentInterfaces(string $fq_interface_name): array { return $this->classlikes->getParentInterfaces( $this->classlikes->getUnAliasedName($fq_interface_name) ); } /** * Determine whether or not a class has the correct casing */ public function classHasCorrectCasing(string $fq_class_name): bool { return $this->classlikes->classHasCorrectCasing($fq_class_name); } public function interfaceHasCorrectCasing(string $fq_interface_name): bool { return $this->classlikes->interfaceHasCorrectCasing($fq_interface_name); } public function traitHasCorrectCase(string 
$fq_trait_name): bool { return $this->classlikes->traitHasCorrectCase($fq_trait_name); } /** * Given a function id, return the function like storage for * a method, closure, or function. * * @param non-empty-string $function_id * * @return FunctionStorage|MethodStorage */ public function getFunctionLikeStorage( StatementsAnalyzer $statements_analyzer, string $function_id ): FunctionLikeStorage { $doesMethodExist = MethodIdentifier::isValidMethodIdReference($function_id) && $this->methodExists($function_id); if ($doesMethodExist) { $method_id = MethodIdentifier::wrap($function_id); $declaring_method_id = $this->methods->getDeclaringMethodId($method_id); if (!$declaring_method_id) { throw new UnexpectedValueException('Declaring method for ' . $method_id . ' cannot be found'); } return $this->methods->getStorage($declaring_method_id); } return $this->functions->getStorage($statements_analyzer, strtolower($function_id)); } /** * Whether or not a given method exists * * @param string|MethodIdentifier $method_id * @param string|MethodIdentifier|null $calling_method_id */ public function methodExists( $method_id, ?CodeLocation $code_location = null, $calling_method_id = null, ?string $file_path = null, bool $is_used = true ): bool { return $this->methods->methodExists( MethodIdentifier::wrap($method_id), is_string($calling_method_id) ? strtolower($calling_method_id) : strtolower((string) $calling_method_id), $code_location, null, $file_path, true, $is_used ); } /** * @param string|MethodIdentifier $method_id * * @return array<int, FunctionLikeParameter> */ public function getMethodParams($method_id): array { return $this->methods->getMethodParams(MethodIdentifier::wrap($method_id)); } /** * @param string|MethodIdentifier $method_id * */ public function isVariadic($method_id): bool { return $this->methods->isVariadic(MethodIdentifier::wrap($method_id)); } /** * @param string|MethodIdentifier $method_id * @param list<Arg> $call_args * */ public function getMethodReturnType($method_id, ?string &$self_class, array $call_args = []): ?Union { return $this->methods->getMethodReturnType( MethodIdentifier::wrap($method_id), $self_class, null, $call_args ); } /** * @param string|MethodIdentifier $method_id * */ public function getMethodReturnsByRef($method_id): bool { return $this->methods->getMethodReturnsByRef(MethodIdentifier::wrap($method_id)); } /** * @param string|MethodIdentifier $method_id * @param CodeLocation|null $defined_location * */ public function getMethodReturnTypeLocation( $method_id, CodeLocation &$defined_location = null ): ?CodeLocation { return $this->methods->getMethodReturnTypeLocation( MethodIdentifier::wrap($method_id), $defined_location ); } /** * @param string|MethodIdentifier $method_id * */ public function getDeclaringMethodId($method_id): ?string { $new_method_id = $this->methods->getDeclaringMethodId(MethodIdentifier::wrap($method_id)); return $new_method_id ? (string) $new_method_id : null; } /** * Get the class this method appears in (vs is declared in, which could give a trait) * * @param string|MethodIdentifier $method_id * */ public function getAppearingMethodId($method_id): ?string { $new_method_id = $this->methods->getAppearingMethodId(MethodIdentifier::wrap($method_id)); return $new_method_id ? 
(string) $new_method_id : null; } /** * @param string|MethodIdentifier $method_id * * @return array<string, MethodIdentifier> */ public function getOverriddenMethodIds($method_id): array { return $this->methods->getOverriddenMethodIds(MethodIdentifier::wrap($method_id)); } /** * @param string|MethodIdentifier $method_id * */ public function getCasedMethodId($method_id): string { return $this->methods->getCasedMethodId(MethodIdentifier::wrap($method_id)); } public function invalidateInformationForFile(string $file_path): void { $this->scanner->removeFile($file_path); try { $file_storage = $this->file_storage_provider->get($file_path); } catch (InvalidArgumentException $e) { return; } foreach ($file_storage->classlikes_in_file as $fq_classlike_name) { $this->classlike_storage_provider->remove($fq_classlike_name); $this->classlikes->removeClassLike($fq_classlike_name); } $this->file_storage_provider->remove($file_path); } public function getFunctionStorageForSymbol(string $file_path, string $symbol): ?FunctionLikeStorage { if (strpos($symbol, '::')) { $symbol = substr($symbol, 0, -2); /** @psalm-suppress ArgumentTypeCoercion */ $method_id = new MethodIdentifier(...explode('::', $symbol)); $declaring_method_id = $this->methods->getDeclaringMethodId($method_id); if (!$declaring_method_id) { return null; } return $this->methods->getStorage($declaring_method_id); } $function_id = strtolower(substr($symbol, 0, -2)); $file_storage = $this->file_storage_provider->get($file_path); if (isset($file_storage->functions[$function_id])) { return $file_storage->functions[$function_id]; } if (!$function_id) { return null; } return $this->functions->getStorage(null, $function_id); } /** * @param string $file_path * @param string $symbol * @return array{ type: string, description?: string|null}|null */ public function getSymbolInformation(string $file_path, string $symbol): ?array { if (is_numeric($symbol[0])) { return ['type' => preg_replace('/^[^:]*:/', '', $symbol)]; } try { if (strpos($symbol, '::')) { if (strpos($symbol, '()')) { $symbol = substr($symbol, 0, -2); /** @psalm-suppress ArgumentTypeCoercion */ $method_id = new MethodIdentifier(...explode('::', $symbol)); $declaring_method_id = $this->methods->getDeclaringMethodId($method_id); if (!$declaring_method_id) { return null; } $storage = $this->methods->getStorage($declaring_method_id); return [ 'type' => '<?php ' . $storage->getSignature(true), 'description' => $storage->description, ]; } [, $symbol_name] = explode('::', $symbol); if (strpos($symbol, '$') !== false) { $storage = $this->properties->getStorage($symbol); return [ 'type' => '<?php ' . $storage->getInfo() . ' ' . $symbol_name, 'description' => $storage->description, ]; } [$fq_classlike_name, $const_name] = explode('::', $symbol); $class_constants = $this->classlikes->getConstantsForClass( $fq_classlike_name, ReflectionProperty::IS_PRIVATE ); if (!isset($class_constants[$const_name])) { return null; } return [ 'type' => '<?php ' . $const_name, 'description' => $class_constants[$const_name]->description, ]; } if (strpos($symbol, '()')) { $function_id = strtolower(substr($symbol, 0, -2)); $file_storage = $this->file_storage_provider->get($file_path); if (isset($file_storage->functions[$function_id])) { $function_storage = $file_storage->functions[$function_id]; return [ 'type' => '<?php ' . 
$function_storage->getSignature(true), 'description' => $function_storage->description, ]; } if (!$function_id) { return null; } $function = $this->functions->getStorage(null, $function_id); return [ 'type' => '<?php ' . $function->getSignature(true), 'description' => $function->description, ]; } if (strpos($symbol, '$') === 0) { $type = VariableFetchAnalyzer::getGlobalType($symbol); if (!$type->isMixed()) { return ['type' => '<?php ' . $type]; } } try { $storage = $this->classlike_storage_provider->get($symbol); return [ 'type' => '<?php ' . ($storage->abstract ? 'abstract ' : '') . 'class ' . $storage->name, 'description' => $storage->description, ]; } catch (InvalidArgumentException $e) { } if (strpos($symbol, '\\')) { $const_name_parts = explode('\\', $symbol); $const_name = array_pop($const_name_parts); $namespace_name = implode('\\', $const_name_parts); $namespace_constants = NamespaceAnalyzer::getConstantsForNamespace( $namespace_name, ReflectionProperty::IS_PUBLIC ); if (isset($namespace_constants[$const_name])) { $type = $namespace_constants[$const_name]; return ['type' => '<?php const ' . $symbol . ' ' . $type]; } } else { $file_storage = $this->file_storage_provider->get($file_path); if (isset($file_storage->constants[$symbol])) { return ['type' => '<?php const ' . $symbol . ' ' . $file_storage->constants[$symbol]]; } $constant = ConstFetchAnalyzer::getGlobalConstType($this, $symbol, $symbol); if ($constant) { return ['type' => '<?php const ' . $symbol . ' ' . $constant]; } } return null; } catch (Exception $e) { error_log($e->getMessage()); return null; } } public function getSymbolLocation(string $file_path, string $symbol): ?CodeLocation { if (is_numeric($symbol[0])) { $symbol = preg_replace('/:.*/', '', $symbol); $symbol_parts = explode('-', $symbol); $file_contents = $this->getFileContents($file_path); return new Raw( $file_contents, $file_path, $this->config->shortenFileName($file_path), (int) $symbol_parts[0], (int) $symbol_parts[1] ); } try { if (strpos($symbol, '::')) { if (strpos($symbol, '()')) { $symbol = substr($symbol, 0, -2); /** @psalm-suppress ArgumentTypeCoercion */ $method_id = new MethodIdentifier(...explode('::', $symbol)); $declaring_method_id = $this->methods->getDeclaringMethodId($method_id); if (!$declaring_method_id) { return null; } $storage = $this->methods->getStorage($declaring_method_id); return $storage->location; } if (strpos($symbol, '$') !== false) { $storage = $this->properties->getStorage($symbol); return $storage->location; } [$fq_classlike_name, $const_name] = explode('::', $symbol); $class_constants = $this->classlikes->getConstantsForClass( $fq_classlike_name, ReflectionProperty::IS_PRIVATE ); if (!isset($class_constants[$const_name])) { return null; } return $class_constants[$const_name]->location; } if (strpos($symbol, '()')) { $file_storage = $this->file_storage_provider->get($file_path); $function_id = strtolower(substr($symbol, 0, -2)); if (isset($file_storage->functions[$function_id])) { return $file_storage->functions[$function_id]->location; } if (!$function_id) { return null; } return $this->functions->getStorage(null, $function_id)->location; } return $this->classlike_storage_provider->get($symbol)->location; } catch (UnexpectedValueException $e) { error_log($e->getMessage()); return null; } catch (InvalidArgumentException $e) { return null; } } /** * @return array{0: string, 1: Range}|null */ public function getReferenceAtPosition(string $file_path, Position $position): ?array { $is_open = 
$this->file_provider->isOpen($file_path); if (!$is_open) { throw new UnanalyzedFileException($file_path . ' is not open'); } $file_contents = $this->getFileContents($file_path); $offset = $position->toOffset($file_contents); [$reference_map, $type_map] = $this->analyzer->getMapsForFile($file_path); $reference = null; if (!$reference_map && !$type_map) { return null; } $reference_start_pos = null; $reference_end_pos = null; ksort($reference_map); foreach ($reference_map as $start_pos => [$end_pos, $possible_reference]) { if ($offset < $start_pos) { break; } if ($offset > $end_pos) { continue; } $reference_start_pos = $start_pos; $reference_end_pos = $end_pos; $reference = $possible_reference; } if ($reference === null || $reference_start_pos === null || $reference_end_pos === null) { return null; } $range = new Range( self::getPositionFromOffset($reference_start_pos, $file_contents), self::getPositionFromOffset($reference_end_pos, $file_contents) ); return [$reference, $range]; } /** * @return array{0: non-empty-string, 1: int, 2: Range}|null */ public function getFunctionArgumentAtPosition(string $file_path, Position $position): ?array { $is_open = $this->file_provider->isOpen($file_path); if (!$is_open) { throw new UnanalyzedFileException($file_path . ' is not open'); } $file_contents = $this->getFileContents($file_path); $offset = $position->toOffset($file_contents); [, , $argument_map] = $this->analyzer->getMapsForFile($file_path); $reference = null; $argument_number = null; if (!$argument_map) { return null; } $start_pos = null; $end_pos = null; ksort($argument_map); foreach ($argument_map as $start_pos => [$end_pos, $possible_reference, $possible_argument_number]) { if ($offset < $start_pos) { break; } if ($offset > $end_pos) { continue; } $reference = $possible_reference; $argument_number = $possible_argument_number; } if ($reference === null || $start_pos === null || $end_pos === null || $argument_number === null) { return null; } $range = new Range( self::getPositionFromOffset($start_pos, $file_contents), self::getPositionFromOffset($end_pos, $file_contents) ); return [$reference, $argument_number, $range]; } /** * @param non-empty-string $function_symbol */ public function getSignatureInformation( string $function_symbol, string $file_path = null ): ?SignatureInformation { $signature_label = ''; $signature_documentation = null; if (strpos($function_symbol, '::') !== false) { /** @psalm-suppress ArgumentTypeCoercion */ $method_id = new MethodIdentifier(...explode('::', $function_symbol)); $declaring_method_id = $this->methods->getDeclaringMethodId($method_id); if ($declaring_method_id === null) { return null; } $method_storage = $this->methods->getStorage($declaring_method_id); $params = $method_storage->params; $signature_label = $method_storage->cased_name; $signature_documentation = $method_storage->description; } else { try { if ($file_path) { $function_storage = $this->functions->getStorage( null, strtolower($function_symbol), dirname($file_path), $file_path ); } else { $function_storage = $this->functions->getStorage(null, strtolower($function_symbol)); } $params = $function_storage->params; $signature_label = $function_storage->cased_name; $signature_documentation = $function_storage->description; } catch (Exception $exception) { if (InternalCallMapHandler::inCallMap($function_symbol)) { $callables = InternalCallMapHandler::getCallablesFromCallMap($function_symbol); if (!$callables || !$callables[0]->params) { return null; } $params = $callables[0]->params; } else { return 
null; } } } $signature_label .= '('; $parameters = []; foreach ($params as $i => $param) { $parameter_label = ($param->type ?: 'mixed') . ' $' . $param->name; $parameters[] = new ParameterInformation( [ strlen($signature_label), strlen($signature_label) + strlen($parameter_label), ], $param->description ?? null ); $signature_label .= $parameter_label; if ($i < (count($params) - 1)) { $signature_label .= ', '; } } $signature_label .= ')'; return new SignatureInformation( $signature_label, $parameters, $signature_documentation ); } /** * @return array{0: string, 1: '->'|'::'|'['|'symbol', 2: int}|null */ public function getCompletionDataAtPosition(string $file_path, Position $position): ?array { $is_open = $this->file_provider->isOpen($file_path); if (!$is_open) { throw new UnanalyzedFileException($file_path . ' is not open'); } $file_contents = $this->getFileContents($file_path); $offset = $position->toOffset($file_contents); [$reference_map, $type_map] = $this->analyzer->getMapsForFile($file_path); if (!$reference_map && !$type_map) { return null; } krsort($type_map); foreach ($type_map as $start_pos => [$end_pos_excluding_whitespace, $possible_type]) { if ($offset < $start_pos) { continue; } $num_whitespace_bytes = preg_match('/\G\s+/', $file_contents, $matches, 0, $end_pos_excluding_whitespace) ? strlen($matches[0]) : 0; $end_pos = $end_pos_excluding_whitespace + $num_whitespace_bytes; if ($offset - $end_pos === 1) { $candidate_gap = substr($file_contents, $end_pos, 1); if ($candidate_gap === '[') { $gap = $candidate_gap; $recent_type = $possible_type; if ($recent_type === 'mixed') { return null; } return [$recent_type, $gap, $offset]; } } if ($offset - $end_pos === 2 || $offset - $end_pos === 3) { $candidate_gap = substr($file_contents, $end_pos, 2); if ($candidate_gap === '->' || $candidate_gap === '::') { $gap = $candidate_gap; $recent_type = $possible_type; if ($recent_type === 'mixed') { return null; } return [$recent_type, $gap, $offset]; } } } foreach ($reference_map as $start_pos => [$end_pos, $possible_reference]) { if ($offset < $start_pos) { continue; } // If the reference precedes a "::" then treat it as a class reference. if ($offset - $end_pos === 2 && substr($file_contents, $end_pos, 2) === '::') { return [$possible_reference, '::', $offset]; } // Only continue for references that are partial / don't exist. if ($possible_reference[0] !== '*') { continue; } if ($offset - $end_pos === 0) { $recent_type = $possible_reference; return [$recent_type, 'symbol', $offset]; } } return null; } public function getTypeContextAtPosition(string $file_path, Position $position): ?Union { $file_contents = $this->getFileContents($file_path); $offset = $position->toOffset($file_contents); [$reference_map, $type_map, $argument_map] = $this->analyzer->getMapsForFile($file_path); if (!$reference_map && !$type_map && !$argument_map) { return null; } foreach ($argument_map as $start_pos => [$end_pos, $function, $argument_num]) { if ($offset < $start_pos || $offset > $end_pos) { continue; } // First parameter to a function-like $function_storage = $this->getFunctionStorageForSymbol($file_path, $function . 
'()'); if (!$function_storage || !$function_storage->params || !isset($function_storage->params[$argument_num])) { return null; } return $function_storage->params[$argument_num]->type; } return null; } /** * @return list<CompletionItem> */ public function getCompletionItemsForClassishThing(string $type_string, string $gap): array { $completion_items = []; $type = Type::parseString($type_string); foreach ($type->getAtomicTypes() as $atomic_type) { if ($atomic_type instanceof TNamedObject) { try { $class_storage = $this->classlike_storage_provider->get($atomic_type->value); foreach ($class_storage->appearing_method_ids as $declaring_method_id) { $method_storage = $this->methods->getStorage($declaring_method_id); if ($method_storage->is_static || $gap === '->') { $completion_item = new CompletionItem( $method_storage->cased_name, CompletionItemKind::METHOD, (string)$method_storage, $method_storage->description, (string)$method_storage->visibility, $method_storage->cased_name, $method_storage->cased_name . (count($method_storage->params) !== 0 ? '($0)' : '()'), null, null, new Command('Trigger parameter hints', 'editor.action.triggerParameterHints'), null, 2 ); $completion_item->insertTextFormat = InsertTextFormat::SNIPPET; $completion_items[] = $completion_item; } } foreach ($class_storage->declaring_property_ids as $property_name => $declaring_class) { $property_storage = $this->properties->getStorage( $declaring_class . '::$' . $property_name ); if ($property_storage->is_static || $gap === '->') { $completion_items[] = new CompletionItem( '$' . $property_name, CompletionItemKind::PROPERTY, $property_storage->getInfo(), $property_storage->description, (string)$property_storage->visibility, $property_name, ($gap === '::' ? '$' : '') . $property_name ); } } foreach ($class_storage->constants as $const_name => $const) { $completion_items[] = new CompletionItem( $const_name, CompletionItemKind::VARIABLE, 'const ' . $const_name, $const->description, null, $const_name, $const_name ); } } catch (Exception $e) { error_log($e->getMessage()); continue; } } } return $completion_items; } /** * @return list<CompletionItem> */ public function getCompletionItemsForPartialSymbol( string $type_string, int $offset, string $file_path ): array { $fq_suggestion = false; if (($type_string[1] ?? '') === '\\') { $fq_suggestion = true; } $matching_classlike_names = $this->classlikes->getMatchingClassLikeNames($type_string); $completion_items = []; $file_storage = $this->file_storage_provider->get($file_path); $aliases = null; foreach ($file_storage->classlikes_in_file as $fq_class_name => $_) { try { $class_storage = $this->classlike_storage_provider->get($fq_class_name); } catch (Exception $e) { continue; } if (!$class_storage->stmt_location) { continue; } if ($offset > $class_storage->stmt_location->raw_file_start && $offset < $class_storage->stmt_location->raw_file_end ) { $aliases = $class_storage->aliases; break; } } if (!$aliases) { foreach ($file_storage->namespace_aliases as $namespace_start => $namespace_aliases) { if ($namespace_start < $offset) { $aliases = $namespace_aliases; break; } } if (!$aliases) { $aliases = $file_storage->aliases; } } foreach ($matching_classlike_names as $fq_class_name) { $extra_edits = []; $insertion_text = Type::getStringFromFQCLN( $fq_class_name, $aliases && $aliases->namespace ? $aliases->namespace : null, $aliases->uses_flipped ?? [], null ); if ($aliases && !$fq_suggestion && $aliases->namespace && $insertion_text === '\\' . 
$fq_class_name && $aliases->namespace_first_stmt_start ) { $file_contents = $this->getFileContents($file_path); $class_name = preg_replace('/^.*\\\/', '', $fq_class_name); if ($aliases->uses_end) { $position = self::getPositionFromOffset($aliases->uses_end, $file_contents); $extra_edits[] = new TextEdit( new Range( $position, $position ), "\n" . 'use ' . $fq_class_name . ';' ); } else { $position = self::getPositionFromOffset($aliases->namespace_first_stmt_start, $file_contents); $extra_edits[] = new TextEdit( new Range( $position, $position ), 'use ' . $fq_class_name . ';' . "\n" . "\n" ); } $insertion_text = $class_name; } try { $class_storage = $this->classlike_storage_provider->get($fq_class_name); $description = $class_storage->description; } catch (Exception $e) { $description = null; } $completion_items[] = new CompletionItem( $fq_class_name, CompletionItemKind::CLASS_, null, $description, null, $fq_class_name, $insertion_text, null, $extra_edits ); } $functions = $this->functions->getMatchingFunctionNames($type_string, $offset, $file_path, $this); $namespace_map = []; if ($aliases) { $namespace_map += $aliases->uses_flipped; if ($aliases->namespace) { $namespace_map[$aliases->namespace] = ''; } } // Sort the map by longest first, so we replace most specific // used namespaces first. ksort($namespace_map); $namespace_map = array_reverse($namespace_map); foreach ($functions as $function_lowercase => $function) { // Transform FQFN relative to all uses namespaces $function_name = $function->cased_name; if (!$function_name) { continue; } $in_namespace_map = false; foreach ($namespace_map as $namespace_name => $namespace_alias) { if (strpos($function_lowercase, $namespace_name . '\\') === 0) { $function_name = $namespace_alias . '\\' . substr($function_name, strlen($namespace_name) + 1); $in_namespace_map = true; } } // If the function is not use'd, and it's not a global function // prepend it with a backslash. if (!$in_namespace_map && strpos($function_name, '\\') !== false) { $function_name = '\\' . $function_name; } $completion_items[] = new CompletionItem( $function_name, CompletionItemKind::FUNCTION, $function->getSignature(false), $function->description, null, $function_name, $function_name . (count($function->params) !== 0 ? '($0)' : '()'), null, null, new Command('Trigger parameter hints', 'editor.action.triggerParameterHints'), null, 2 ); } return $completion_items; } /** * @return list<CompletionItem> */ public function getCompletionItemsForType(Union $type): array { $completion_items = []; foreach ($type->getAtomicTypes() as $atomic_type) { if ($atomic_type instanceof TBool) { $bools = (string) $atomic_type === 'bool' ? ['true', 'false'] : [(string) $atomic_type]; foreach ($bools as $property_name) { $completion_items[] = new CompletionItem( $property_name, CompletionItemKind::VALUE, 'bool', null, null, null, $property_name ); } } elseif ($atomic_type instanceof TLiteralString) { $completion_items[] = new CompletionItem( $atomic_type->value, CompletionItemKind::VALUE, $atomic_type->getId(), null, null, null, "'$atomic_type->value'" ); } elseif ($atomic_type instanceof TLiteralInt) { $completion_items[] = new CompletionItem( (string) $atomic_type->value, CompletionItemKind::VALUE, $atomic_type->getId(), null, null, null, (string) $atomic_type->value ); } elseif ($atomic_type instanceof TClassConstant) { $const = $atomic_type->fq_classlike_name . '::' . 
$atomic_type->const_name; $completion_items[] = new CompletionItem( $const, CompletionItemKind::VALUE, $atomic_type->getId(), null, null, null, $const ); } } return $completion_items; } /** * @return list<CompletionItem> */ public function getCompletionItemsForArrayKeys( string $type_string ): array { $completion_items = []; $type = Type::parseString($type_string); foreach ($type->getAtomicTypes() as $atomic_type) { if ($atomic_type instanceof TKeyedArray) { foreach ($atomic_type->properties as $property_name => $property) { $completion_items[] = new CompletionItem( (string) $property_name, CompletionItemKind::PROPERTY, (string) $property, null, null, null, "'$property_name'" ); } } } return $completion_items; } private static function getPositionFromOffset(int $offset, string $file_contents): Position { $file_contents = substr($file_contents, 0, $offset); $offsetLength = $offset - strlen($file_contents); //PHP 8.0: Argument #3 ($offset) must be contained in argument #1 ($haystack) if (($textlen = strlen($file_contents)) < $offsetLength) { $offsetLength = $textlen; } $before_newline_count = strrpos($file_contents, "\n", $offsetLength); return new Position( substr_count($file_contents, "\n"), $offset - (int)$before_newline_count - 1 ); } public function addTemporaryFileChanges(string $file_path, string $new_content): void { $this->file_provider->addTemporaryFileChanges($file_path, $new_content); } public function removeTemporaryFileChanges(string $file_path): void { $this->file_provider->removeTemporaryFileChanges($file_path); } /** * Checks if type is a subtype of other * * Given two types, checks if `$input_type` is a subtype of `$container_type`. * If you consider `Union` as a set of types, this will tell you * if `$input_type` is fully contained in `$container_type`, * * $input_type ⊆ $container_type * * Useful for emitting issues like InvalidArgument, where argument at the call site * should be a subset of the function parameter type. */ public function isTypeContainedByType( Union $input_type, Union $container_type ): bool { return UnionTypeComparator::isContainedBy($this, $input_type, $container_type); } /** * Checks if type has any part that is a subtype of other * * Given two types, checks if *any part* of `$input_type` is a subtype of `$container_type`. * If you consider `Union` as a set of types, this will tell you if intersection * of `$input_type` with `$container_type` is not empty. * * $input_type ∩ $container_type ≠ ∅ , e.g. they are not disjoint. * * Useful for emitting issues like PossiblyInvalidArgument, where argument at the call * site should be a subtype of the function parameter type, but it's has some types that are * not a subtype of the required type. */ public function canTypeBeContainedByType( Union $input_type, Union $container_type ): bool { return UnionTypeComparator::canBeContainedBy($this, $input_type, $container_type); } /** * Extracts key and value types from a traversable object (or iterable) * * Given an iterable type (*but not TArray*) returns a tuple of it's key/value types. * First element of the tuple holds key type, second has the value type. 
* * Example: * ```php * $codebase->getKeyValueParamsForTraversableObject(Type::parseString('iterable<int,string>')) * // returns [Union(TInt), Union(TString)] * ``` * * @return array{Union, Union} */ public function getKeyValueParamsForTraversableObject(Atomic $type): array { $key_type = null; $value_type = null; ForeachAnalyzer::getKeyValueParamsForTraversableObject($type, $this, $key_type, $value_type); return [ $key_type ?? Type::getMixed(), $value_type ?? Type::getMixed(), ]; } /** * @param array<string, mixed> $phantom_classes * @psalm-suppress PossiblyUnusedMethod part of the public API */ public function queueClassLikeForScanning( string $fq_classlike_name, bool $analyze_too = false, bool $store_failure = true, array $phantom_classes = [] ): void { $this->scanner->queueClassLikeForScanning($fq_classlike_name, $analyze_too, $store_failure, $phantom_classes); } /** * @param array<string> $taints * * @psalm-suppress PossiblyUnusedMethod */ public function addTaintSource( Union $expr_type, string $taint_id, array $taints = TaintKindGroup::ALL_INPUT, ?CodeLocation $code_location = null ): void { if (!$this->taint_flow_graph) { return; } $source = new TaintSource( $taint_id, $taint_id, $code_location, null, $taints ); $this->taint_flow_graph->addSource($source); $expr_type->parent_nodes = [ $source->id => $source, ]; } /** * @param array<string> $taints * * @psalm-suppress PossiblyUnusedMethod */ public function addTaintSink( string $taint_id, array $taints = TaintKindGroup::ALL_INPUT, ?CodeLocation $code_location = null ): void { if (!$this->taint_flow_graph) { return; } $sink = new TaintSink( $taint_id, $taint_id, $code_location, null, $taints ); $this->taint_flow_graph->addSink($sink); } public function getMinorAnalysisPhpVersion(): int { return self::transformPhpVersionId($this->analysis_php_version_id % 10_000, 100); } public function getMajorAnalysisPhpVersion(): int { return self::transformPhpVersionId($this->analysis_php_version_id, 10_000); } public static function transformPhpVersionId(int $php_version_id, int $div): int { return intdiv($php_version_id, $div); } }
1
12524
This isn't really that closely related to everything else in this PR, but I'm pretty sure it's a correct change. I'm not familiar enough with the taint feature to know how to test it, but I worry that without this change some of the taint stuff might mess up the `parent_nodes` for a reference.
vimeo-psalm
php
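The one-line psalm patch above matters because a Union's parent_nodes may already contain entries (for example, ones tracking a reference) by the time addTaintSource runs: wholesale assignment throws those away, while keyed insertion merges the new source in. A small dict sketch of the difference, with keys and values invented for illustration:

# Stand-in for Union::$parent_nodes; keys/values are invented.
parent_nodes = {"ref-node": "parent tracked for a reference"}

# Old behaviour: wholesale assignment drops anything already tracked.
overwritten = {"taint-source-1": "new taint source"}

# Patched behaviour: keyed insertion keeps existing parents too.
merged = dict(parent_nodes)
merged["taint-source-1"] = "new taint source"

print(overwritten)  # {'taint-source-1': ...}; 'ref-node' is lost
print(merged)       # both 'ref-node' and 'taint-source-1' survive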
@@ -6,8 +6,9 @@ package javaslang; import javaslang.collection.Iterator; -import javaslang.collection.List; import javaslang.collection.Seq; +import javaslang.collection.Stream; +import javaslang.collection.Vector; import javaslang.control.Option; import java.io.IOException;
1
/* / \____ _ _ ____ ______ / \ ____ __ _______ * / / \/ \ / \/ \ / /\__\/ // \/ \ // /\__\ JΛVΛSLΛNG * _/ / /\ \ \/ / /\ \\__\\ \ // /\ \ /\\/ \ /__\ \ Copyright 2014-2016 Javaslang, http://javaslang.io * /___/\_/ \_/\____/\_/ \_/\__\/__/\__\_/ \_// \__/\_____/ Licensed under the Apache License, Version 2.0 */ package javaslang; import javaslang.collection.Iterator; import javaslang.collection.List; import javaslang.collection.Seq; import javaslang.control.Option; import java.io.IOException; import java.io.ObjectOutputStream; import java.io.Serializable; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Proxy; import java.util.NoSuchElementException; import java.util.Objects; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; /** * Represents a lazy evaluated value. Compared to a Supplier, Lazy is memoizing, i.e. it evaluates only once and * therefore is referential transparent. * * <pre> * <code> * final Lazy&lt;Double&gt; l = Lazy.of(Math::random); * l.isEvaluated(); // = false * l.get(); // = 0.123 (random generated) * l.isEvaluated(); // = true * l.get(); // = 0.123 (memoized) * </code> * </pre> * * Since 2.0.0 you may also create a <em>real</em> lazy value (works only with interfaces): * * <pre><code>final CharSequence chars = Lazy.val(() -&gt; "Yay!", CharSequence.class);</code></pre> * * @author Daniel Dietrich * @since 1.2.1 */ // DEV-NOTE: No flatMap and orElse because this more like a Functor than a Monad. // It represents a value rather than capturing a specific state. public final class Lazy<T> implements Value<T>, Supplier<T>, Serializable { private static final long serialVersionUID = 1L; // read http://javarevisited.blogspot.de/2014/05/double-checked-locking-on-singleton-in-java.html private transient volatile Supplier<? extends T> supplier; private T value; // will behave as a volatile in reality, because a supplier volatile read will update all fields (see https://www.cs.umd.edu/~pugh/java/memoryModel/jsr-133-faq.html#volatile) // should not be called directly private Lazy(Supplier<? extends T> supplier) { this.supplier = supplier; } /** * Narrows a widened {@code Lazy<? extends T>} to {@code Lazy<T>} * by performing a type-safe cast. This is eligible because immutable/read-only * collections are covariant. * * @param lazy A {@code Lazy}. * @param <T> Component type of the {@code Lazy}. * @return the given {@code lazy} instance as narrowed type {@code Lazy<T>}. */ @SuppressWarnings("unchecked") public static <T> Lazy<T> narrow(Lazy<? extends T> lazy) { return (Lazy<T>) lazy; } /** * Creates a {@code Lazy} that requests its value from a given {@code Supplier}. The supplier is asked only once, * the value is memoized. * * @param <T> type of the lazy value * @param supplier A supplier * @return A new instance of Lazy */ @SuppressWarnings("unchecked") public static <T> Lazy<T> of(Supplier<? extends T> supplier) { Objects.requireNonNull(supplier, "supplier is null"); if (supplier instanceof Lazy) { return (Lazy<T>) supplier; } else { return new Lazy<>(supplier); } } /** * Reduces many {@code Lazy} values into a single {@code Lazy} by transforming an * {@code Iterable<Lazy<? extends T>>} into a {@code Lazy<Seq<T>>}. * * @param <T> Type of the lazy values. * @param values An iterable of lazy values. * @return A lazy sequence of values. * @throws NullPointerException if values is null */ public static <T> Lazy<Seq<T>> sequence(Iterable<? extends Lazy<? 
extends T>> values) { Objects.requireNonNull(values, "values is null"); return Lazy.of(() -> List.ofAll(values).map(Lazy::get)); } /** * Creates a real _lazy value_ of type {@code T}, backed by a {@linkplain java.lang.reflect.Proxy} which delegates * to a {@code Lazy} instance. * * @param supplier A supplier * @param type An interface * @param <T> type of the lazy value * @return A new instance of T */ @GwtIncompatible("reflection is not supported") @SuppressWarnings("unchecked") public static <T> T val(Supplier<? extends T> supplier, Class<T> type) { Objects.requireNonNull(supplier, "supplier is null"); Objects.requireNonNull(type, "type is null"); if (!type.isInterface()) { throw new IllegalArgumentException("type has to be an interface"); } final Lazy<T> lazy = Lazy.of(supplier); final InvocationHandler handler = (proxy, method, args) -> method.invoke(lazy.get(), args); return (T) Proxy.newProxyInstance(type.getClassLoader(), new Class<?>[] { type }, handler); } public Option<T> filter(Predicate<? super T> predicate) { final T v = get(); return predicate.test(v) ? Option.some(v) : Option.none(); } /** * Evaluates this lazy value and caches it, when called the first time. * On subsequent calls, returns the cached value. * * @return the lazy evaluated value * @throws NoSuchElementException if this value is undefined */ @Override public T get() { return (supplier == null) ? value : computeValue(); } private synchronized T computeValue() { final Supplier<? extends T> s = supplier; if (s != null) { value = s.get(); supplier = null; } return value; } @Override public boolean isEmpty() { return false; } /** * Checks, if this lazy value is evaluated. * <p> * Note: A value is internally evaluated (once) by calling {@link #get()}. * * @return true, if the value is evaluated, false otherwise. * @throws UnsupportedOperationException if this value is undefined */ public boolean isEvaluated() { return supplier == null; } @Override public boolean isSingleValued() { return true; } @Override public Iterator<T> iterator() { return Iterator.of(get()); } @Override public <U> Lazy<U> map(Function<? super T, ? extends U> mapper) { return Lazy.of(() -> mapper.apply(get())); } @Override public Lazy<T> peek(Consumer<? super T> action) { action.accept(get()); return this; } /** * Transforms this {@code Lazy}. * * @param f A transformation * @param <U> Type of transformation result * @return An instance of type {@code U} * @throws NullPointerException if {@code f} is null */ public <U> U transform(Function<? super Lazy<T>, ? extends U> f) { Objects.requireNonNull(f, "f is null"); return f.apply(this); } @Override public String stringPrefix() { return "Lazy"; } @Override public boolean equals(Object o) { return (o == this) || (o instanceof Lazy && Objects.equals(((Lazy<?>) o).get(), get())); } @Override public int hashCode() { return Objects.hash(get()); } @Override public String toString() { return stringPrefix() + "(" + (!isEvaluated() ? "?" : value) + ")"; } /** * Ensures that the value is evaluated before serialization. * * @param s An object serialization stream. * @throws java.io.IOException If an error occurs writing to the stream. */ @GwtIncompatible("The Java serialization protocol is explicitly not supported") private void writeObject(ObjectOutputStream s) throws IOException { get(); // evaluates the lazy value if it isn't evaluated yet! s.defaultWriteObject(); } }
1
10,395
[Checkstyle] ERROR: Unused import - javaslang.collection.Stream.
vavr-io-vavr
java
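The Lazy value stored in the row above memoizes its supplier: the supplier is asked once on the first get(), and every later call returns the cached result, exactly as the class javadoc describes. A minimal usage sketch, assuming the javaslang 2.x artifact is on the classpath (the class name LazyDemo is ours, purely illustrative):

import javaslang.Lazy;

public class LazyDemo {
    public static void main(String[] args) {
        Lazy<Double> lazy = Lazy.of(Math::random);
        System.out.println(lazy.isEvaluated()); // false: nothing computed yet
        double first = lazy.get();              // supplier runs exactly once here
        double second = lazy.get();             // memoized value, no recomputation
        System.out.println(first == second);    // true
        System.out.println(lazy.isEvaluated()); // true
    }
}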
@@ -126,7 +126,7 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 { ,"//result/doc/str[@name='ssto']" ,"//result/doc/str[@name='subject']" - ,"//result/doc[count(*)=4]" + ,"//result/doc[count(*)=5]" ); } }
1
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.search; import java.util.List; import java.util.Arrays; import java.util.Collections; import java.util.Random; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.cloud.TestCloudPseudoReturnFields; import org.apache.solr.schema.SchemaField; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.commons.lang.StringUtils; import org.junit.Before; import org.junit.BeforeClass; /** @see TestCloudPseudoReturnFields */ public class TestPseudoReturnFields extends SolrTestCaseJ4 { // :TODO: datatypes produced by the functions used may change /** * values of the fl param that mean all real fields */ public static String[] ALL_REAL_FIELDS = new String[] { "", "*" }; /** * values of the fl param that mean all real fields and score */ public static String[] SCORE_AND_REAL_FIELDS = new String[] { "score,*", "*,score" }; @BeforeClass public static void beforeTests() throws Exception { initCore("solrconfig-tlog.xml","schema-psuedo-fields.xml"); assertU(adoc("id", "42", "val_i", "1", "ssto", "X", "subject", "aaa")); assertU(adoc("id", "43", "val_i", "9", "ssto", "X", "subject", "bbb")); assertU(adoc("id", "44", "val_i", "4", "ssto", "X", "subject", "aaa")); assertU(adoc("id", "45", "val_i", "6", "ssto", "X", "subject", "aaa")); assertU(adoc("id", "46", "val_i", "3", "ssto", "X", "subject", "ggg")); assertU(commit()); } @Before private void addUncommittedDoc99() throws Exception { // uncommitted doc in transaction log at start of every test // Even if an RTG causes ulog to re-open realtime searcher, next test method // will get another copy of doc 99 in the ulog assertU(adoc("id", "99", "val_i", "1", "ssto", "X", "subject", "uncommitted")); } public void testMultiValued() throws Exception { // the response writers used to consult isMultiValued on the field // but this doesn't work when you alias a single valued field to // a multi valued field (the field value is copied first, then // if the type lookup is done again later, we get the wrong thing). 
SOLR-4036 // score as psuedo field - precondition checks for (String name : new String[] {"score", "val_ss"}) { SchemaField sf = h.getCore().getLatestSchema().getFieldOrNull(name); assertNotNull("Test depends on a (dynamic) field mtching '"+name+ "', schema was changed out from under us!",sf); assertTrue("Test depends on a multivalued dynamic field matching '"+name+ "', schema was changed out from under us!", sf.multiValued()); } // score as psuedo field assertJQ(req("q","*:*", "fq", "id:42", "fl","id,score") ,"/response/docs==[{'id':'42','score':1.0}]"); // single value int using alias that matches multivalued dynamic field assertJQ(req("q","id:42", "fl","val_ss:val_i, val2_ss:10") ,"/response/docs==[{'val2_ss':10,'val_ss':1}]" ); assertJQ(req("qt","/get", "id","42", "fl","val_ss:val_i, val2_ss:10") ,"/doc=={'val2_ss':10,'val_ss':1}" ); } public void testMultiValuedRTG() throws Exception { // single value int using alias that matches multivalued dynamic field - via RTG assertJQ(req("qt","/get", "id","42", "fl","val_ss:val_i, val2_ss:10, subject") ,"/doc=={'val2_ss':10,'val_ss':1, 'subject':'aaa'}" ); // also check real-time-get from transaction log assertJQ(req("qt","/get", "id","99", "fl","val_ss:val_i, val2_ss:10, subject") ,"/doc=={'val2_ss':10,'val_ss':1,'subject':'uncommitted'}" ); } public void testAllRealFields() throws Exception { for (String fl : ALL_REAL_FIELDS) { assertQ("fl="+fl+" ... all real fields", req("q","*:*", "rows", "1", "fl",fl) ,"//result[@numFound='5']" ,"//result/doc/str[@name='id']" ,"//result/doc/int[@name='val_i']" ,"//result/doc/str[@name='ssto']" ,"//result/doc/str[@name='subject']" ,"//result/doc[count(*)=4]" ); } } public void testAllRealFieldsRTG() throws Exception { // shouldn't matter if we use RTG (committed or otherwise) for (String fl : ALL_REAL_FIELDS) { for (String id : Arrays.asList("42","99")) { assertQ("id="+id+", fl="+fl+" ... all real fields", req("qt","/get","id",id, "wt","xml","fl",fl) ,"count(//doc)=1" ,"//doc/str[@name='id']" ,"//doc/int[@name='val_i']" ,"//doc/str[@name='ssto']" ,"//doc/str[@name='subject']" ,"//doc[count(*)=4]" ); } } } public void testFilterAndOneRealFieldRTG() throws Exception { // shouldn't matter if we use RTG (committed or otherwise) // only one of these docs should match... assertQ("RTG w/ 2 ids & fq that only matches 1 uncommitted doc", req("qt","/get","ids","42,99", "wt","xml","fl","id,val_i", "fq","{!field f='subject' v=$my_var}","my_var","uncommitted") ,"//result[@numFound='1']" ,"//result/doc/str[@name='id'][.='99']" ,"//result/doc/int[@name='val_i'][.='1']" ,"//result/doc[count(*)=2]" ); } public void testScoreAndAllRealFields() throws Exception { for (String fl : SCORE_AND_REAL_FIELDS) { assertQ("fl="+fl+" ... score and real fields", req("q","*:*", "rows", "1", "fl",fl) ,"//result[@numFound='5']" ,"//result/doc/str[@name='id']" ,"//result/doc/int[@name='val_i']" ,"//result/doc/str[@name='ssto']" ,"//result/doc/str[@name='subject']" ,"//result/doc/float[@name='score']" ,"//result/doc[count(*)=5]" ); } } public void testScoreAndAllRealFieldsRTG() throws Exception { // if we use RTG (committed or otherwise) score should be ignored for (String fl : SCORE_AND_REAL_FIELDS) { for (String id : Arrays.asList("42","99")) { assertQ("id="+id+", fl="+fl+" ... 
score real fields", req("qt","/get","id",id, "wt","xml","fl",fl) ,"count(//doc)=1" ,"//doc/str[@name='id']" ,"//doc/int[@name='val_i']" ,"//doc/str[@name='ssto']" ,"//doc/str[@name='subject']" ,"//doc[count(*)=4]" ); } } } public void testScoreAndExplicitRealFields() throws Exception { assertQ("fl=score,val_i", req("q","*:*", "rows", "1", "fl","score,val_i") ,"//result[@numFound='5']" ,"//result/doc/int[@name='val_i']" ,"//result/doc/float[@name='score']" ,"//result/doc[count(*)=2]" ); assertQ("fl=score&fl=val_i", req("q","*:*", "rows", "1", "fl","score","fl","val_i") ,"//result[@numFound='5']" ,"//result/doc/int[@name='val_i']" ,"//result/doc/float[@name='score']" ,"//result/doc[count(*)=2]" ); assertQ("fl=val_i", req("q","*:*", "rows", "1", "fl","val_i") ,"//result[@numFound='5']" ,"//result/doc/int[@name='val_i']" ,"//result/doc[count(*)=1]" ); } public void testScoreAndExplicitRealFieldsRTG() throws Exception { // if we use RTG (committed or otherwise) score should be ignored for (String id : Arrays.asList("42","99")) { assertQ("id="+id+", fl=score,val_i", req("qt","/get","id",id, "wt","xml", "fl","score,val_i") ,"count(//doc)=1" ,"//doc/int[@name='val_i']" ,"//doc[count(*)=1]" ); } } public void testFunctions() throws Exception { assertQ("fl=log(val_i)", req("q","*:*", "rows", "1", "fl","log(val_i)") ,"//result[@numFound='5']" ,"//result/doc/double[@name='log(val_i)']" ,"//result/doc[count(*)=1]" ); assertQ("fl=log(val_i),abs(val_i)", req("q","*:*", "rows", "1", "fl","log(val_i),abs(val_i)") ,"//result[@numFound='5']" ,"//result/doc/double[@name='log(val_i)']" ,"//result/doc/float[@name='abs(val_i)']" ,"//result/doc[count(*)=2]" ); assertQ("fl=log(val_i)&fl=abs(val_i)", req("q","*:*", "rows", "1", "fl","log(val_i)","fl","abs(val_i)") ,"//result[@numFound='5']" ,"//result/doc/double[@name='log(val_i)']" ,"//result/doc/float[@name='abs(val_i)']" ,"//result/doc[count(*)=2]" ); } public void testFunctionsRTG() throws Exception { // if we use RTG (committed or otherwise) functions should behave the same for (String id : Arrays.asList("42","99")) { for (SolrParams p : Arrays.asList(params("qt","/get","id",id,"wt","xml", "fl","log(val_i),abs(val_i)"), params("qt","/get","id",id,"wt","xml", "fl","log(val_i)","fl", "abs(val_i)"))) { assertQ("id="+id+", params="+p, req(p) ,"count(//doc)=1" // true for both these specific docs ,"//doc/double[@name='log(val_i)'][.='0.0']" ,"//doc/float[@name='abs(val_i)'][.='1.0']" ,"//doc[count(*)=2]" ); } } } public void testFunctionsAndExplicit() throws Exception { assertQ("fl=log(val_i),val_i", req("q","*:*", "rows", "1", "fl","log(val_i),val_i") ,"//result[@numFound='5']" ,"//result/doc/double[@name='log(val_i)']" ,"//result/doc/int[@name='val_i']" ,"//result/doc[count(*)=2]" ); assertQ("fl=log(val_i)&fl=val_i", req("q","*:*", "rows", "1", "fl","log(val_i)","fl","val_i") ,"//result[@numFound='5']" ,"//result/doc/double[@name='log(val_i)']" ,"//result/doc/int[@name='val_i']" ,"//result/doc[count(*)=2]" ); } public void testFunctionsAndExplicitRTG() throws Exception { // shouldn't matter if we use RTG (committed or otherwise) for (String id : Arrays.asList("42","99")) { for (SolrParams p : Arrays.asList(params("fl","log(val_i),val_i"), params("fl","log(val_i)","fl","val_i"))) { assertQ(id + " " + p, req(p, "qt","/get", "wt","xml","id",id) ,"count(//doc)=1" // true for both these specific docs ,"//doc/double[@name='log(val_i)'][.='0.0']" ,"//doc/int[@name='val_i'][.='1']" ,"//doc[count(*)=2]" ); } } } public void testFunctionsAndScore() throws Exception { 
assertQ("fl=log(val_i),score", req("q","*:*", "rows", "1", "fl","log(val_i),score") ,"//result[@numFound='5']" ,"//result/doc/float[@name='score']" ,"//result/doc/double[@name='log(val_i)']" ,"//result/doc[count(*)=2]" ); assertQ("fl=log(val_i)&fl=score", req("q","*:*", "rows", "1", "fl","log(val_i)","fl","score") ,"//result[@numFound='5']" ,"//result/doc/float[@name='score']" ,"//result/doc/double[@name='log(val_i)']" ,"//result/doc[count(*)=2]" ); assertQ("fl=score,log(val_i),abs(val_i)", req("q","*:*", "rows", "1", "fl","score,log(val_i),abs(val_i)") ,"//result[@numFound='5']" ,"//result/doc/float[@name='score']" ,"//result/doc/double[@name='log(val_i)']" ,"//result/doc/float[@name='abs(val_i)']" ,"//result/doc[count(*)=3]" ); assertQ("fl=score&fl=log(val_i)&fl=abs(val_i)", req("q","*:*", "rows", "1", "fl","score","fl","log(val_i)","fl","abs(val_i)") ,"//result[@numFound='5']" ,"//result/doc/float[@name='score']" ,"//result/doc/double[@name='log(val_i)']" ,"//result/doc/float[@name='abs(val_i)']" ,"//result/doc[count(*)=3]" ); } public void testFunctionsAndScoreRTG() throws Exception { // if we use RTG (committed or otherwise) score should be ignored for (String id : Arrays.asList("42","99")) { for (SolrParams p : Arrays.asList(params("fl","score","fl","log(val_i)","fl","abs(val_i)"), params("fl","score","fl","log(val_i),abs(val_i)"), params("fl","score,log(val_i)","fl","abs(val_i)"), params("fl","score,log(val_i),abs(val_i)"))) { assertQ("id="+id+", p="+p, req(p, "qt","/get","id",id, "wt","xml") ,"count(//doc)=1" ,"//doc/double[@name='log(val_i)']" ,"//doc/float[@name='abs(val_i)'][.='1.0']" ,"//doc[count(*)=2]" ); } } } public void testGlobs() throws Exception { assertQ("fl=val_*", req("q","*:*", "rows", "1", "fl","val_*") ,"//result[@numFound='5']" ,"//result/doc/int[@name='val_i']" ,"//result/doc[count(*)=1]" ); for (SolrParams p : Arrays.asList(params("q", "*:*", "rows", "1", "fl","val_*,subj*,ss*"), params("q", "*:*", "rows", "1", "fl","val_*","fl","subj*,ss*"), params("q", "*:*", "rows", "1", "fl","val_*","fl","subj*","fl","ss*"))) { assertQ(p.toString(), req(p) ,"//result[@numFound='5']" ,"//result/doc/int[@name='val_i']" ,"//result/doc/str[@name='subject']" ,"//result/doc/str[@name='ssto'][.='X']" ,"//result/doc[count(*)=3]" ); } } public void testGlobsRTG() throws Exception { // behavior shouldn't matter if we are committed or uncommitted for (String id : Arrays.asList("42","99")) { assertQ(id + ": fl=val_*", req("qt","/get","id",id, "wt","xml", "fl","val_*") ,"count(//doc)=1" ,"//doc/int[@name='val_i'][.=1]" ,"//doc[count(*)=1]" ); for (SolrParams p : Arrays.asList(params("fl","val_*,subj*,ss*"), params("fl","val_*","fl","subj*,ss*"))) { assertQ(id + ": " + p, req(p, "qt","/get","id",id, "wt","xml") ,"count(//doc)=1" ,"//doc/int[@name='val_i'][.=1]" ,"//doc/str[@name='subject']" // value differs between docs ,"//doc/str[@name='ssto'][.='X']" ,"//doc[count(*)=3]" ); } } } public void testGlobsAndExplicit() throws Exception { assertQ("fl=val_*,id", req("q","*:*", "rows", "1", "fl","val_*,id") ,"//result[@numFound='5']" ,"//result/doc/int[@name='val_i']" ,"//result/doc/str[@name='id']" ,"//result/doc[count(*)=2]" ); for (SolrParams p : Arrays.asList(params("fl","val_*,subj*,id"), params("fl","val_*","fl","subj*","fl","id"), params("fl","val_*","fl","subj*,id"))) { assertQ("" + p, req(p, "q","*:*", "rows", "1") ,"//result[@numFound='5']" ,"//result/doc/int[@name='val_i']" ,"//result/doc/str[@name='subject']" ,"//result/doc/str[@name='id']" ,"//result/doc[count(*)=3]" ); } } 
public void testGlobsAndExplicitRTG() throws Exception { // behavior shouldn't matter if we are committed or uncommitted for (String id : Arrays.asList("42","99")) { assertQ(id + " + fl=val_*,id", req("qt","/get","id",id, "wt","xml", "fl","val_*,id") ,"count(//doc)=1" ,"//doc/int[@name='val_i'][.=1]" ,"//doc/str[@name='id']" ,"//doc[count(*)=2]" ); for (SolrParams p : Arrays.asList(params("fl","val_*,subj*,id"), params("fl","val_*","fl","subj*","fl","id"), params("fl","val_*","fl","subj*,id"))) { assertQ(id + " + " + p, req(p, "qt","/get","id",id, "wt","xml") ,"count(//doc)=1" ,"//doc/int[@name='val_i'][.=1]" ,"//doc/str[@name='subject']" ,"//doc/str[@name='id']" ,"//doc[count(*)=3]" ); } } } public void testGlobsAndScore() throws Exception { assertQ("fl=val_*,score", req("q","*:*", "rows", "1", "fl","val_*,score", "indent", "true") ,"//result[@numFound='5']" ,"//result/doc/float[@name='score']" ,"//result/doc/int[@name='val_i']" ,"//result/doc[count(*)=2]" ); for (SolrParams p : Arrays.asList(params("fl","val_*,subj*,score"), params("fl","val_*","fl","subj*","fl","score"), params("fl","val_*","fl","subj*,score"))) { assertQ("" + p, req(p, "q","*:*", "rows", "1") ,"//result[@numFound='5']" ,"//result/doc/float[@name='score']" ,"//result/doc/int[@name='val_i']" ,"//result/doc/str[@name='subject']" ,"//result/doc[count(*)=3]" ); } } public void testGlobsAndScoreRTG() throws Exception { // behavior shouldn't matter if we are committed or uncommitted, score should be ignored for (String id : Arrays.asList("42","99")) { assertQ(id + ": fl=val_*,score", req("qt","/get","id",id, "wt","xml", "fl","val_*,score") ,"count(//doc)=1" ,"//doc/int[@name='val_i']" ,"//doc[count(*)=1]" ); for (SolrParams p : Arrays.asList(params("fl","val_*,subj*,score"), params("fl","val_*","fl","subj*","fl","score"), params("fl","val_*","fl","subj*,score"))) { assertQ("" + p, req(p, "qt","/get","id",id, "wt","xml") ,"count(//doc)=1" ,"//doc/int[@name='val_i']" ,"//doc/str[@name='subject']" ,"//doc[count(*)=2]" ); } } } public void testAugmenters() throws Exception { assertQ("fl=[docid]", req("q","*:*", "rows", "1", "fl","[docid]") ,"//result[@numFound='5']" ,"//result/doc/int[@name='[docid]']" ,"//result/doc[count(*)=1]" ); for (SolrParams p : Arrays.asList(params("fl","[docid],[shard],[explain],x_alias:[value v=10 t=int]"), params("fl","[docid],[shard]","fl","[explain],x_alias:[value v=10 t=int]"), params("fl","[docid]","fl","[shard]","fl","[explain]","fl","x_alias:[value v=10 t=int]"))) { assertQ("" + p, req(p, "q","*:*", "rows", "1") ,"//result[@numFound='5']" ,"//result/doc/int[@name='[docid]']" ,"//result/doc/str[@name='[shard]'][.='[not a shard request]']" ,"//result/doc/str[@name='[explain]']" ,"//result/doc/int[@name='x_alias'][.=10]" ,"//result/doc[count(*)=4]" ); } } public void testDocIdAugmenterRTG() throws Exception { // for an uncommitted doc, we should get -1 for (String id : Arrays.asList("42","99")) { assertQ(id + ": fl=[docid]", req("qt","/get","id",id, "wt","xml", "fl","[docid]") ,"count(//doc)=1" ,"//doc/int[@name='[docid]'][.>=-1]" ,"//doc[count(*)=1]" ); } } public void testAugmentersRTG() throws Exception { // behavior shouldn't matter if we are committed or uncommitted for (String id : Arrays.asList("42","99")) { for (SolrParams p : Arrays.asList (params("fl","[docid],[shard],[explain],x_alias:[value v=10 t=int],abs(val_i)"), params("fl","[docid],[shard],abs(val_i)","fl","[explain],x_alias:[value v=10 t=int]"), params("fl","[docid],[shard]","fl","[explain],x_alias:[value v=10 
t=int]","fl","abs(val_i)"), params("fl","[docid]","fl","[shard]","fl","[explain]","fl","x_alias:[value v=10 t=int]","fl","abs(val_i)"))) { assertQ(id + ": " + p, req(p, "qt","/get","id",id, "wt","xml") ,"count(//doc)=1" ,"//doc/int[@name='[docid]'][.>=-1]" ,"//doc/float[@name='abs(val_i)'][.='1.0']" ,"//doc/str[@name='[shard]'][.='[not a shard request]']" // RTG: [explain] should be missing (ignored) ,"//doc/int[@name='x_alias'][.=10]" ,"//doc[count(*)=4]" ); } } } public void testAugmentersAndExplicit() throws Exception { for (SolrParams p : Arrays.asList(params("fl","id,[docid],[explain],x_alias:[value v=10 t=int]"), params("fl","id","fl","[docid],[explain],x_alias:[value v=10 t=int]"), params("fl","id","fl","[docid]","fl","[explain]","fl","x_alias:[value v=10 t=int]"))) { assertQ(p.toString(), req(p, "q","*:*", "rows", "1") ,"//result[@numFound='5']" ,"//result/doc/str[@name='id']" ,"//result/doc/int[@name='[docid]']" ,"//result/doc/str[@name='[explain]']" ,"//result/doc/int[@name='x_alias'][.=10]" ,"//result/doc[count(*)=4]" ); } } public void testAugmentersAndExplicitRTG() throws Exception { // behavior shouldn't matter if we are committed or uncommitted for (String id : Arrays.asList("42","99")) { for (SolrParams p : Arrays.asList (params("fl","id,[docid],[explain],x_alias:[value v=10 t=int],abs(val_i)"), params("fl","id,[docid],abs(val_i)","fl","[explain],x_alias:[value v=10 t=int]"), params("fl","id","fl","[docid]","fl","[explain]","fl","x_alias:[value v=10 t=int]","fl","abs(val_i)"))) { assertQ(id + ": " + p, req(p, "qt","/get","id",id, "wt","xml") ,"count(//doc)=1" ,"//doc/str[@name='id']" ,"//doc/int[@name='[docid]'][.>=-1]" ,"//doc/float[@name='abs(val_i)'][.='1.0']" // RTG: [explain] should be missing (ignored) ,"//doc/int[@name='x_alias'][.=10]" ,"//doc[count(*)=4]" ); } } } public void testAugmentersAndScore() throws Exception { assertQ(req("q","*:*", "rows", "1", "fl","[docid],x_alias:[value v=10 t=int],score") ,"//result[@numFound='5']" ,"//result/doc/int[@name='[docid]']" ,"//result/doc/int[@name='x_alias'][.=10]" ,"//result/doc/float[@name='score']" ,"//result/doc[count(*)=3]" ); for (SolrParams p : Arrays.asList(params("fl","[docid],x_alias:[value v=10 t=int],[explain],score"), params("fl","[docid]","fl","x_alias:[value v=10 t=int],[explain]","fl","score"), params("fl","[docid]","fl","x_alias:[value v=10 t=int]","fl","[explain]","fl","score"))) { assertQ(p.toString(), req(p, "q","*:*", "rows", "1") ,"//result[@numFound='5']" ,"//result/doc/int[@name='[docid]']" ,"//result/doc/int[@name='x_alias'][.=10]" ,"//result/doc/str[@name='[explain]']" ,"//result/doc/float[@name='score']" ,"//result/doc[count(*)=4]" ); } } public void testAugmentersAndScoreRTG() throws Exception { // if we use RTG (committed or otherwise) score should be ignored for (String id : Arrays.asList("42","99")) { assertQ(id, req("qt","/get","id",id, "wt","xml", "fl","x_alias:[value v=10 t=int],score,abs(val_i),[docid]") ,"//doc/int[@name='[docid]'][.>=-1]" ,"//doc/float[@name='abs(val_i)'][.='1.0']" ,"//doc/int[@name='x_alias'][.=10]" ,"//doc[count(*)=3]" ); for (SolrParams p : Arrays.asList(params("fl","[docid],x_alias:[value v=10 t=int],[explain],score,abs(val_i)"), params("fl","x_alias:[value v=10 t=int],[explain]","fl","[docid],score,abs(val_i)"), params("fl","[docid]","fl","x_alias:[value v=10 t=int]","fl","[explain]","fl","score","fl","abs(val_i)"))) { assertQ(p.toString(), req(p, "qt","/get","id",id, "wt","xml") ,"//doc/int[@name='[docid]']" // TODO ,"//doc/float[@name='abs(val_i)'][.='1.0']" 
,"//doc/int[@name='x_alias'][.=10]" // RTG: [explain] and score should be missing (ignored) ,"//doc[count(*)=3]" ); } } } public void testAugmentersGlobsExplicitAndScoreOhMy() throws Exception { Random random = random(); // NOTE: 'ssto' is the missing one final List<String> fl = Arrays.asList ("id","[docid]","[explain]","score","val_*","subj*"); final int iters = atLeast(random, 10); for (int i = 0; i< iters; i++) { Collections.shuffle(fl, random); final SolrParams singleFl = params("q","*:*", "rows", "1","fl",StringUtils.join(fl.toArray(),',')); final ModifiableSolrParams multiFl = params("q","*:*", "rows", "1"); for (String item : fl) { multiFl.add("fl",item); } for (SolrParams p : Arrays.asList(singleFl, multiFl)) { assertQ(p.toString(), req(p) ,"//result[@numFound='5']" ,"//result/doc/str[@name='id']" ,"//result/doc/float[@name='score']" ,"//result/doc/str[@name='subject']" ,"//result/doc/int[@name='val_i']" ,"//result/doc/int[@name='[docid]']" ,"//result/doc/str[@name='[explain]']" ,"//result/doc[count(*)=6]" ); } } } public void testAugmentersGlobsExplicitAndScoreOhMyRTG() throws Exception { Random random = random(); // NOTE: 'ssto' is the missing one final List<String> fl = Arrays.asList ("id","[explain]","score","val_*","subj*","abs(val_i)","[docid]"); final int iters = atLeast(random, 10); for (int i = 0; i< iters; i++) { Collections.shuffle(fl, random); final SolrParams singleFl = params("fl",StringUtils.join(fl.toArray(),',')); final ModifiableSolrParams multiFl = params(); for (String item : fl) { multiFl.add("fl",item); } // RTG behavior should be consistent, (committed or otherwise) for (String id : Arrays.asList("42","99")) { for (SolrParams p : Arrays.asList(singleFl, multiFl)) { assertQ(id + ": " + p, req(p, "qt","/get","id",id, "wt","xml") ,"count(//doc)=1" ,"//doc/str[@name='id']" ,"//doc/int[@name='[docid]'][.>=-1]" ,"//doc/float[@name='abs(val_i)'][.='1.0']" // RTG: [explain] and score should be missing (ignored) ,"//doc/int[@name='val_i'][.=1]" ,"//doc/str[@name='subject']" ,"//doc[count(*)=5]" ); } } } } }
1
28,004
The expected counts were changed because _root_ is now also added to each document, increasing the per-document field count by 1.
apache-lucene-solr
java
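The commit message above explains the patch: with _root_ now stored on every document, XPath assertions counting the child elements of each doc move from 4 to 5. A self-contained sketch of that count(*) arithmetic, using a hypothetical response fragment rather than output from a real Solr instance:

import java.io.ByteArrayInputStream;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;

public class RootFieldCountDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical stored fields for doc 42, now including _root_:
        String xml = "<doc>"
                + "<str name='id'>42</str>"
                + "<int name='val_i'>1</int>"
                + "<str name='ssto'>X</str>"
                + "<str name='subject'>aaa</str>"
                + "<str name='_root_'>42</str>" // the newly added field
                + "</doc>";
        Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new ByteArrayInputStream(xml.getBytes("UTF-8")));
        // The patched assertions expect count(*)=5 instead of 4:
        Number count = (Number) XPathFactory.newInstance().newXPath()
                .evaluate("count(/doc/*)", doc, XPathConstants.NUMBER);
        System.out.println(count); // 5.0
    }
}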
@@ -169,12 +169,6 @@ class ArrayMapReturnTypeProvider implements FunctionReturnTypeProviderInterface $function_call_arg->value->items[1]->value->value, [] ); - } elseif ($variable_atomic_type instanceof TTemplateParamClass) { - $fake_method_call = new VirtualStaticCall( - $function_call_arg->value->items[0]->value, - $function_call_arg->value->items[1]->value->value, - [] - ); } }
1
<?php namespace Psalm\Internal\Provider\ReturnTypeProvider; use PhpParser; use Psalm\Context; use Psalm\Internal\Analyzer\Statements\Expression\AssertionFinder; use Psalm\Internal\Analyzer\Statements\Expression\Call\FunctionCallAnalyzer; use Psalm\Internal\Analyzer\Statements\Expression\Call\MethodCallAnalyzer; use Psalm\Internal\Analyzer\Statements\Expression\Call\StaticCallAnalyzer; use Psalm\Internal\Analyzer\Statements\Expression\CallAnalyzer; use Psalm\Internal\Analyzer\StatementsAnalyzer; use Psalm\Internal\Type\ArrayType; use Psalm\Node\Expr\VirtualArrayDimFetch; use Psalm\Node\Expr\VirtualFuncCall; use Psalm\Node\Expr\VirtualMethodCall; use Psalm\Node\Expr\VirtualStaticCall; use Psalm\Node\Expr\VirtualVariable; use Psalm\Node\Name\VirtualFullyQualified; use Psalm\Node\VirtualArg; use Psalm\Node\VirtualIdentifier; use Psalm\Plugin\EventHandler\Event\FunctionReturnTypeProviderEvent; use Psalm\Plugin\EventHandler\FunctionReturnTypeProviderInterface; use Psalm\Type; use Psalm\Type\Atomic\TArray; use Psalm\Type\Atomic\TKeyedArray; use Psalm\Type\Atomic\TList; use Psalm\Type\Atomic\TNamedObject; use Psalm\Type\Atomic\TNonEmptyArray; use Psalm\Type\Atomic\TNonEmptyList; use Psalm\Type\Atomic\TTemplateParam; use Psalm\Type\Atomic\TTemplateParamClass; use Psalm\Type\Union; use UnexpectedValueException; use function array_map; use function array_shift; use function array_slice; use function count; use function explode; use function in_array; use function mt_rand; use function reset; use function strpos; use function substr; class ArrayMapReturnTypeProvider implements FunctionReturnTypeProviderInterface { /** * @return array<lowercase-string> */ public static function getFunctionIds(): array { return ['array_map']; } public static function getFunctionReturnType(FunctionReturnTypeProviderEvent $event): Union { $statements_source = $event->getStatementsSource(); $call_args = $event->getCallArgs(); $context = $event->getContext(); if (!$statements_source instanceof StatementsAnalyzer) { return Type::getMixed(); } $function_call_arg = $call_args[0] ?? null; $function_call_type = $function_call_arg ? $statements_source->node_data->getType($function_call_arg->value) : null; if ($function_call_type && $function_call_type->isNull()) { array_shift($call_args); $array_arg_types = []; foreach ($call_args as $call_arg) { $call_arg_type = $statements_source->node_data->getType($call_arg->value); if ($call_arg_type) { $array_arg_types[] = clone $call_arg_type; } else { $array_arg_types[] = Type::getMixed(); break; } } if ($array_arg_types) { return new Union([new TKeyedArray($array_arg_types)]); } return Type::getArray(); } $array_arg = $call_args[1] ?? null; if (!$array_arg) { return Type::getArray(); } $array_arg_atomic_type = null; $array_arg_type = null; if ($array_arg_union_type = $statements_source->node_data->getType($array_arg->value)) { $arg_types = $array_arg_union_type->getAtomicTypes(); if (isset($arg_types['array'])) { $array_arg_atomic_type = $arg_types['array']; $array_arg_type = ArrayType::infer($array_arg_atomic_type); } } $generic_key_type = null; $mapping_return_type = null; if ($function_call_arg && $function_call_type) { if (count($call_args) === 2) { $generic_key_type = $array_arg_type->key ?? 
Type::getArrayKey(); } else { $generic_key_type = Type::getInt(); } if ($function_call_type->hasCallableType()) { $closure_types = $function_call_type->getClosureTypes() ?: $function_call_type->getCallableTypes(); $closure_atomic_type = reset($closure_types); $closure_return_type = $closure_atomic_type->return_type ?: Type::getMixed(); if ($closure_return_type->isVoid()) { $closure_return_type = Type::getNull(); } $mapping_return_type = clone $closure_return_type; } elseif ($function_call_arg->value instanceof PhpParser\Node\Scalar\String_ || $function_call_arg->value instanceof PhpParser\Node\Expr\Array_ || $function_call_arg->value instanceof PhpParser\Node\Expr\BinaryOp\Concat ) { $mapping_function_ids = CallAnalyzer::getFunctionIdsFromCallableArg( $statements_source, $function_call_arg->value ); if ($mapping_function_ids) { $mapping_return_type = self::getReturnTypeFromMappingIds( $statements_source, $mapping_function_ids, $context, $function_call_arg, array_slice($call_args, 1) ); } if ($function_call_arg->value instanceof PhpParser\Node\Expr\Array_ && isset($function_call_arg->value->items[0]) && isset($function_call_arg->value->items[1]) && $function_call_arg->value->items[1]->value instanceof PhpParser\Node\Scalar\String_ && $function_call_arg->value->items[0]->value instanceof PhpParser\Node\Expr\Variable && ($variable_type = $statements_source->node_data->getType($function_call_arg->value->items[0]->value)) ) { $fake_method_call = null; foreach ($variable_type->getAtomicTypes() as $variable_atomic_type) { if ($variable_atomic_type instanceof TTemplateParam || $variable_atomic_type instanceof TTemplateParamClass ) { $fake_method_call = new VirtualStaticCall( $function_call_arg->value->items[0]->value, $function_call_arg->value->items[1]->value->value, [] ); } elseif ($variable_atomic_type instanceof TTemplateParamClass) { $fake_method_call = new VirtualStaticCall( $function_call_arg->value->items[0]->value, $function_call_arg->value->items[1]->value->value, [] ); } } if ($fake_method_call) { $fake_method_return_type = self::executeFakeCall( $statements_source, $fake_method_call, $context ); if ($fake_method_return_type) { $mapping_return_type = $fake_method_return_type; } } } } } if ($mapping_return_type && $generic_key_type) { if ($array_arg_atomic_type instanceof TKeyedArray && count($call_args) === 2) { $atomic_type = new TKeyedArray( array_map( /** * @return Union */ function (Union $_) use ($mapping_return_type): Union { return clone $mapping_return_type; }, $array_arg_atomic_type->properties ) ); $atomic_type->is_list = $array_arg_atomic_type->is_list; $atomic_type->sealed = $array_arg_atomic_type->sealed; $atomic_type->previous_key_type = $array_arg_atomic_type->previous_key_type; $atomic_type->previous_value_type = $mapping_return_type; return new Union([$atomic_type]); } if ($array_arg_atomic_type instanceof TList || count($call_args) !== 2 ) { if ($array_arg_atomic_type instanceof TNonEmptyList) { return new Union([ new TNonEmptyList( $mapping_return_type ), ]); } return new Union([ new TList( $mapping_return_type ), ]); } if ($array_arg_atomic_type instanceof TNonEmptyArray) { return new Union([ new TNonEmptyArray([ $generic_key_type, $mapping_return_type, ]), ]); } return new Union([ new TArray([ $generic_key_type, $mapping_return_type, ]) ]); } return count($call_args) === 2 && !($array_arg_type->is_list ?? false) ? new Union([ new TArray([ $array_arg_type->key ?? 
Type::getArrayKey(), Type::getMixed(), ]) ]) : Type::getList(); } /** * @param-out array<string, array<array<int, string>>>|null $assertions */ private static function executeFakeCall( StatementsAnalyzer $statements_analyzer, PhpParser\Node\Expr $fake_call, Context $context, ?array &$assertions = null ): ?Union { $old_data_provider = $statements_analyzer->node_data; $statements_analyzer->node_data = clone $statements_analyzer->node_data; $suppressed_issues = $statements_analyzer->getSuppressedIssues(); if (!in_array('PossiblyInvalidMethodCall', $suppressed_issues, true)) { $statements_analyzer->addSuppressedIssues(['PossiblyInvalidMethodCall']); } if (!in_array('MixedArrayOffset', $suppressed_issues, true)) { $statements_analyzer->addSuppressedIssues(['MixedArrayOffset']); } $was_inside_call = $context->inside_call; $context->inside_call = true; if ($fake_call instanceof PhpParser\Node\Expr\StaticCall) { StaticCallAnalyzer::analyze( $statements_analyzer, $fake_call, $context ); } elseif ($fake_call instanceof PhpParser\Node\Expr\MethodCall) { MethodCallAnalyzer::analyze( $statements_analyzer, $fake_call, $context ); } elseif ($fake_call instanceof PhpParser\Node\Expr\FuncCall) { FunctionCallAnalyzer::analyze( $statements_analyzer, $fake_call, $context ); } else { throw new UnexpectedValueException('UnrecognizedCall'); } $codebase = $statements_analyzer->getCodebase(); if ($assertions !== null) { $anded_assertions = AssertionFinder::scrapeAssertions( $fake_call, null, $statements_analyzer, $codebase ); $assertions = $anded_assertions[0] ?? []; } $context->inside_call = $was_inside_call; if (!in_array('PossiblyInvalidMethodCall', $suppressed_issues, true)) { $statements_analyzer->removeSuppressedIssues(['PossiblyInvalidMethodCall']); } if (!in_array('MixedArrayOffset', $suppressed_issues, true)) { $statements_analyzer->removeSuppressedIssues(['MixedArrayOffset']); } $return_type = $statements_analyzer->node_data->getType($fake_call) ?? null; $statements_analyzer->node_data = $old_data_provider; return $return_type; } /** * @param non-empty-array<int, string> $mapping_function_ids * @param list<PhpParser\Node\Arg> $array_args * @param int|null $fake_var_discriminator Set the fake variable id to a known value with the discriminator * as a substring, and don't clear it from the context. 
* @param-out array<string, array<array<int, string>>>|null $assertions */ public static function getReturnTypeFromMappingIds( StatementsAnalyzer $statements_source, array $mapping_function_ids, Context $context, PhpParser\Node\Arg $function_call_arg, array $array_args, ?array &$assertions = null, ?int $fake_var_discriminator = null ): Union { $mapping_return_type = null; $codebase = $statements_source->getCodebase(); $clean_context = false; foreach ($mapping_function_ids as $mapping_function_id) { $mapping_function_id_parts = explode('&', $mapping_function_id); if ($fake_var_discriminator === null) { $fake_var_discriminator = mt_rand(); $clean_context = true; } foreach ($mapping_function_id_parts as $mapping_function_id_part) { $fake_args = []; foreach ($array_args as $array_arg) { $fake_args[] = new VirtualArg( new VirtualArrayDimFetch( $array_arg->value, new VirtualVariable( "__fake_{$fake_var_discriminator}_offset_var__", $array_arg->value->getAttributes() ), $array_arg->value->getAttributes() ), false, false, $array_arg->getAttributes() ); } if (strpos($mapping_function_id_part, '::') !== false) { $is_instance = false; if ($mapping_function_id_part[0] === '$') { $mapping_function_id_part = substr($mapping_function_id_part, 1); $is_instance = true; } $method_id_parts = explode('::', $mapping_function_id_part); [$callable_fq_class_name, $callable_method_name] = $method_id_parts; if ($is_instance) { $fake_method_call = new VirtualMethodCall( new VirtualVariable( "__fake_{$fake_var_discriminator}_method_call_var__", $function_call_arg->getAttributes() ), new VirtualIdentifier( $callable_method_name, $function_call_arg->getAttributes() ), $fake_args, $function_call_arg->getAttributes() ); $lhs_instance_type = null; $callable_type = $statements_source->node_data->getType($function_call_arg->value); if ($callable_type) { foreach ($callable_type->getAtomicTypes() as $atomic_type) { if ($atomic_type instanceof TKeyedArray && count($atomic_type->properties) === 2 && isset($atomic_type->properties[0]) ) { $lhs_instance_type = clone $atomic_type->properties[0]; } } } $context->vars_in_scope["\$__fake_{$fake_var_discriminator}_offset_var__"] = Type::getMixed(); $context->vars_in_scope["\$__fake_{$fake_var_discriminator}_method_call_var__"] = $lhs_instance_type ?: new Union([new TNamedObject($callable_fq_class_name)]); $fake_method_return_type = self::executeFakeCall( $statements_source, $fake_method_call, $context, $assertions ); } else { $fake_method_call = new VirtualStaticCall( new VirtualFullyQualified( $callable_fq_class_name, $function_call_arg->getAttributes() ), new VirtualIdentifier( $callable_method_name, $function_call_arg->getAttributes() ), $fake_args, $function_call_arg->getAttributes() ); $context->vars_in_scope["\$__fake_{$fake_var_discriminator}_offset_var__"] = Type::getMixed(); $fake_method_return_type = self::executeFakeCall( $statements_source, $fake_method_call, $context, $assertions ); } $function_id_return_type = $fake_method_return_type ?? Type::getMixed(); } else { $fake_function_call = new VirtualFuncCall( new VirtualFullyQualified( $mapping_function_id_part, $function_call_arg->getAttributes() ), $fake_args, $function_call_arg->getAttributes() ); $context->vars_in_scope["\$__fake_{$fake_var_discriminator}_offset_var__"] = Type::getMixed(); $fake_function_return_type = self::executeFakeCall( $statements_source, $fake_function_call, $context, $assertions ); $function_id_return_type = $fake_function_return_type ?? 
Type::getMixed(); } } if ($clean_context) { self::cleanContext($context, $fake_var_discriminator); } $fake_var_discriminator = null; $mapping_return_type = Type::combineUnionTypes( $function_id_return_type, $mapping_return_type, $codebase ); } return $mapping_return_type; } public static function cleanContext(Context $context, int $fake_var_discriminator): void { foreach ($context->vars_in_scope as $var_in_scope => $_) { if (strpos($var_in_scope, "__fake_{$fake_var_discriminator}_") !== false) { unset($context->vars_in_scope[$var_in_scope]); } } } }
1
12,394
TTemplateParamClass is already handled by the preceding conditional, whose branch has identical content, so the duplicate elseif is removed.
vimeo-psalm
php
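The elseif deleted by the patch above could never run: its TTemplateParamClass test is already part of the `instanceof TTemplateParam || instanceof TTemplateParamClass` condition directly before it, and both branches build the same VirtualStaticCall. A minimal runnable sketch of that dead-branch pattern, using generic SPL interfaces rather than Psalm's own types:

<?php
// `A || B` in the first branch already consumes every B instance, so the
// `elseif B` below it is dead code -- the shape the patch deletes.
function describe(object $value): string
{
    if ($value instanceof Countable || $value instanceof ArrayAccess) {
        return 'combined branch';
    } elseif ($value instanceof ArrayAccess) {
        return 'duplicate branch (unreachable)';
    }
    return 'other';
}

echo describe(new ArrayObject([])), PHP_EOL; // always "combined branch"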
@@ -597,6 +597,17 @@ class Upgrade } } + // Upgrade CAPTCHA Options + $oldKeys + = ['siteKey', 'publicKey', 'secretKey', 'privateKey']; + foreach ($oldKeys as $key) { + if (isset($newConfig['Captcha'][$key])) { + $newConfig['Captcha']['recaptcha_' . $key] + = $newConfig['Captcha'][$key]; + unset($newConfig['Captcha'][$key]); + } + } + // Warn the user about deprecated WorldCat settings: if (isset($newConfig['WorldCat']['LimitCodes'])) { unset($newConfig['WorldCat']['LimitCodes']);
1
<?php /** * VF Configuration Upgrade Tool * * PHP version 7 * * Copyright (C) Villanova University 2010. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2, * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA * * @category VuFind * @package Config * @author Demian Katz <[email protected]> * @license http://opensource.org/licenses/gpl-2.0.php GNU General Public License * @link https://vufind.org Main Site */ namespace VuFind\Config; use VuFind\Config\Writer as ConfigWriter; use VuFind\Exception\FileAccess as FileAccessException; /** * Class to upgrade previous VuFind configurations to the current version * * @category VuFind * @package Config * @author Demian Katz <[email protected]> * @license http://opensource.org/licenses/gpl-2.0.php GNU General Public License * @link https://vufind.org Main Site */ class Upgrade { /** * Version we're upgrading from * * @var string */ protected $from; /** * Version we're upgrading to * * @var string */ protected $to; /** * Directory containing configurations to upgrade * * @var string */ protected $oldDir; /** * Directory containing unmodified new configurations * * @var string */ protected $rawDir; /** * Directory where new configurations should be written (null for test mode) * * @var string */ protected $newDir; /** * Parsed old configurations * * @var array */ protected $oldConfigs = []; /** * Processed new configurations * * @var array */ protected $newConfigs = []; /** * Comments parsed from configuration files * * @var array */ protected $comments = []; /** * Warnings generated during upgrade process * * @var array */ protected $warnings = []; /** * Are we upgrading files in place rather than creating them? * * @var bool */ protected $inPlaceUpgrade; /** * Have we modified permissions.ini? * * @var bool */ protected $permissionsModified = false; /** * Constructor * * @param string $from Version we're upgrading from. * @param string $to Version we're upgrading to. * @param string $oldDir Directory containing old configurations. * @param string $rawDir Directory containing raw new configurations. * @param string $newDir Directory to write updated new configurations into * (leave null to disable writes -- used in test mode). */ public function __construct($from, $to, $oldDir, $rawDir, $newDir = null) { $this->from = $from; $this->to = $to; $this->oldDir = $oldDir; $this->rawDir = $rawDir; $this->newDir = $newDir; $this->inPlaceUpgrade = ($this->oldDir == $this->newDir); } /** * Run through all of the necessary upgrading. * * @return void */ public function run() { // Load all old configurations: $this->loadConfigs(); // Upgrade them one by one and write the results to disk; order is // important since in some cases, settings may migrate out of config.ini // and into other files. 
$this->upgradeConfig(); $this->upgradeAuthority(); $this->upgradeFacetsAndCollection(); $this->upgradeFulltext(); $this->upgradeReserves(); $this->upgradeSearches(); $this->upgradeSitemap(); $this->upgradeSms(); $this->upgradeSummon(); $this->upgradePrimo(); $this->upgradeWorldCat(); // The previous upgrade routines may have added values to permissions.ini, // so we should save it last. It doesn't have its own upgrade routine. $this->saveModifiedConfig('permissions.ini'); // The following routines load special configurations that were not // explicitly loaded by loadConfigs: if ($this->from < 2) { // some pieces only apply to 1.x upgrade! $this->upgradeSolrMarc(); $this->upgradeSearchSpecs(); } $this->upgradeILS(); } /** * Get processed configurations (used by test routines). * * @return array */ public function getNewConfigs() { return $this->newConfigs; } /** * Get warning strings generated during upgrade process. * * @return array */ public function getWarnings() { return $this->warnings; } /** * Add a warning message. * * @param string $msg Warning message. * * @return void */ protected function addWarning($msg) { $this->warnings[] = $msg; } /** * Support function -- merge the contents of two arrays parsed from ini files. * * @param string $config_ini The base config array. * @param string $custom_ini Overrides to apply on top of the base array. * * @return array The merged results. */ public static function iniMerge($config_ini, $custom_ini) { foreach ($custom_ini as $k => $v) { // Make a recursive call if we need to merge array values into an // existing key... otherwise just drop the value in place. if (is_array($v) && isset($config_ini[$k])) { $config_ini[$k] = self::iniMerge($config_ini[$k], $custom_ini[$k]); } else { $config_ini[$k] = $v; } } return $config_ini; } /** * Load the old config.ini settings. * * @return void */ protected function loadOldBaseConfig() { // Load the base settings: $oldIni = $this->oldDir . '/config.ini'; $mainArray = file_exists($oldIni) ? parse_ini_file($oldIni, true) : []; // Merge in local overrides as needed. VuFind 2 structures configurations // differently, so people who used this mechanism will need to refactor // their configurations to take advantage of the new "local directory" // feature. For now, we'll just merge everything to avoid losing settings. if (isset($mainArray['Extra_Config']) && isset($mainArray['Extra_Config']['local_overrides']) ) { $file = trim( $this->oldDir . '/' . $mainArray['Extra_Config']['local_overrides'] ); $localOverride = @parse_ini_file($file, true); if ($localOverride) { $mainArray = self::iniMerge($mainArray, $localOverride); } } // Save the configuration to the appropriate place: $this->oldConfigs['config.ini'] = $mainArray; } /** * Find the path to the old configuration file. * * @param string $filename Filename of configuration file. * * @return string */ protected function getOldConfigPath($filename) { // Check if the user has overridden the filename in the [Extra_Config] // section: $index = str_replace('.ini', '', $filename); if (isset($this->oldConfigs['config.ini']['Extra_Config'][$index])) { $path = $this->oldDir . '/' . $this->oldConfigs['config.ini']['Extra_Config'][$index]; if (file_exists($path) && is_file($path)) { return $path; } } return $this->oldDir . '/' . $filename; } /** * Load all of the user's existing configurations. * * @return void */ protected function loadConfigs() { // Configuration files to load. 
Note that config.ini must always be loaded // first so that getOldConfigPath can work properly! $configs = ['config.ini']; foreach (glob($this->rawDir . '/*.ini') as $ini) { $parts = explode('/', str_replace('\\', '/', $ini)); $filename = array_pop($parts); if ($filename !== 'config.ini') { $configs[] = $filename; } } foreach ($configs as $config) { // Special case for config.ini, since we may need to overlay extra // settings: if ($config == 'config.ini') { $this->loadOldBaseConfig(); } else { $path = $this->getOldConfigPath($config); $this->oldConfigs[$config] = file_exists($path) ? parse_ini_file($path, true) : []; } $this->newConfigs[$config] = parse_ini_file($this->rawDir . '/' . $config, true); $this->comments[$config] = $this->extractComments($this->rawDir . '/' . $config); } } /** * Apply settings from an old configuration to a new configuration. * * @param string $filename Name of the configuration being updated. * @param array $fullSections Array of section names that need to be fully * overridden (as opposed to overridden on a setting-by-setting basis). * * @return void */ protected function applyOldSettings($filename, $fullSections = []) { // First override all individual settings: foreach ($this->oldConfigs[$filename] as $section => $subsection) { foreach ($subsection as $key => $value) { $this->newConfigs[$filename][$section][$key] = $value; } } // Now override on a section-by-section basis where necessary: foreach ($fullSections as $section) { $this->newConfigs[$filename][$section] = isset($this->oldConfigs[$filename][$section]) ? $this->oldConfigs[$filename][$section] : []; } } /** * Save a modified configuration file. * * @param string $filename Name of config file to write (contents will be * pulled from current state of object properties). * * @throws FileAccessException * @return void */ protected function saveModifiedConfig($filename) { if (null === $this->newDir) { // skip write if no destination return; } // If we're doing an in-place upgrade, and the source file is empty, // there is no point in upgrading anything (the file doesn't exist). if (empty($this->oldConfigs[$filename]) && $this->inPlaceUpgrade) { // Special case: if we set up custom permissions, we need to // write the file even if it didn't previously exist. if (!$this->permissionsModified || $filename !== 'permissions.ini') { return; } } // If target file already exists, back it up: $outfile = $this->newDir . '/' . $filename; $bakfile = $outfile . '.bak.' . time(); if (file_exists($outfile) && !copy($outfile, $bakfile)) { throw new FileAccessException( "Error: Could not copy {$outfile} to {$bakfile}." ); } $writer = new ConfigWriter( $outfile, $this->newConfigs[$filename], $this->comments[$filename] ); if (!$writer->save()) { throw new FileAccessException( "Error: Problem writing to {$outfile}." ); } } /** * Save an unmodified configuration file -- copy the old version, unless it is * the same as the new version! * * @param string $filename Path to the old config file * * @throws FileAccessException * @return void */ protected function saveUnmodifiedConfig($filename) { if (null === $this->newDir) { // skip write if no destination return; } if ($this->inPlaceUpgrade) { // skip write if doing in-place upgrade return; } // Figure out directories for all versions of this config file: $src = $this->getOldConfigPath($filename); $raw = $this->rawDir . '/' . $filename; $dest = $this->newDir . '/' . 
$filename; // Compare the source file against the raw file; if they happen to be the // same, we don't need to copy anything! if (file_exists($src) && file_exists($raw) && md5(file_get_contents($src)) == md5(file_get_contents($raw)) ) { return; } // If we got this far, we need to copy the user's file into place: if (file_exists($src) && !copy($src, $dest)) { throw new FileAccessException( "Error: Could not copy {$src} to {$dest}." ); } } /** * Check for invalid theme setting. * * @param string $setting Name of setting in [Site] section to check. * @param string $default Default value to use if invalid option was found. * * @return void */ protected function checkTheme($setting, $default = null) { // If a setting is not set, there is nothing to check: $theme = isset($this->newConfigs['config.ini']['Site'][$setting]) ? $this->newConfigs['config.ini']['Site'][$setting] : null; if (empty($theme)) { return; } $parts = explode(',', $theme); $theme = trim($parts[0]); if (!file_exists(APPLICATION_PATH . '/themes/' . $theme) || !is_dir(APPLICATION_PATH . '/themes/' . $theme) ) { if ($default === null) { $this->addWarning( "WARNING: This version of VuFind does not support the {$theme} " . "theme. As such, we have disabled your {$setting} setting." ); unset($this->newConfigs['config.ini']['Site'][$setting]); } else { $this->addWarning( "WARNING: This version of VuFind does not support " . "the {$theme} theme. Your config.ini [Site] {$setting} setting" . " has been reset to the default: {$default}. You may need to " . "reimplement your custom theme." ); $this->newConfigs['config.ini']['Site'][$setting] = $default; } } } /** * Is this a default BulkExport options setting? * * @param string $eo Bulk export options * * @return bool */ protected function isDefaultBulkExportOptions($eo) { $from = (float)$this->from; if ($from >= 2.4) { $default = 'MARC:MARCXML:EndNote:EndNoteWeb:RefWorks:BibTeX:RIS'; } elseif ($from >= 2.0) { $default = 'MARC:MARCXML:EndNote:EndNoteWeb:RefWorks:BibTeX'; } elseif ($from >= 1.4) { $default = 'MARC:MARCXML:EndNote:RefWorks:BibTeX'; } elseif ($from >= 1.3) { $default = 'MARC:EndNote:RefWorks:BibTeX'; } elseif ($from >= 1.2) { $default = 'MARC:EndNote:BibTeX'; } else { $default = 'MARC:EndNote'; } return $eo == $default; } /** * Add warnings if Amazon problems were found. * * @param array $config Configuration to check * * @return void */ protected function checkAmazonConfig($config) { // Warn the user if they have Amazon enabled but do not have the appropriate // credentials set up. $hasAmazonReview = isset($config['Content']['reviews']) && stristr($config['Content']['reviews'], 'amazon'); $hasAmazonCover = isset($config['Content']['coverimages']) && stristr($config['Content']['coverimages'], 'amazon'); if ($hasAmazonReview || $hasAmazonCover) { if (!isset($config['Content']['amazonsecret'])) { $this->addWarning( 'WARNING: You have Amazon content enabled but are missing ' . 'the required amazonsecret setting in the [Content] section ' . 'of config.ini' ); } if (!isset($config['Content']['amazonassociate'])) { $this->addWarning( 'WARNING: You have Amazon content enabled but are missing ' . 'the required amazonassociate setting in the [Content] section' . ' of config.ini' ); } } } /** * Upgrade config.ini. 
* * @throws FileAccessException * @return void */ protected function upgradeConfig() { // override new version's defaults with matching settings from old version: $this->applyOldSettings('config.ini'); // Set up reference for convenience (and shorter lines): $newConfig = & $this->newConfigs['config.ini']; // If the [BulkExport] options setting is present and non-default, warn // the user about its deprecation. if (isset($newConfig['BulkExport']['options'])) { $default = $this->isDefaultBulkExportOptions( $newConfig['BulkExport']['options'] ); if (!$default) { $this->addWarning( 'The [BulkExport] options setting is deprecated; please ' . 'customize the [Export] section instead.' ); } unset($newConfig['BulkExport']['options']); } // If [Statistics] is present, warn the user about its deprecation. if (isset($newConfig['Statistics'])) { $this->addWarning( 'The Statistics module has been removed from Vufind. ' . 'For usage tracking, please configure Google Analytics or Piwik.' ); unset($newConfig['Statistics']); } // Warn the user about Amazon configuration issues: $this->checkAmazonConfig($newConfig); // Warn the user if they have enabled a deprecated Google API: if (isset($newConfig['GoogleSearch'])) { unset($newConfig['GoogleSearch']); $this->addWarning( 'The [GoogleSearch] section of config.ini is no ' . 'longer supported due to changes in Google APIs.' ); } if (isset($newConfig['Content']['recordMap']) && 'google' == strtolower($newConfig['Content']['recordMap']) ) { unset($newConfig['Content']['recordMap']); unset($newConfig['Content']['googleMapApiKey']); $this->addWarning( 'Google Maps is no longer a supported Content/recordMap option;' . ' please review your config.ini.' ); } if (isset($newConfig['GoogleAnalytics']['apiKey'])) { if (!isset($newConfig['GoogleAnalytics']['universal']) || !$newConfig['GoogleAnalytics']['universal'] ) { $this->addWarning( 'The [GoogleAnalytics] universal setting is off. See config.ini ' . 'for important information on how to upgrade your Analytics.' ); } } // Warn the user about deprecated WorldCat settings: if (isset($newConfig['WorldCat']['LimitCodes'])) { unset($newConfig['WorldCat']['LimitCodes']); $this->addWarning( 'The [WorldCat] LimitCodes setting never had any effect and has been' . ' removed.' ); } $badKeys = ['id', 'xISBN_token', 'xISBN_secret', 'xISSN_token', 'xISSN_secret']; foreach ($badKeys as $key) { if (isset($newConfig['WorldCat'][$key])) { unset($newConfig['WorldCat'][$key]); $this->addWarning( 'The [WorldCat] ' . $key . ' setting is no longer used and' . ' has been removed.' ); } } if (isset($newConfig['Record']['related']) && in_array('Editions', $newConfig['Record']['related']) ) { $newConfig['Record']['related'] = array_diff( $newConfig['Record']['related'], ['Editions'] ); $this->addWarning( 'The Editions related record module is no longer ' . 'supported due to OCLC\'s xID API shutdown.' . ' It has been removed from your settings.' 
); } // Upgrade Google Options: if (isset($newConfig['Content']['GoogleOptions']) && !is_array($newConfig['Content']['GoogleOptions']) ) { $newConfig['Content']['GoogleOptions'] = ['link' => $newConfig['Content']['GoogleOptions']]; } // Disable unused, obsolete setting: unset($newConfig['Index']['local']); // Warn the user if they are using an unsupported theme: $this->checkTheme('theme', 'bootprint3'); $this->checkTheme('mobile_theme', null); // Translate legacy auth settings: if (strtolower($newConfig['Authentication']['method']) == 'db') { $newConfig['Authentication']['method'] = 'Database'; } if (strtolower($newConfig['Authentication']['method']) == 'sip') { $newConfig['Authentication']['method'] = 'SIP2'; } // Translate legacy session settings: $newConfig['Session']['type'] = ucwords( str_replace('session', '', strtolower($newConfig['Session']['type'])) ); if ($newConfig['Session']['type'] == 'Mysql') { $newConfig['Session']['type'] = 'Database'; } // Eliminate obsolete database settings: $newConfig['Database'] = ['database' => $newConfig['Database']['database']]; // Eliminate obsolete config override settings: unset($newConfig['Extra_Config']); // Update generator if it is default value: if (isset($newConfig['Site']['generator']) && $newConfig['Site']['generator'] == 'VuFind ' . $this->from ) { $newConfig['Site']['generator'] = 'VuFind ' . $this->to; } // Update Syndetics config: if (isset($newConfig['Syndetics']['url'])) { $newConfig['Syndetics']['use_ssl'] = (strpos($newConfig['Syndetics']['url'], 'https://') === false) ? '' : 1; unset($newConfig['Syndetics']['url']); } // Translate obsolete permission settings: $this->upgradeAdminPermissions(); // Deal with shard settings (which may have to be moved to another file): $this->upgradeShardSettings(); // save the file $this->saveModifiedConfig('config.ini'); } /** * Translate obsolete permission settings. * * @return void */ protected function upgradeAdminPermissions() { $config = & $this->newConfigs['config.ini']; $permissions = & $this->newConfigs['permissions.ini']; if (isset($config['AdminAuth'])) { $permissions['access.AdminModule'] = []; if (isset($config['AdminAuth']['ipRegEx'])) { $permissions['access.AdminModule']['ipRegEx'] = $config['AdminAuth']['ipRegEx']; } if (isset($config['AdminAuth']['userWhitelist'])) { $permissions['access.AdminModule']['username'] = $config['AdminAuth']['userWhitelist']; } // If no settings exist in config.ini, we grant access to everyone // by allowing both logged-in and logged-out roles. if (empty($permissions['access.AdminModule'])) { $permissions['access.AdminModule']['role'] = ['guest', 'loggedin']; } $permissions['access.AdminModule']['permission'] = 'access.AdminModule'; $this->permissionsModified = true; // Remove any old settings remaining in config.ini: unset($config['AdminAuth']); } } /** * Change an array key. * * @param array $array Array to rewrite * @param string $old Old key name * @param string $new New key name * * @return array */ protected function changeArrayKey($array, $old, $new) { $newArr = []; foreach ($array as $k => $v) { if ($k === $old) { $k = $new; } $newArr[$k] = $v; } return $newArr; } /** * Support method for upgradeFacetsAndCollection() - change the name of * a facet field. 
* * @param string $old Old field name * @param string $new New field name * * @return void */ protected function renameFacet($old, $new) { $didWork = false; if (isset($this->newConfigs['facets.ini']['Results'][$old])) { $this->newConfigs['facets.ini']['Results'] = $this->changeArrayKey( $this->newConfigs['facets.ini']['Results'], $old, $new ); $didWork = true; } if (isset($this->newConfigs['Collection.ini']['Facets'][$old])) { $this->newConfigs['Collection.ini']['Facets'] = $this->changeArrayKey( $this->newConfigs['Collection.ini']['Facets'], $old, $new ); $didWork = true; } if ($didWork) { $this->newConfigs['facets.ini']['LegacyFields'][$old] = $new; } } /** * Upgrade facets.ini and Collection.ini (since these are tied together). * * @throws FileAccessException * @return void */ protected function upgradeFacetsAndCollection() { // we want to retain the old installation's various facet groups // exactly as-is $facetGroups = [ 'Results', 'ResultsTop', 'Advanced', 'Author', 'CheckboxFacets', 'HomePage' ]; $this->applyOldSettings('facets.ini', $facetGroups); $this->applyOldSettings('Collection.ini', ['Facets', 'Sort']); // fill in home page facets with advanced facets if missing: if (!isset($this->oldConfigs['facets.ini']['HomePage'])) { $this->newConfigs['facets.ini']['HomePage'] = $this->newConfigs['facets.ini']['Advanced']; } // rename changed facets $this->renameFacet('authorStr', 'author_facet'); // save the file $this->saveModifiedConfig('facets.ini'); $this->saveModifiedConfig('Collection.ini'); } /** * Update an old VuFind 1.x-style autocomplete handler name to the new style. * * @param string $name Name of module. * * @return string */ protected function upgradeAutocompleteName($name) { if ($name == 'NoAutocomplete') { return 'None'; } return str_replace('Autocomplete', '', $name); } /** * Upgrade searches.ini. * * @throws FileAccessException * @return void */ protected function upgradeSearches() { // we want to retain the old installation's Basic/Advanced search settings // and sort settings exactly as-is $groups = [ 'Basic_Searches', 'Advanced_Searches', 'Sorting', 'DefaultSortingByType' ]; $this->applyOldSettings('searches.ini', $groups); // Fix autocomplete settings in case they use the old style: $newConfig = & $this->newConfigs['searches.ini']; if (isset($newConfig['Autocomplete']['default_handler'])) { $newConfig['Autocomplete']['default_handler'] = $this->upgradeAutocompleteName( $newConfig['Autocomplete']['default_handler'] ); } if (isset($newConfig['Autocomplete_Types'])) { foreach ($newConfig['Autocomplete_Types'] as $k => $v) { $parts = explode(':', $v); $parts[0] = $this->upgradeAutocompleteName($parts[0]); $newConfig['Autocomplete_Types'][$k] = implode(':', $parts); } } // fix call number sort settings: if (isset($newConfig['Sorting']['callnumber'])) { $newConfig['Sorting']['callnumber-sort'] = $newConfig['Sorting']['callnumber']; unset($newConfig['Sorting']['callnumber']); } if (isset($newConfig['DefaultSortingByType'])) { foreach ($newConfig['DefaultSortingByType'] as & $v) { if ($v === 'callnumber') { $v = 'callnumber-sort'; } } } $this->upgradeSpellingSettings('searches.ini', ['CallNumber']); // save the file $this->saveModifiedConfig('searches.ini'); } /** * Upgrade spelling settings to account for refactoring of spelling as a * recommendation module starting in release 2.4. 
* * @param string $ini .ini file to modify * @param array $skip Keys to skip within [TopRecommendations] * * @return void */ protected function upgradeSpellingSettings($ini, $skip = []) { // Turn on the spelling recommendations if we're upgrading from a version // prior to 2.4. if ((float)$this->from < 2.4) { // Fix defaults in general section: $cfg = & $this->newConfigs[$ini]['General']; $keys = ['default_top_recommend', 'default_noresults_recommend']; foreach ($keys as $key) { if (!isset($cfg[$key])) { $cfg[$key] = []; } if (!in_array('SpellingSuggestions', $cfg[$key])) { $cfg[$key][] = 'SpellingSuggestions'; } } // Fix settings in [TopRecommendations] $cfg = & $this->newConfigs[$ini]['TopRecommendations']; // Add SpellingSuggestions to all non-skipped handlers: foreach ($cfg as $key => & $value) { if (!in_array($key, $skip) && !in_array('SpellingSuggestions', $value) ) { $value[] = 'SpellingSuggestions'; } } // Define handlers with no spelling support as the default minus the // Spelling option: foreach ($skip as $key) { if (!isset($cfg[$key])) { $cfg[$key] = array_diff( $this->newConfigs[$ini]['General']['default_top_recommend'], ['SpellingSuggestions'] ); } } } } /** * Upgrade fulltext.ini. * * @throws FileAccessException * @return void */ protected function upgradeFulltext() { $this->saveUnmodifiedConfig('fulltext.ini'); } /** * Upgrade sitemap.ini. * * @throws FileAccessException * @return void */ protected function upgradeSitemap() { $this->saveUnmodifiedConfig('sitemap.ini'); } /** * Upgrade sms.ini. * * @throws FileAccessException * @return void */ protected function upgradeSms() { $this->applyOldSettings('sms.ini', ['Carriers']); $this->saveModifiedConfig('sms.ini'); } /** * Upgrade authority.ini. * * @throws FileAccessException * @return void */ protected function upgradeAuthority() { // we want to retain the old installation's search and facet settings // exactly as-is $groups = [ 'Facets', 'Basic_Searches', 'Advanced_Searches', 'Sorting' ]; $this->applyOldSettings('authority.ini', $groups); // save the file $this->saveModifiedConfig('authority.ini'); } /** * Upgrade reserves.ini. * * @throws FileAccessException * @return void */ protected function upgradeReserves() { // If Reserves module is disabled, don't bother updating config: if (!isset($this->newConfigs['config.ini']['Reserves']['search_enabled']) || !$this->newConfigs['config.ini']['Reserves']['search_enabled'] ) { return; } // we want to retain the old installation's search and facet settings // exactly as-is $groups = [ 'Facets', 'Basic_Searches', 'Advanced_Searches', 'Sorting' ]; $this->applyOldSettings('reserves.ini', $groups); // save the file $this->saveModifiedConfig('reserves.ini'); } /** * Upgrade Summon.ini. * * @throws FileAccessException * @return void */ protected function upgradeSummon() { // If Summon is disabled in our current configuration, we don't need to // load any Summon-specific settings: if (!isset($this->newConfigs['config.ini']['Summon']['apiKey'])) { return; } // we want to retain the old installation's search and facet settings // exactly as-is $groups = [ 'Facets', 'FacetsTop', 'Basic_Searches', 'Advanced_Searches', 'Sorting' ]; $this->applyOldSettings('Summon.ini', $groups); // Turn on advanced checkbox facets if we're upgrading from a version // prior to 2.3. 
if ((float)$this->from < 2.3) { $cfg = & $this->newConfigs['Summon.ini']['Advanced_Facet_Settings']; if (!isset($cfg['special_facets']) || empty($cfg['special_facets'])) { $cfg['special_facets'] = 'checkboxes:Summon'; } elseif (false === strpos('checkboxes', $cfg['special_facets'])) { $cfg['special_facets'] .= ',checkboxes:Summon'; } } // update permission settings $this->upgradeSummonPermissions(); $this->upgradeSpellingSettings('Summon.ini'); // save the file $this->saveModifiedConfig('Summon.ini'); } /** * Translate obsolete permission settings. * * @return void */ protected function upgradeSummonPermissions() { $config = & $this->newConfigs['Summon.ini']; $permissions = & $this->newConfigs['permissions.ini']; if (isset($config['Auth'])) { $permissions['access.SummonExtendedResults'] = []; if (isset($config['Auth']['check_login']) && $config['Auth']['check_login'] ) { $permissions['access.SummonExtendedResults']['role'] = ['loggedin']; } if (isset($config['Auth']['ip_range'])) { $permissions['access.SummonExtendedResults']['ipRegEx'] = $config['Auth']['ip_range']; } if (!empty($permissions['access.SummonExtendedResults'])) { $permissions['access.SummonExtendedResults']['boolean'] = 'OR'; $permissions['access.SummonExtendedResults']['permission'] = 'access.SummonExtendedResults'; $this->permissionsModified = true; } else { unset($permissions['access.SummonExtendedResults']); } // Remove any old settings remaining in Summon.ini: unset($config['Auth']); } } /** * Upgrade Primo.ini. * * @throws FileAccessException * @return void */ protected function upgradePrimo() { // we want to retain the old installation's search and facet settings // exactly as-is $groups = [ 'Facets', 'FacetsTop', 'Basic_Searches', 'Advanced_Searches', 'Sorting' ]; $this->applyOldSettings('Primo.ini', $groups); // update permission settings $this->upgradePrimoPermissions(); // update server settings $this->upgradePrimoServerSettings(); // save the file $this->saveModifiedConfig('Primo.ini'); } /** * Translate obsolete permission settings. * * @return void */ protected function upgradePrimoPermissions() { $config = & $this->newConfigs['Primo.ini']; $permissions = & $this->newConfigs['permissions.ini']; if (isset($config['Institutions']['code']) && isset($config['Institutions']['regex']) ) { $codes = $config['Institutions']['code']; $regex = $config['Institutions']['regex']; if (count($regex) != count($codes)) { $this->addWarning( 'Mismatched code/regex counts in Primo.ini [Institutions].' ); } // Map parallel arrays into code => array of regexes and detect // wildcard regex to treat as default code. $map = []; $default = null; foreach ($codes as $i => $code) { if ($regex[$i] == '/.*/') { $default = $code; } else { $map[$code] = !isset($map[$code]) ? [$regex[$i]] : array_merge($map[$code], [$regex[$i]]); } } foreach ($map as $code => $regexes) { $perm = "access.PrimoInstitution.$code"; $config['Institutions']["onCampusRule['$code']"] = $perm; $permissions[$perm] = [ 'ipRegEx' => count($regexes) == 1 ? $regexes[0] : $regexes, 'permission' => $perm, ]; $this->permissionsModified = true; } if (null !== $default) { $config['Institutions']['defaultCode'] = $default; } // Remove any old settings remaining in Primo.ini: unset($config['Institutions']['code']); unset($config['Institutions']['regex']); } } /** * Translate obsolete server settings. 
* * @return void */ protected function upgradePrimoServerSettings() { $config = & $this->newConfigs['Primo.ini']; // Convert apiId to url if (isset($config['General']['apiId'])) { $url = 'http://' . $config['General']['apiId'] . '.hosted.exlibrisgroup.com'; if (isset($config['General']['port'])) { $url .= ':' . $config['General']['port']; } else { $url .= ':1701'; } $config['General']['url'] = $url; // Remove any old settings remaining in Primo.ini: unset($config['General']['apiId']); unset($config['General']['port']); } } /** * Upgrade WorldCat.ini. * * @throws FileAccessException * @return void */ protected function upgradeWorldCat() { // If WorldCat is disabled in our current configuration, we don't need to // load any WorldCat-specific settings: if (!isset($this->newConfigs['config.ini']['WorldCat']['apiKey'])) { return; } // we want to retain the old installation's search settings exactly as-is $groups = [ 'Basic_Searches', 'Advanced_Searches', 'Sorting' ]; $this->applyOldSettings('WorldCat.ini', $groups); // we need to fix an obsolete search setting for authors foreach (['Basic_Searches', 'Advanced_Searches'] as $section) { $new = []; foreach ($this->newConfigs['WorldCat.ini'][$section] as $k => $v) { if ($k == 'srw.au:srw.pn:srw.cn') { $k = 'srw.au'; } $new[$k] = $v; } $this->newConfigs['WorldCat.ini'][$section] = $new; } // Deal with deprecated related record module. $newConfig = & $this->newConfigs['WorldCat.ini']; if (isset($newConfig['Record']['related']) && in_array('WorldCatEditions', $newConfig['Record']['related']) ) { $newConfig['Record']['related'] = array_diff( $newConfig['Record']['related'], ['WorldCatEditions'] ); $this->addWarning( 'The WorldCatEditions related record module is no longer ' . 'supported due to OCLC\'s xID API shutdown.' . ' It has been removed from your settings.' ); } // save the file $this->saveModifiedConfig('WorldCat.ini'); } /** * Does the specified properties file contain any meaningful * (non-empty/non-comment) lines? * * @param string $src File to check * * @return bool */ protected function fileContainsMeaningfulLines($src) { // Does the file contain any meaningful lines? foreach (file($src) as $line) { $line = trim($line); if (!empty($line) && substr($line, 0, 1) != '#') { return true; } } return false; } /** * Upgrade SolrMarc configurations. * * @throws FileAccessException * @return void */ protected function upgradeSolrMarc() { if (null === $this->newDir) { // skip this step if no write destination return; } // Is there a marc_local.properties file? $src = realpath($this->oldDir . '/../../import/marc_local.properties'); if (empty($src) || !file_exists($src)) { return; } // Copy the file if it contains customizations: if ($this->fileContainsMeaningfulLines($src)) { $dest = realpath($this->newDir . '/../../import') . '/marc_local.properties'; if (!copy($src, $dest) || !file_exists($dest)) { throw new FileAccessException( "Cannot copy {$src} to {$dest}." ); } } } /** * Upgrade .yaml configurations. * * @throws FileAccessException * @return void */ protected function upgradeSearchSpecs() { if (null === $this->newDir) { // skip this step if no write destination return; } // VuFind 1.x uses *_local.yaml files as overrides; VuFind 2.x uses files // with the same filename in the local directory. Copy any old override // files into the new expected location: $files = ['searchspecs', 'authsearchspecs', 'reservessearchspecs']; foreach ($files as $file) { $old = $this->oldDir . '/' . $file . '_local.yaml'; $new = $this->newDir . '/' . $file . 
'.yaml'; if (file_exists($old)) { if (!copy($old, $new)) { throw new FileAccessException( "Cannot copy {$old} to {$new}." ); } } } } /** * Upgrade ILS driver configuration. * * @throws FileAccessException * @return void */ protected function upgradeILS() { $driver = isset($this->newConfigs['config.ini']['Catalog']['driver']) ? $this->newConfigs['config.ini']['Catalog']['driver'] : ''; if (empty($driver)) { $this->addWarning("WARNING: Could not find ILS driver setting."); } elseif ('Sample' == $driver) { // No configuration file for Sample driver } elseif (!file_exists($this->oldDir . '/' . $driver . '.ini')) { $this->addWarning( "WARNING: Could not find {$driver}.ini file; " . "check your ILS driver configuration." ); } else { $this->saveUnmodifiedConfig($driver . '.ini'); } // If we're set to load NoILS.ini on failure, copy that over as well: if (isset($this->newConfigs['config.ini']['Catalog']['loadNoILSOnFailure']) && $this->newConfigs['config.ini']['Catalog']['loadNoILSOnFailure'] ) { // If NoILS is also the main driver, we don't need to copy it twice: if ($driver != 'NoILS') { $this->saveUnmodifiedConfig('NoILS.ini'); } } } /** * Upgrade shard settings (they have moved to a different config file, so * this is handled as a separate method so that all affected settings are * addressed in one place. * * This gets called from updateConfig(), which gets called before other * configuration upgrade routines. This means that we need to modify the * config.ini settings in the newConfigs property (since it is currently * being worked on and will be written to disk shortly), but we need to * modify the searches.ini/facets.ini settings in the oldConfigs property * (because they have not been processed yet). * * @return void */ protected function upgradeShardSettings() { // move settings from config.ini to searches.ini: if (isset($this->newConfigs['config.ini']['IndexShards'])) { $this->oldConfigs['searches.ini']['IndexShards'] = $this->newConfigs['config.ini']['IndexShards']; unset($this->newConfigs['config.ini']['IndexShards']); } if (isset($this->newConfigs['config.ini']['ShardPreferences'])) { $this->oldConfigs['searches.ini']['ShardPreferences'] = $this->newConfigs['config.ini']['ShardPreferences']; unset($this->newConfigs['config.ini']['ShardPreferences']); } // move settings from facets.ini to searches.ini (merging StripFacets // setting with StripFields setting): if (isset($this->oldConfigs['facets.ini']['StripFacets'])) { if (!isset($this->oldConfigs['searches.ini']['StripFields'])) { $this->oldConfigs['searches.ini']['StripFields'] = []; } foreach ($this->oldConfigs['facets.ini']['StripFacets'] as $k => $v) { // If we already have values for the current key, merge and dedupe: if (isset($this->oldConfigs['searches.ini']['StripFields'][$k])) { $v .= ',' . 
$this->oldConfigs['searches.ini']['StripFields'][$k]; $parts = explode(',', $v); foreach ($parts as $i => $part) { $parts[$i] = trim($part); } $v = implode(',', array_unique($parts)); } $this->oldConfigs['searches.ini']['StripFields'][$k] = $v; } unset($this->oldConfigs['facets.ini']['StripFacets']); } } /** * Read the specified file and return an associative array of this format * containing all comments extracted from the file: * * [ * 'sections' => array * 'section_name_1' => array * 'before' => string ("Comments found at the beginning of this section") * 'inline' => string ("Comments found at the end of the section's line") * 'settings' => array * 'setting_name_1' => array * 'before' => string ("Comments found before this setting") * 'inline' => string ("Comments found at the end of setting's line") * ... * 'setting_name_n' => array (same keys as setting_name_1) * ... * 'section_name_n' => array (same keys as section_name_1) * 'after' => string ("Comments found at the very end of the file") * ] * * @param string $filename Name of ini file to read. * * @return array Associative array as described above. */ protected function extractComments($filename) { $lines = file($filename); // Initialize our return value: $retVal = ['sections' => [], 'after' => '']; // Initialize variables for tracking status during parsing: $section = $comments = ''; foreach ($lines as $line) { // To avoid redundant processing, create a trimmed version of the current // line: $trimmed = trim($line); // Is the current line a comment? If so, add to the currentComments // string. Note that we treat blank lines as comments. if (substr($trimmed, 0, 1) == ';' || empty($trimmed)) { $comments .= $line; } elseif (substr($trimmed, 0, 1) == '[' && ($closeBracket = strpos($trimmed, ']')) > 1 ) { // Is the current line the start of a section? If so, create the // appropriate section of the return value: $section = substr($trimmed, 1, $closeBracket - 1); if (!empty($section)) { // Grab comments at the end of the line, if any: if (($semicolon = strpos($trimmed, ';')) !== false) { $inline = trim(substr($trimmed, $semicolon)); } else { $inline = ''; } $retVal['sections'][$section] = [ 'before' => $comments, 'inline' => $inline, 'settings' => []]; $comments = ''; } } elseif (($equals = strpos($trimmed, '=')) !== false) { // Is the current line a setting? If so, add to the return value: $set = trim(substr($trimmed, 0, $equals)); $set = trim(str_replace('[]', '', $set)); if (!empty($section) && !empty($set)) { // Grab comments at the end of the line, if any: if (($semicolon = strpos($trimmed, ';')) !== false) { $inline = trim(substr($trimmed, $semicolon)); } else { $inline = ''; } // Currently, this data structure doesn't support arrays very // well, since it can't distinguish which line of the array // corresponds with which comments. For now, we just append all // the preceding and inline comments together for arrays. Since // we rarely use arrays in the config.ini file, this isn't a big // concern, but we should improve it if we ever need to. if (!isset($retVal['sections'][$section]['settings'][$set])) { $retVal['sections'][$section]['settings'][$set] = ['before' => $comments, 'inline' => $inline]; } else { $retVal['sections'][$section]['settings'][$set]['before'] .= $comments; $retVal['sections'][$section]['settings'][$set]['inline'] .= "\n" . $inline; } $comments = ''; } } } // Store any leftover comments following the last setting: $retVal['after'] = $comments; return $retVal; } }
1
29,047
Do we need to add `theme` here?
vufind-org-vufind
php
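The upgradeShardSettings() routine in the row above merges comma-separated StripFacets values into StripFields, trimming each part and dropping duplicates. A minimal Java sketch of that merge step (the class and method names are illustrative, not VuFind's API):

import java.util.LinkedHashSet;
import java.util.Set;

public class CommaListMerge {
    // Merge two comma-separated lists, trimming whitespace and dropping
    // duplicates while preserving first-seen order, as the PHP upgrade
    // code does when folding StripFacets into StripFields.
    static String merge(String a, String b) {
        Set<String> parts = new LinkedHashSet<>();
        for (String part : (a + "," + b).split(",")) {
            String trimmed = part.trim();
            if (!trimmed.isEmpty()) {
                parts.add(trimmed);
            }
        }
        return String.join(",", parts);
    }

    public static void main(String[] args) {
        // A hypothetical StripFacets value merged into an existing StripFields value:
        System.out.println(merge("format, building", "building,institution"));
        // prints: format,building,institution
    }
}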
@@ -160,6 +160,9 @@ type Network struct { // VPC defines the cluster vpc. VPC *VPC `json:"vpc"` + // InternetGatewayID is the id of the internet gateway associated with the VPC. + InternetGatewayID *string `json:"internetGatewayId"` + // Subnets includes all the subnets defined inside the VPC. Subnets Subnets `json:"subnets"` }
1
// Copyright © 2018 The Kubernetes Authors. // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package v1alpha1 import ( corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) // AWSMachineProviderConfig is the type that will be embedded in a Machine.Spec.ProviderConfig field // for an AWS instance. It is used by the AWS machine actuator to create a single machine instance, // using the RunInstances call (https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_RunInstances.html) // Required parameters such as region that are not specified by this configuration, will be defaulted // by the actuator. // +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object type AWSMachineProviderConfig struct { metav1.TypeMeta `json:",inline"` // AMI is the reference to the AMI from which to create the machine instance. AMI AWSResourceReference `json:"ami"` // InstanceType is the type of instance to create. Example: m4.xlarge InstanceType string `json:"instanceType"` // AdditionalTags is the set of tags to add to an instance, in addition to the ones // added by default by the actuator. These tags are additive. The actuator will ensure // these tags are present, but will not remove any other tags that may exist on the // instance. // +optional AdditionalTags map[string]string `json:"additionalTags,omitempty"` // IAMInstanceProfile is a reference to an IAM role to assign to the instance // +optional IAMInstanceProfile *AWSResourceReference `json:"iamInstanceProfile,omitempty"` // PublicIP specifies whether the instance should get a public IP. // Precedence for this setting is as follows: // 1. This field if set // 2. Cluster/flavor setting // 3. Subnet default // +optional PublicIP *bool `json:"publicIP,omitempty"` // AdditionalSecurityGroups is an array of references to security groups that should be applied to the // instance. These security groups would be set in addition to any security groups defined // at the cluster level or in the actuator. // +optional AdditionalSecurityGroups []AWSResourceReference `json:"additionalSecurityGroups,omitempty"` // Subnet is a reference to the subnet to use for this instance. If not specified, // the cluster subnet will be used. // +optional Subnet *AWSResourceReference `json:"subnet,omitempty"` } // AWSResourceReference is a reference to a specific AWS resource by ID, ARN, or filters. // Only one of ID, ARN or Filters may be specified. Specifying more than one will result in // a validation error. type AWSResourceReference struct { // ID of resource // +optional ID *string `json:"id,omitempty"` // ARN of resource // +optional ARN *string `json:"arn,omitempty"` // Filters is a set of key/value pairs used to identify a resource // They are applied according to the rules defined by the AWS API: // https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Filtering.html // +optional Filters []Filter `json:"filters"` } // Filter is a filter used to identify an AWS resource type Filter struct { // Name of the filter. 
Filter names are case-sensitive. Name string `json:"name"` // Values includes one or more filter values. Filter values are case-sensitive. Values []string `json:"values"` } // +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object type AWSClusterProviderConfig struct { metav1.TypeMeta `json:",inline"` } // AWSMachineProviderStatus is the type that will be embedded in a Machine.Status.ProviderStatus field. // It containsk AWS-specific status information. // +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object type AWSMachineProviderStatus struct { metav1.TypeMeta `json:",inline"` // InstanceID is the instance ID of the machine created in AWS // +optional InstanceID *string `json:"instanceID,omitempty"` // InstanceState is the state of the AWS instance for this machine // +optional InstanceState *string `json:"instanceState,omitempty"` // Conditions is a set of conditions associated with the Machine to indicate // errors or other status // +optional Conditions []AWSMachineProviderCondition `json:"conditions,omitempty"` } // AWSMachineProviderConditionType is a valid value for AWSMachineProviderCondition.Type type AWSMachineProviderConditionType string // Valid conditions for an AWS machine instance const ( // MachineCreated indicates whether the machine has been created or not. If not, // it should include a reason and message for the failure. MachineCreated AWSMachineProviderConditionType = "MachineCreated" ) // AWSMachineProviderCondition is a condition in a AWSMachineProviderStatus type AWSMachineProviderCondition struct { // Type is the type of the condition. Type AWSMachineProviderConditionType `json:"type"` // Status is the status of the condition. Status corev1.ConditionStatus `json:"status"` // LastProbeTime is the last time we probed the condition. // +optional LastProbeTime metav1.Time `json:"lastProbeTime"` // LastTransitionTime is the last time the condition transitioned from one status to another. // +optional LastTransitionTime metav1.Time `json:"lastTransitionTime"` // Reason is a unique, one-word, CamelCase reason for the condition's last transition. // +optional Reason string `json:"reason"` // Message is a human-readable message indicating details about last transition. // +optional Message string `json:"message"` } // +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object type AWSClusterProviderStatus struct { metav1.TypeMeta `json:",inline"` Network Network `json:"network"` } // Network encapsulates AWS networking resources. type Network struct { // VPC defines the cluster vpc. VPC *VPC `json:"vpc"` // Subnets includes all the subnets defined inside the VPC. Subnets Subnets `json:"subnets"` } // VPC defines an AWS vpc. type VPC struct { ID string `json:"id"` CidrBlock string `json:"cidrBlock"` } // Subnet defines an AWS subnet attached to a VPC. type Subnet struct { ID string `json:"id"` VpcID string `json:"vpcId"` AvailabilityZone string `json:"availabilityZone"` CidrBlock string `json:"cidrBlock"` IsPublic bool `json:"public"` } // Subnets is a slice of Subnet. type Subnets []*Subnet // FilterPrivate returns a slice containing all subnets marked as private. func (s Subnets) FilterPrivate() (res []*Subnet) { for _, x := range s { if !x.IsPublic { res = append(res, x) } } return } // FilterPublic returns a slice containing all subnets marked as public. func (s Subnets) FilterPublic() (res []*Subnet) { for _, x := range s { if x.IsPublic { res = append(res, x) } } return }
1
6,076
I think you have to regen the API stuff: `make generate`
kubernetes-sigs-cluster-api-provider-aws
go
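The Go types in the row above expose FilterPrivate and FilterPublic helpers that partition a subnet slice on its IsPublic flag. An equivalent sketch in Java streams (the Subnet record is a stand-in for illustration, not this project's type):

import java.util.List;
import java.util.stream.Collectors;

public class SubnetFilters {
    // Stand-in for the Go Subnet struct, reduced to the fields needed here.
    record Subnet(String id, boolean isPublic) {}

    // Mirrors Subnets.FilterPublic: keep only subnets marked public.
    static List<Subnet> filterPublic(List<Subnet> subnets) {
        return subnets.stream().filter(Subnet::isPublic).collect(Collectors.toList());
    }

    // Mirrors Subnets.FilterPrivate: keep only subnets not marked public.
    static List<Subnet> filterPrivate(List<Subnet> subnets) {
        return subnets.stream().filter(s -> !s.isPublic()).collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<Subnet> all = List.of(new Subnet("subnet-a", true), new Subnet("subnet-b", false));
        System.out.println(filterPublic(all));  // [Subnet[id=subnet-a, isPublic=true]]
        System.out.println(filterPrivate(all)); // [Subnet[id=subnet-b, isPublic=false]]
    }
}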
@@ -187,9 +187,17 @@ public class SECP256K1 { signer.init(true, privKey); final BigInteger[] components = signer.generateSignature(dataHash.toArrayUnsafe()); - final BigInteger r = components[0]; - BigInteger s = components[1]; + return normaliseSignature(components[0], components[1], keyPair.getPublicKey(), dataHash); + } + + public static Signature normaliseSignature( + final BigInteger nativeR, + final BigInteger nativeS, + final PublicKey publicKey, + final Bytes32 dataHash) { + + BigInteger s = nativeS; // Automatically adjust the S component to be less than or equal to half the curve // order, if necessary. This is required because for every signature (r,s) the signature // (r, -s (mod N)) is a valid signature of the same message. However, we dislike the
1
/* * Copyright ConsenSys AG. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * SPDX-License-Identifier: Apache-2.0 */ package org.hyperledger.besu.crypto; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import java.math.BigInteger; import java.security.InvalidAlgorithmParameterException; import java.security.KeyPairGenerator; import java.security.Security; import java.security.spec.ECGenParameterSpec; import java.util.Arrays; import java.util.Objects; import java.util.Optional; import java.util.function.UnaryOperator; import org.apache.tuweni.bytes.Bytes; import org.apache.tuweni.bytes.Bytes32; import org.apache.tuweni.bytes.MutableBytes; import org.apache.tuweni.units.bigints.UInt256; import org.bouncycastle.asn1.sec.SECNamedCurves; import org.bouncycastle.asn1.x9.X9ECParameters; import org.bouncycastle.asn1.x9.X9IntegerConverter; import org.bouncycastle.crypto.agreement.ECDHBasicAgreement; import org.bouncycastle.crypto.digests.SHA256Digest; import org.bouncycastle.crypto.params.ECDomainParameters; import org.bouncycastle.crypto.params.ECPrivateKeyParameters; import org.bouncycastle.crypto.params.ECPublicKeyParameters; import org.bouncycastle.crypto.signers.ECDSASigner; import org.bouncycastle.crypto.signers.HMacDSAKCalculator; import org.bouncycastle.jcajce.provider.asymmetric.ec.BCECPrivateKey; import org.bouncycastle.jcajce.provider.asymmetric.ec.BCECPublicKey; import org.bouncycastle.jce.provider.BouncyCastleProvider; import org.bouncycastle.math.ec.ECAlgorithms; import org.bouncycastle.math.ec.ECPoint; import org.bouncycastle.math.ec.FixedPointCombMultiplier; import org.bouncycastle.math.ec.custom.sec.SecP256K1Curve; /* * Adapted from the BitcoinJ ECKey (Apache 2 License) implementation: * https://github.com/bitcoinj/bitcoinj/blob/master/core/src/main/java/org/bitcoinj/core/ECKey.java * * * Adapted from the web3j (Apache 2 License) implementations: * https://github.com/web3j/web3j/crypto/src/main/java/org/web3j/crypto/*.java */ public class SECP256K1 { public static final String ALGORITHM = "ECDSA"; public static final String CURVE_NAME = "secp256k1"; public static final String PROVIDER = "BC"; public static final ECDomainParameters CURVE; public static final BigInteger HALF_CURVE_ORDER; private static final KeyPairGenerator KEY_PAIR_GENERATOR; private static final BigInteger CURVE_ORDER; static { Security.addProvider(new BouncyCastleProvider()); final X9ECParameters params = SECNamedCurves.getByName(CURVE_NAME); CURVE = new ECDomainParameters(params.getCurve(), params.getG(), params.getN(), params.getH()); CURVE_ORDER = CURVE.getN(); HALF_CURVE_ORDER = CURVE_ORDER.shiftRight(1); try { KEY_PAIR_GENERATOR = KeyPairGenerator.getInstance(ALGORITHM, PROVIDER); } catch (final Exception e) { throw new RuntimeException(e); } final ECGenParameterSpec ecGenParameterSpec = new ECGenParameterSpec(CURVE_NAME); try { KEY_PAIR_GENERATOR.initialize(ecGenParameterSpec, SecureRandomProvider.createSecureRandom()); } catch (final 
InvalidAlgorithmParameterException e) { throw new RuntimeException(e); } } /** Decompress a compressed public key (x co-ord and low-bit of y-coord). */ private static ECPoint decompressKey(final BigInteger xBN, final boolean yBit) { final X9IntegerConverter x9 = new X9IntegerConverter(); final byte[] compEnc = x9.integerToBytes(xBN, 1 + x9.getByteLength(CURVE.getCurve())); compEnc[0] = (byte) (yBit ? 0x03 : 0x02); // TODO: Find a better way to handle an invalid point compression here. // Currently ECCurve#decodePoint throws an IllegalArgumentException. return CURVE.getCurve().decodePoint(compEnc); } /** * Given the components of a signature and a selector value, recover and return the public key * that generated the signature according to the algorithm in SEC1v2 section 4.1.6. * * <p>If this method returns null it means recovery was not possible and recId should be iterated. * * <p>Given the above two points, a correct usage of this method is inside a for loop from 0 to 3, * and if the output is null OR a key that is not the one you expect, you try again with the next * recId. * * @param recId Which possible key to recover. * @param r The R component of the signature. * @param s The S component of the signature. * @param dataHash Hash of the data that was signed. * @return An ECKey containing only the public part, or null if recovery wasn't possible. */ private static BigInteger recoverFromSignature( final int recId, final BigInteger r, final BigInteger s, final Bytes32 dataHash) { assert (recId >= 0); assert (r.signum() >= 0); assert (s.signum() >= 0); assert (dataHash != null); // 1.0 For j from 0 to h (h == recId here and the loop is outside this function) // 1.1 Let x = r + jn final BigInteger n = CURVE.getN(); // Curve order. final BigInteger i = BigInteger.valueOf((long) recId / 2); final BigInteger x = r.add(i.multiply(n)); // 1.2. Convert the integer x to an octet string X of length mlen using the conversion // routine specified in Section 2.3.7, where mlen = ⌈(log2 p)/8⌉ or mlen = ⌈m/8⌉. // 1.3. Convert the octet string (16 set binary digits)||X to an elliptic curve point R // using the conversion routine specified in Section 2.3.4. If this conversion // routine outputs "invalid", then do another iteration of Step 1. // // More concisely, what these points mean is to use X as a compressed public key. final BigInteger prime = SecP256K1Curve.q; if (x.compareTo(prime) >= 0) { // Cannot have point co-ordinates larger than this as everything takes place modulo Q. return null; } // Compressed keys require you to know an extra bit of data about the y-coord as there are // two possibilities. So it's encoded in the recId. final ECPoint R = decompressKey(x, (recId & 1) == 1); // 1.4. If nR != point at infinity, then do another iteration of Step 1 (callers // responsibility). if (!R.multiply(n).isInfinity()) { return null; } // 1.5. Compute e from M using Steps 2 and 3 of ECDSA signature verification. final BigInteger e = dataHash.toUnsignedBigInteger(); // 1.6. For k from 1 to 2 do the following. (loop is outside this function via // iterating recId) // 1.6.1. Compute a candidate public key as: // Q = mi(r) * (sR - eG) // // Where mi(x) is the modular multiplicative inverse. We transform this into the following: // Q = (mi(r) * s ** R) + (mi(r) * -e ** G) // Where -e is the modular additive inverse of e, that is z such that z + e = 0 (mod n). // In the above equation ** is point multiplication and + is point addition (the EC group // operator). 
// // We can find the additive inverse by subtracting e from zero then taking the mod. For // example the additive inverse of 3 modulo 11 is 8 because 3 + 8 mod 11 = 0, and // -3 mod 11 = 8. final BigInteger eInv = BigInteger.ZERO.subtract(e).mod(n); final BigInteger rInv = r.modInverse(n); final BigInteger srInv = rInv.multiply(s).mod(n); final BigInteger eInvrInv = rInv.multiply(eInv).mod(n); final ECPoint q = ECAlgorithms.sumOfTwoMultiplies(CURVE.getG(), eInvrInv, R, srInv); if (q.isInfinity()) { return null; } final byte[] qBytes = q.getEncoded(false); // We remove the prefix return new BigInteger(1, Arrays.copyOfRange(qBytes, 1, qBytes.length)); } public static Signature sign(final Bytes32 dataHash, final KeyPair keyPair) { final ECDSASigner signer = new ECDSASigner(new HMacDSAKCalculator(new SHA256Digest())); final ECPrivateKeyParameters privKey = new ECPrivateKeyParameters( keyPair.getPrivateKey().getEncodedBytes().toUnsignedBigInteger(), CURVE); signer.init(true, privKey); final BigInteger[] components = signer.generateSignature(dataHash.toArrayUnsafe()); final BigInteger r = components[0]; BigInteger s = components[1]; // Automatically adjust the S component to be less than or equal to half the curve // order, if necessary. This is required because for every signature (r,s) the signature // (r, -s (mod N)) is a valid signature of the same message. However, we dislike the // ability to modify the bits of a Bitcoin transaction after it's been signed, as that // violates various assumed invariants. Thus in future only one of those forms will be // considered legal and the other will be banned. if (s.compareTo(HALF_CURVE_ORDER) > 0) { // The order of the curve is the number of valid points that exist on that curve. // If S is in the upper half of the number of valid points, then bring it back to // the lower half. Otherwise, imagine that // N = 10 // s = 8, so (-8 % 10 == 2) thus both (r, 8) and (r, 2) are valid solutions. // 10 - 8 == 2, giving us always the latter solution, which is canonical. s = CURVE.getN().subtract(s); } // Now we have to work backwards to figure out the recId needed to recover the signature. int recId = -1; final BigInteger publicKeyBI = keyPair.getPublicKey().getEncodedBytes().toUnsignedBigInteger(); for (int i = 0; i < 4; i++) { final BigInteger k = recoverFromSignature(i, r, s, dataHash); if (k != null && k.equals(publicKeyBI)) { recId = i; break; } } if (recId == -1) { throw new RuntimeException( "Could not construct a recoverable key. This should never happen."); } return new Signature(r, s, (byte) recId); } /** * Verifies the given ECDSA signature against the message bytes using the public key bytes. * * <p>When using native ECDSA verification, data must be 32 bytes, and no element may be larger * than 520 bytes. * * @param data Hash of the data to verify. * @param signature ASN.1 encoded signature. * @param pub The public key bytes to use. * @return True if the verification is successful. 
*/ public static boolean verify(final Bytes data, final Signature signature, final PublicKey pub) { final ECDSASigner signer = new ECDSASigner(); final Bytes toDecode = Bytes.wrap(Bytes.of((byte) 4), pub.getEncodedBytes()); final ECPublicKeyParameters params = new ECPublicKeyParameters(CURVE.getCurve().decodePoint(toDecode.toArrayUnsafe()), CURVE); signer.init(false, params); try { return signer.verifySignature(data.toArrayUnsafe(), signature.r, signature.s); } catch (final NullPointerException e) { // Bouncy Castle contains a bug that can cause NPEs given specially crafted signatures. Those // signatures // are inherently invalid/attack sigs so we just fail them here rather than crash the thread. return false; } } /** * Verifies the given ECDSA signature using the public key bytes against the message bytes, * previously passed through a preprocessor function, which is normally a hashing function. * * @param data The data to verify. * @param signature ASN.1 encoded signature. * @param pub The public key bytes to use. * @param preprocessor The function to apply to the data before verifying the signature, normally * a hashing function. * @return True if the verification is successful. */ public static boolean verify( final Bytes data, final Signature signature, final PublicKey pub, final UnaryOperator<Bytes> preprocessor) { checkArgument(preprocessor != null, "preprocessor must not be null"); return verify(preprocessor.apply(data), signature, pub); } /** * Calculates an ECDH key agreement between the private and the public key. * * @param privKey The private key. * @param theirPubKey The public key. * @return The agreed secret. */ public static Bytes32 calculateECDHKeyAgreement( final PrivateKey privKey, final PublicKey theirPubKey) { checkArgument(privKey != null, "missing private key"); checkArgument(theirPubKey != null, "missing remote public key"); final ECPrivateKeyParameters privKeyP = new ECPrivateKeyParameters(privKey.getD(), CURVE); final ECPublicKeyParameters pubKeyP = new ECPublicKeyParameters(theirPubKey.asEcPoint(), CURVE); final ECDHBasicAgreement agreement = new ECDHBasicAgreement(); agreement.init(privKeyP); final BigInteger agreed = agreement.calculateAgreement(pubKeyP); return UInt256.valueOf(agreed).toBytes(); } public static class PrivateKey implements java.security.PrivateKey { private final Bytes32 encoded; private PrivateKey(final Bytes32 encoded) { checkNotNull(encoded); this.encoded = encoded; } public static PrivateKey create(final BigInteger key) { checkNotNull(key); return create(UInt256.valueOf(key).toBytes()); } public static PrivateKey create(final Bytes32 key) { return new PrivateKey(key); } public ECPoint asEcPoint() { return CURVE.getCurve().decodePoint(encoded.toArrayUnsafe()); } @Override public boolean equals(final Object other) { if (!(other instanceof PrivateKey)) { return false; } final PrivateKey that = (PrivateKey) other; return this.encoded.equals(that.encoded); } @Override public byte[] getEncoded() { return encoded.toArrayUnsafe(); } public Bytes32 getEncodedBytes() { return encoded; } public BigInteger getD() { return encoded.toUnsignedBigInteger(); } @Override public String getAlgorithm() { return ALGORITHM; } @Override public String getFormat() { return null; } @Override public int hashCode() { return encoded.hashCode(); } @Override public String toString() { return encoded.toString(); } } public static class PublicKey implements java.security.PublicKey { private static final int BYTE_LENGTH = 64; private final Bytes encoded; public static 
PublicKey create(final PrivateKey privateKey) { BigInteger privKey = privateKey.getEncodedBytes().toUnsignedBigInteger(); /* * TODO: FixedPointCombMultiplier currently doesn't support scalars longer than the group * order, but that could change in future versions. */ if (privKey.bitLength() > CURVE.getN().bitLength()) { privKey = privKey.mod(CURVE.getN()); } final ECPoint point = new FixedPointCombMultiplier().multiply(CURVE.getG(), privKey); return PublicKey.create(Bytes.wrap(Arrays.copyOfRange(point.getEncoded(false), 1, 65))); } private static Bytes toBytes64(final byte[] backing) { if (backing.length == BYTE_LENGTH) { return Bytes.wrap(backing); } else if (backing.length > BYTE_LENGTH) { return Bytes.wrap(backing, backing.length - BYTE_LENGTH, BYTE_LENGTH); } else { final MutableBytes res = MutableBytes.create(BYTE_LENGTH); Bytes.wrap(backing).copyTo(res, BYTE_LENGTH - backing.length); return res; } } public static PublicKey create(final BigInteger key) { checkNotNull(key); return create(toBytes64(key.toByteArray())); } public static PublicKey create(final Bytes encoded) { return new PublicKey(encoded); } public static Optional<PublicKey> recoverFromSignature( final Bytes32 dataHash, final Signature signature) { final BigInteger publicKeyBI = SECP256K1.recoverFromSignature( signature.getRecId(), signature.getR(), signature.getS(), dataHash); return Optional.ofNullable(publicKeyBI).map(PublicKey::create); } private PublicKey(final Bytes encoded) { checkNotNull(encoded); checkArgument( encoded.size() == BYTE_LENGTH, "Encoding must be %s bytes long, got %s", BYTE_LENGTH, encoded.size()); this.encoded = encoded; } /** * Returns this public key as an {@link ECPoint} of Bouncy Castle, to facilitate cryptographic * operations. * * @return This public key represented as an Elliptic Curve point. */ public ECPoint asEcPoint() { // 0x04 is the prefix for uncompressed keys. 
final Bytes val = Bytes.concatenate(Bytes.of(0x04), encoded); return CURVE.getCurve().decodePoint(val.toArrayUnsafe()); } @Override public boolean equals(final Object other) { if (!(other instanceof PublicKey)) { return false; } final PublicKey that = (PublicKey) other; return this.encoded.equals(that.encoded); } @Override public byte[] getEncoded() { return encoded.toArrayUnsafe(); } public Bytes getEncodedBytes() { return encoded; } @Override public String getAlgorithm() { return ALGORITHM; } @Override public String getFormat() { return null; } @Override public int hashCode() { return encoded.hashCode(); } @Override public String toString() { return encoded.toString(); } } public static class KeyPair { private final PrivateKey privateKey; private final PublicKey publicKey; public KeyPair(final PrivateKey privateKey, final PublicKey publicKey) { checkNotNull(privateKey); checkNotNull(publicKey); this.privateKey = privateKey; this.publicKey = publicKey; } public static KeyPair create(final PrivateKey privateKey) { return new KeyPair(privateKey, PublicKey.create(privateKey)); } public static KeyPair generate() { final java.security.KeyPair rawKeyPair = KEY_PAIR_GENERATOR.generateKeyPair(); final BCECPrivateKey privateKey = (BCECPrivateKey) rawKeyPair.getPrivate(); final BCECPublicKey publicKey = (BCECPublicKey) rawKeyPair.getPublic(); final BigInteger privateKeyValue = privateKey.getD(); // Ethereum does not use encoded public keys like bitcoin - see // https://en.bitcoin.it/wiki/Elliptic_Curve_Digital_Signature_Algorithm for details // Additionally, as the first bit is a constant prefix (0x04) we ignore this value final byte[] publicKeyBytes = publicKey.getQ().getEncoded(false); final BigInteger publicKeyValue = new BigInteger(1, Arrays.copyOfRange(publicKeyBytes, 1, publicKeyBytes.length)); return new KeyPair(PrivateKey.create(privateKeyValue), PublicKey.create(publicKeyValue)); } @Override public int hashCode() { return Objects.hash(privateKey, publicKey); } @Override public boolean equals(final Object other) { if (!(other instanceof KeyPair)) { return false; } final KeyPair that = (KeyPair) other; return this.privateKey.equals(that.privateKey) && this.publicKey.equals(that.publicKey); } public PrivateKey getPrivateKey() { return privateKey; } public PublicKey getPublicKey() { return publicKey; } } public static class Signature { public static final int BYTES_REQUIRED = 65; /** * The recovery id to reconstruct the public key used to create the signature. * * <p>The recId is an index from 0 to 3 which indicates which of the 4 possible keys is the * correct one. Because the key recovery operation yields multiple potential keys, the correct * key must either be stored alongside the signature, or you must be willing to try each recId * in turn until you find one that outputs the key you are expecting. */ private final byte recId; private final BigInteger r; private final BigInteger s; private Signature(final BigInteger r, final BigInteger s, final byte recId) { this.r = r; this.s = s; this.recId = recId; } /** * Creates a new signature object given its parameters. * * @param r the 'r' part of the signature. * @param s the 's' part of the signature. * @param recId the recovery id part of the signature. * @return the created {@link Signature} object. * @throws NullPointerException if {@code r} or {@code s} are {@code null}. * @throws IllegalArgumentException if any argument is invalid (for instance, {@code v} is * neither 27 or 28). 
*/ public static Signature create(final BigInteger r, final BigInteger s, final byte recId) { checkNotNull(r); checkNotNull(s); checkInBounds("r", r); checkInBounds("s", s); if (recId != 0 && recId != 1) { throw new IllegalArgumentException( "Invalid 'recId' value, should be 0 or 1 but got " + recId); } return new Signature(r, s, recId); } private static void checkInBounds(final String name, final BigInteger i) { if (i.compareTo(BigInteger.ONE) < 0) { throw new IllegalArgumentException( String.format("Invalid '%s' value, should be >= 1 but got %s", name, i)); } if (i.compareTo(CURVE_ORDER) >= 0) { throw new IllegalArgumentException( String.format("Invalid '%s' value, should be < %s but got %s", CURVE_ORDER, name, i)); } } public static Signature decode(final Bytes bytes) { checkArgument( bytes.size() == BYTES_REQUIRED, "encoded SECP256K1 signature must be 65 bytes long"); final BigInteger r = bytes.slice(0, 32).toUnsignedBigInteger(); final BigInteger s = bytes.slice(32, 32).toUnsignedBigInteger(); final byte recId = bytes.get(64); return SECP256K1.Signature.create(r, s, recId); } public Bytes encodedBytes() { final MutableBytes bytes = MutableBytes.create(BYTES_REQUIRED); UInt256.valueOf(r).toBytes().copyTo(bytes, 0); UInt256.valueOf(s).toBytes().copyTo(bytes, 32); bytes.set(64, recId); return bytes; } @Override public boolean equals(final Object other) { if (!(other instanceof Signature)) { return false; } final Signature that = (Signature) other; return this.r.equals(that.r) && this.s.equals(that.s) && this.recId == that.recId; } @Override public int hashCode() { return Objects.hash(r, s, recId); } public byte getRecId() { return recId; } public BigInteger getR() { return r; } public BigInteger getS() { return s; } @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append("SECP256K1.Signature").append("{"); sb.append("r=").append(r).append(", "); sb.append("s=").append(s).append(", "); sb.append("recId=").append(recId); return sb.append("}").toString(); } } }
1
22,188
`dataHash` shouldn't be needed here
hyperledger-besu
java
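The sign() comments in the row above explain why the S component is folded into the lower half of the curve order: for any valid signature (r, s), (r, N - s) also verifies, so only the low-S form is treated as canonical. A self-contained sketch of just that normalization step, using the published secp256k1 group order:

import java.math.BigInteger;

public class LowSNormalizer {
    // Order of the secp256k1 group (a standard published constant).
    static final BigInteger N = new BigInteger(
        "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141", 16);
    static final BigInteger HALF_N = N.shiftRight(1);

    // If s falls in the upper half of the order, replace it with N - s so
    // each message/key pair has exactly one canonical signature.
    static BigInteger normalizeS(BigInteger s) {
        return s.compareTo(HALF_N) > 0 ? N.subtract(s) : s;
    }

    public static void main(String[] args) {
        BigInteger highS = N.subtract(BigInteger.TWO); // deliberately in the upper half
        System.out.println(normalizeS(highS)); // prints 2, i.e. N - (N - 2)
    }
}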
@@ -110,7 +110,7 @@ public class WebDriverBackedSeleniumHandler implements Routable { StringBuilder printableArgs = new StringBuilder("["); Joiner.on(", ").appendTo(printableArgs, args); printableArgs.append("]"); - LOG.info(String.format("Command request: %s%s on session %s", cmd, printableArgs, sessionId)); + LOG.finest(String.format("Command request: %s%s on session %s", cmd, printableArgs, sessionId)); if ("getNewBrowserSession".equals(cmd)) { // Figure out what to do. If the first arg is "*webdriver", check for a session id and use
1
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.thoughtworks.selenium.webdriven; import com.google.common.base.Joiner; import com.google.common.base.Splitter; import com.thoughtworks.selenium.CommandProcessor; import com.thoughtworks.selenium.SeleniumException; import io.opentracing.Tracer; import org.openqa.selenium.Capabilities; import org.openqa.selenium.ImmutableCapabilities; import org.openqa.selenium.chrome.ChromeOptions; import org.openqa.selenium.edge.EdgeOptions; import org.openqa.selenium.firefox.FirefoxOptions; import org.openqa.selenium.grid.session.ActiveSession; import org.openqa.selenium.ie.InternetExplorerOptions; import org.openqa.selenium.opera.OperaOptions; import org.openqa.selenium.remote.NewSessionPayload; import org.openqa.selenium.remote.SessionId; import org.openqa.selenium.remote.http.FormEncodedData; import org.openqa.selenium.remote.http.HttpRequest; import org.openqa.selenium.remote.http.HttpResponse; import org.openqa.selenium.remote.http.Routable; import org.openqa.selenium.remote.server.ActiveSessionFactory; import org.openqa.selenium.remote.server.ActiveSessionListener; import org.openqa.selenium.remote.server.ActiveSessions; import org.openqa.selenium.remote.server.NewSessionPipeline; import org.openqa.selenium.safari.SafariOptions; import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Logger; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; import static java.net.HttpURLConnection.HTTP_OK; import static java.util.concurrent.TimeUnit.MINUTES; import static java.util.logging.Level.WARNING; import static org.openqa.selenium.remote.http.Contents.utf8String; import static org.openqa.selenium.remote.http.HttpMethod.POST; /** * An implementation of the original selenium rc server endpoint, using a webdriver-backed selenium * in order to get things working. */ public class WebDriverBackedSeleniumHandler implements Routable { // Prepare the shared set of thingies private static final Map<SessionId, CommandProcessor> PROCESSORS = new ConcurrentHashMap<>(); public static final Logger LOG = Logger.getLogger(WebDriverBackedSelenium.class.getName()); private NewSessionPipeline pipeline; private ActiveSessions sessions; private ActiveSessionListener listener; public WebDriverBackedSeleniumHandler(Tracer tracer, ActiveSessions sessions) { this.sessions = sessions == null ? 
new ActiveSessions(5, MINUTES) : sessions; listener = new ActiveSessionListener() { @Override public void onStop(ActiveSession session) { PROCESSORS.remove(session.getId()); } }; sessions.addListener(listener); this.pipeline = NewSessionPipeline.builder().add(new ActiveSessionFactory(tracer)).create(); } @Override public boolean matches(HttpRequest req) { return req.getMethod() == POST && ("/selenium-server/driver/".equals(req.getUri()) || "/selenium-server/driver".equals(req.getUri())); } @Override public HttpResponse execute(HttpRequest req) throws UncheckedIOException { Optional<Map<String, List<String>>> params = FormEncodedData.getData(req); String cmd = getValue("cmd", params, req); SessionId sessionId = null; if (getValue("sessionId", params, req) != null) { sessionId = new SessionId(getValue("sessionId", params, req)); } String[] args = deserializeArgs(params, req); if (cmd == null) { return sendError(HTTP_NOT_FOUND, "Unable to find cmd query parameter"); } StringBuilder printableArgs = new StringBuilder("["); Joiner.on(", ").appendTo(printableArgs, args); printableArgs.append("]"); LOG.info(String.format("Command request: %s%s on session %s", cmd, printableArgs, sessionId)); if ("getNewBrowserSession".equals(cmd)) { // Figure out what to do. If the first arg is "*webdriver", check for a session id and use // that existing session if present. Otherwise, start a new session with whatever comes to // hand. If, however, the first parameter specifies something else, then create a session // using a webdriver-backed instance of that. return startNewSession(args[0], args[1], args.length == 4 ? args[3] : ""); } else if ("testComplete".equals(cmd)) { CommandProcessor commandProcessor = PROCESSORS.get(sessionId); sessions.invalidate(sessionId); if (commandProcessor == null) { return sendError(HTTP_NOT_FOUND, "Unable to find command processor for " + sessionId); } return sendResponse(null); } // Common case. CommandProcessor commandProcessor = PROCESSORS.get(sessionId); if (commandProcessor == null) { return sendError(HTTP_NOT_FOUND, "Unable to find command processor for " + sessionId); } try { String result = commandProcessor.doCommand(cmd, args); return sendResponse(result); } catch (SeleniumException e) { return sendError(HTTP_OK, e.getMessage()); } } private HttpResponse startNewSession( String browserString, String baseUrl, String options) { SessionId sessionId = null; if (options.startsWith("webdriver.remote.sessionid")) { // We may have a hit List<String> split = Splitter.on("=") .omitEmptyStrings() .trimResults() .limit(2) .splitToList(options); if (!"webdriver.remote.sessionid".equals(split.get(0))) { LOG.warning("Unable to find existing webdriver session. Wrong parameter name: " + options); return sendError( HTTP_OK, "Unable to find existing webdriver session. Wrong parameter name: " + options); } if (split.size() != 2) { LOG.warning("Attempted to find webdriver id, but none specified. Bailing"); return sendError( HTTP_OK, "Unable to find existing webdriver session. No ID specified"); } sessionId = new SessionId(split.get(1)); } if (sessionId == null) { // Let's see if the user chose "webdriver" or something specific. 
Capabilities caps; switch (browserString) { case "*webdriver": caps = new ImmutableCapabilities(); break; case "*chrome": case "*firefox": case "*firefoxproxy": case "*firefoxchrome": case "*pifirefox": caps = new FirefoxOptions(); break; case "*iehta": case "*iexplore": case "*iexploreproxy": case "*piiexplore": caps = new InternetExplorerOptions(); break; case "*googlechrome": caps = new ChromeOptions(); break; case "*MicrosoftEdge": caps = new EdgeOptions(); break; case "*opera": case "*operablink": caps = new OperaOptions(); break; case "*safari": case "*safariproxy": caps = new SafariOptions(); break; default: return sendError(HTTP_OK, "Unable to match browser string: " + browserString); } try (NewSessionPayload payload = NewSessionPayload.create(caps)) { ActiveSession session = pipeline.createNewSession(payload); sessions.put(session); sessionId = session.getId(); } catch (Exception e) { LOG.log(WARNING, "Unable to start session", e); return sendError( HTTP_OK, "Unable to start session. Cause can be found in logs. Message is: " + e.getMessage()); } } ActiveSession session = sessions.get(sessionId); if (session == null) { LOG.warning("Attempt to use non-existent session: " + sessionId); return sendError(HTTP_OK, "Attempt to use non-existent session: " + sessionId); } PROCESSORS.put(sessionId, new WebDriverCommandProcessor(baseUrl, session.getWrappedDriver())); return sendResponse(sessionId.toString()); } private HttpResponse sendResponse(String result) { return new HttpResponse() .setStatus(HTTP_OK) .setHeader("", "") .setContent(utf8String("OK".concat(result == null ? "" : "," + result))); } private HttpResponse sendError(int statusCode, String result) { return new HttpResponse() .setStatus(statusCode) .setHeader("", "") .setContent(utf8String("ERROR".concat(result == null ? "" : ": " + result))); } private String[] deserializeArgs(Optional<Map<String, List<String>>> params, HttpRequest req) { // 5 was picked as the maximum length used by the `start` command List<String> args = new ArrayList<>(); for (int i = 0; i < 5; i++) { String value = getValue(String.valueOf(i + 1), params, req); if (value != null) { args.add(value); } else { break; } } return args.toArray(new String[0]); } private String getValue(String key, Optional<Map<String, List<String>>> params, HttpRequest request) { return params.map(data -> { List<String> values = data.getOrDefault(key, new ArrayList<>()); if (values.isEmpty()) { return request.getQueryParameter(key); } return values.get(0); }).orElseGet(() -> request.getQueryParameter(key)); } }
1
17,121
`info` was chosen deliberately to mirror the old behaviour that users expected.
SeleniumHQ-selenium
rb
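getValue() in the row above resolves each key first from the parsed form body and only then falls back to the query string. A stripped-down sketch of that Optional-based fallback (plain maps stand in for the HttpRequest here):

import java.util.List;
import java.util.Map;
import java.util.Optional;

public class ParamLookup {
    // Prefer a value from the (possibly absent) form body; fall back to the
    // query parameters when the body is missing or does not contain the key.
    static String getValue(String key,
                           Optional<Map<String, List<String>>> formData,
                           Map<String, String> queryParams) {
        return formData
            .map(data -> data.getOrDefault(key, List.<String>of()))
            .filter(values -> !values.isEmpty())
            .map(values -> values.get(0))
            .orElseGet(() -> queryParams.get(key));
    }

    public static void main(String[] args) {
        Optional<Map<String, List<String>>> body = Optional.of(Map.of("cmd", List.of("open")));
        System.out.println(getValue("cmd", body, Map.of()));                         // open
        System.out.println(getValue("sessionId", body, Map.of("sessionId", "abc"))); // abc
    }
}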
@@ -31,8 +31,8 @@ describeComponent( component._selectize.setValue(['item 3', 'item 2']); }); - expect(component.get('value'), 'component value').to.deep.equal(['item 3', 'item 2']); - expect(component.get('selection'), 'component selection').to.deep.equal(['item 3', 'item 2']); + expect(component.get('value').toArray(), 'component value').to.deep.equal(['item 3', 'item 2']); + expect(component.get('selection').toArray(), 'component selection').to.deep.equal(['item 3', 'item 2']); }); } );
1
/* jshint expr:true */ import { expect } from 'chai'; import { describeComponent, it } from 'ember-mocha'; import {A as emberA} from 'ember-array/utils'; import run from 'ember-runloop'; describeComponent( 'gh-selectize', 'Unit: Component: gh-selectize', { // Specify the other units that are required for this test // needs: ['component:foo', 'helper:bar'], unit: true }, function () { it('re-orders selection when selectize order is changed', function () { let component = this.subject(); run(() => { component.set('content', emberA(['item 1', 'item 2', 'item 3'])); component.set('selection', emberA(['item 2', 'item 3'])); component.set('multiple', true); }); this.render(); run(() => { component._selectize.setValue(['item 3', 'item 2']); }); expect(component.get('value'), 'component value').to.deep.equal(['item 3', 'item 2']); expect(component.get('selection'), 'component selection').to.deep.equal(['item 3', 'item 2']); }); } );
1
7,398
For some reason, Safari handles Ember arrays differently from Chrome/Firefox. To get around that, we cast the Ember array to a regular array before verifying the deep equality. (Note: I also tried wrapping the expected value in an `emberA`, which would look like `expect(someEmberArray).to.deep.equal(emberA(['a', 'b']));`, but that didn't work either.)
TryGhost-Admin
js
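The review comment above concerns a JavaScript test (coercing an Ember array with `.toArray()` before a deep-equal assertion). To keep this document's added examples in a single language, here is the same normalize-before-compare idea sketched in Java; the collection types are stand-ins chosen for the sketch, not anything from the Ghost codebase.

```java
import java.util.ArrayDeque;
import java.util.List;

public class NormalizeBeforeAssert {
  public static void main(String[] args) {
    // Some non-List collection standing in for the Ember array wrapper.
    ArrayDeque<String> actual = new ArrayDeque<>(List.of("item 3", "item 2"));
    List<String> expected = List.of("item 3", "item 2");

    // Direct comparison fails: ArrayDeque does not define List-style equality.
    System.out.println(actual.equals(expected));              // false

    // Normalizing both sides to the same concrete type makes it meaningful,
    // just like calling .toArray() on the Ember array in the test above.
    System.out.println(List.copyOf(actual).equals(expected)); // true
  }
}
```

The point carries over directly: deep-equality assertions are only meaningful once both sides are converted to the same concrete collection type.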
@@ -36,6 +36,7 @@ public class VectorHolder { private final Dictionary dictionary; private final NullabilityHolder nullabilityHolder; private final Type icebergType; + private final Type originalIcebergType; public VectorHolder( ColumnDescriptor columnDescriptor, FieldVector vector, boolean isDictionaryEncoded,
1
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iceberg.arrow.vectorized; import org.apache.arrow.util.Preconditions; import org.apache.arrow.vector.FieldVector; import org.apache.iceberg.types.Type; import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.column.Dictionary; /** * Container class for holding the Arrow vector storing a batch of values along with other state needed for reading * values out of it. */ public class VectorHolder { private final ColumnDescriptor columnDescriptor; private final FieldVector vector; private final boolean isDictionaryEncoded; private final Dictionary dictionary; private final NullabilityHolder nullabilityHolder; private final Type icebergType; public VectorHolder( ColumnDescriptor columnDescriptor, FieldVector vector, boolean isDictionaryEncoded, Dictionary dictionary, NullabilityHolder holder, Type type) { // All the fields except dictionary are not nullable unless it is a dummy holder Preconditions.checkNotNull(columnDescriptor, "ColumnDescriptor cannot be null"); Preconditions.checkNotNull(vector, "Vector cannot be null"); Preconditions.checkNotNull(holder, "NullabilityHolder cannot be null"); Preconditions.checkNotNull(type, "IcebergType cannot be null"); this.columnDescriptor = columnDescriptor; this.vector = vector; this.isDictionaryEncoded = isDictionaryEncoded; this.dictionary = dictionary; this.nullabilityHolder = holder; this.icebergType = type; } // Only used for returning dummy holder private VectorHolder() { columnDescriptor = null; vector = null; isDictionaryEncoded = false; dictionary = null; nullabilityHolder = null; icebergType = null; } private VectorHolder(FieldVector vec, Type type, NullabilityHolder nulls) { columnDescriptor = null; vector = vec; isDictionaryEncoded = false; dictionary = null; nullabilityHolder = nulls; icebergType = type; } public ColumnDescriptor descriptor() { return columnDescriptor; } public FieldVector vector() { return vector; } public boolean isDictionaryEncoded() { return isDictionaryEncoded; } public Dictionary dictionary() { return dictionary; } public NullabilityHolder nullabilityHolder() { return nullabilityHolder; } public Type icebergType() { return icebergType; } public int numValues() { return vector.getValueCount(); } public static <T> VectorHolder constantHolder(int numRows, T constantValue) { return new ConstantVectorHolder(numRows, constantValue); } public static VectorHolder dummyHolder(int numRows) { return new ConstantVectorHolder(numRows); } public boolean isDummy() { return vector == null; } /** * A Vector Holder which does not actually produce values, consumers of this class should * use the constantValue to populate their ColumnVector implementation. 
*/ public static class ConstantVectorHolder<T> extends VectorHolder { private final T constantValue; private final int numRows; public ConstantVectorHolder(int numRows) { this.numRows = numRows; this.constantValue = null; } public ConstantVectorHolder(int numRows, T constantValue) { this.numRows = numRows; this.constantValue = constantValue; } @Override public int numValues() { return this.numRows; } public Object getConstant() { return constantValue; } } public static class PositionVectorHolder extends VectorHolder { public PositionVectorHolder(FieldVector vector, Type type, NullabilityHolder nulls) { super(vector, type, nulls); } } }
1
42,993
`icebergType` indicates the Iceberg type corresponding to the `FieldVector`, while `originalIcebergType` indicates the actual type we will read from the `FieldVector`. For example, for a Parquet int-backed decimal, `icebergType` would be integer but `originalIcebergType` would be decimal. I intend to keep only `originalIcebergType`, but I'm open to your ideas.
apache-iceberg
java
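To make the reviewer's int-backed-decimal example concrete: the vector physically stores an integer (the `icebergType` side) while readers must surface a decimal (the `originalIcebergType` side). Below is a minimal Java sketch of that reinterpretation; the values and class name are illustrative and not taken from the Iceberg code.

```java
import java.math.BigDecimal;

final class IntBackedDecimalDemo {
  public static void main(String[] args) {
    int physical = 12345; // what the FieldVector physically stores (icebergType: integer)
    int scale = 2;        // scale from the decimal(precision, scale) schema

    // Reinterpret the stored int as the logical value (originalIcebergType: decimal).
    BigDecimal logical = BigDecimal.valueOf(physical, scale);
    System.out.println(logical); // prints: 123.45
  }
}
```

Keeping only `originalIcebergType`, as the reviewer proposes, would push this physical-versus-logical distinction down into the readers.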
@@ -43,7 +43,7 @@ // - Reader: *storage.Reader // - Attributes: storage.ObjectAttrs // - CopyOptions.BeforeCopy: *storage.Copier -// - WriterOptions.BeforeWrite: *storage.Writer +// - WriterOptions.BeforeWrite: **storage.ObjectHandle, *storage.Writer package gcsblob // import "gocloud.dev/blob/gcsblob" import (
1
// Copyright 2018 The Go Cloud Development Kit Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package gcsblob provides a blob implementation that uses GCS. Use OpenBucket // to construct a *blob.Bucket. // // URLs // // For blob.OpenBucket, gcsblob registers for the scheme "gs". // The default URL opener will creating a connection using use default // credentials from the environment, as described in // https://cloud.google.com/docs/authentication/production. // To customize the URL opener, or for more details on the URL format, // see URLOpener. // See https://godoc.org/gocloud.dev#hdr-URLs for background information. // // Escaping // // Go CDK supports all UTF-8 strings; to make this work with providers lacking // full UTF-8 support, strings must be escaped (during writes) and unescaped // (during reads). The following escapes are performed for gcsblob: // - Blob keys: ASCII characters 10 and 13 are escaped to "__0x<hex>__". // Additionally, the "/" in "../" is escaped in the same way. // // As // // gcsblob exposes the following types for As: // - Bucket: *storage.Client // - Error: *googleapi.Error // - ListObject: storage.ObjectAttrs // - ListOptions.BeforeList: *storage.Query // - Reader: *storage.Reader // - Attributes: storage.ObjectAttrs // - CopyOptions.BeforeCopy: *storage.Copier // - WriterOptions.BeforeWrite: *storage.Writer package gcsblob // import "gocloud.dev/blob/gcsblob" import ( "context" "errors" "fmt" "io" "io/ioutil" "net/url" "sort" "strings" "sync" "time" "cloud.google.com/go/storage" "github.com/google/wire" "gocloud.dev/blob" "gocloud.dev/blob/driver" "gocloud.dev/gcerrors" "gocloud.dev/gcp" "gocloud.dev/internal/escape" "gocloud.dev/internal/useragent" "google.golang.org/api/googleapi" "google.golang.org/api/iterator" "google.golang.org/api/option" ) const defaultPageSize = 1000 func init() { blob.DefaultURLMux().RegisterBucket(Scheme, new(lazyCredsOpener)) } // Set holds Wire providers for this package. var Set = wire.NewSet( Options{}, URLOpener{}, ) // lazyCredsOpener obtains Application Default Credentials on the first call // lazyCredsOpener obtains Application Default Credentials on the first call // to OpenBucketURL. type lazyCredsOpener struct { init sync.Once opener *URLOpener err error } func (o *lazyCredsOpener) OpenBucketURL(ctx context.Context, u *url.URL) (*blob.Bucket, error) { o.init.Do(func() { creds, err := gcp.DefaultCredentials(ctx) if err != nil { o.err = err return } client, err := gcp.NewHTTPClient(gcp.DefaultTransport(), creds.TokenSource) if err != nil { o.err = err return } o.opener = &URLOpener{Client: client} }) if o.err != nil { return nil, fmt.Errorf("open bucket %v: %v", u, o.err) } return o.opener.OpenBucketURL(ctx, u) } // Scheme is the URL scheme gcsblob registers its URLOpener under on // blob.DefaultMux. const Scheme = "gs" // URLOpener opens GCS URLs like "gs://mybucket". // // The URL host is used as the bucket name. 
// // The following query parameters are supported: // // - access_id: sets Options.GoogleAccessID // - private_key_path: path to read for Options.PrivateKey type URLOpener struct { // Client must be set to a non-nil HTTP client authenticated with // Cloud Storage scope or equivalent. Client *gcp.HTTPClient // Options specifies the default options to pass to OpenBucket. Options Options } // OpenBucketURL opens the GCS bucket with the same name as the URL's host. func (o *URLOpener) OpenBucketURL(ctx context.Context, u *url.URL) (*blob.Bucket, error) { opts, err := o.forParams(ctx, u.Query()) if err != nil { return nil, fmt.Errorf("open bucket %v: %v", u, err) } return OpenBucket(ctx, o.Client, u.Host, opts) } func (o *URLOpener) forParams(ctx context.Context, q url.Values) (*Options, error) { for k := range q { if k != "access_id" && k != "private_key_path" { return nil, fmt.Errorf("invalid query parameter %q", k) } } opts := new(Options) *opts = o.Options if accessID := q.Get("access_id"); accessID != "" { opts.GoogleAccessID = accessID } if keyPath := q.Get("private_key_path"); keyPath != "" { pk, err := ioutil.ReadFile(keyPath) if err != nil { return nil, err } opts.PrivateKey = pk } return opts, nil } // Options sets options for constructing a *blob.Bucket backed by GCS. type Options struct { // GoogleAccessID represents the authorizer for SignedURL. // Required to use SignedURL. // See https://godoc.org/cloud.google.com/go/storage#SignedURLOptions. GoogleAccessID string // PrivateKey is the Google service account private key. // Exactly one of PrivateKey or SignBytes must be non-nil to use SignedURL. // See https://godoc.org/cloud.google.com/go/storage#SignedURLOptions. PrivateKey []byte // SignBytes is a function for implementing custom signing. // Exactly one of PrivateKey or SignBytes must be non-nil to use SignedURL. // See https://godoc.org/cloud.google.com/go/storage#SignedURLOptions. SignBytes func([]byte) ([]byte, error) } // openBucket returns a GCS Bucket that communicates using the given HTTP client. func openBucket(ctx context.Context, client *gcp.HTTPClient, bucketName string, opts *Options) (*bucket, error) { if client == nil { return nil, errors.New("gcsblob.OpenBucket: client is required") } if bucketName == "" { return nil, errors.New("gcsblob.OpenBucket: bucketName is required") } // We wrap the provided http.Client to add a Go CDK User-Agent. c, err := storage.NewClient(ctx, option.WithHTTPClient(useragent.HTTPClient(&client.Client, "blob"))) if err != nil { return nil, err } if opts == nil { opts = &Options{} } return &bucket{name: bucketName, client: c, opts: opts}, nil } // OpenBucket returns a *blob.Bucket backed by an existing GCS bucket. See the // package documentation for an example. func OpenBucket(ctx context.Context, client *gcp.HTTPClient, bucketName string, opts *Options) (*blob.Bucket, error) { drv, err := openBucket(ctx, client, bucketName, opts) if err != nil { return nil, err } return blob.NewBucket(drv), nil } // bucket represents a GCS bucket, which handles read, write and delete operations // on objects within it. type bucket struct { name string client *storage.Client opts *Options } var emptyBody = ioutil.NopCloser(strings.NewReader("")) // reader reads a GCS object. It implements driver.Reader. type reader struct { body io.ReadCloser attrs driver.ReaderAttributes raw *storage.Reader } func (r *reader) Read(p []byte) (int, error) { return r.body.Read(p) } // Close closes the reader itself. It must be called when done reading. 
func (r *reader) Close() error { return r.body.Close() } func (r *reader) Attributes() *driver.ReaderAttributes { return &r.attrs } func (r *reader) As(i interface{}) bool { p, ok := i.(**storage.Reader) if !ok { return false } *p = r.raw return true } func (b *bucket) ErrorCode(err error) gcerrors.ErrorCode { if err == storage.ErrObjectNotExist { return gcerrors.NotFound } if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { return gcerrors.NotFound } return gcerrors.Unknown } func (b *bucket) Close() error { return nil } // ListPaged implements driver.ListPaged. func (b *bucket) ListPaged(ctx context.Context, opts *driver.ListOptions) (*driver.ListPage, error) { bkt := b.client.Bucket(b.name) query := &storage.Query{ Prefix: escapeKey(opts.Prefix), Delimiter: escapeKey(opts.Delimiter), } if opts.BeforeList != nil { asFunc := func(i interface{}) bool { p, ok := i.(**storage.Query) if !ok { return false } *p = query return true } if err := opts.BeforeList(asFunc); err != nil { return nil, err } } pageSize := opts.PageSize if pageSize == 0 { pageSize = defaultPageSize } iter := bkt.Objects(ctx, query) pager := iterator.NewPager(iter, pageSize, string(opts.PageToken)) var objects []*storage.ObjectAttrs nextPageToken, err := pager.NextPage(&objects) if err != nil { return nil, err } page := driver.ListPage{NextPageToken: []byte(nextPageToken)} if len(objects) > 0 { page.Objects = make([]*driver.ListObject, len(objects)) for i, obj := range objects { asFunc := func(i interface{}) bool { p, ok := i.(*storage.ObjectAttrs) if !ok { return false } *p = *obj return true } if obj.Prefix == "" { // Regular blob. page.Objects[i] = &driver.ListObject{ Key: unescapeKey(obj.Name), ModTime: obj.Updated, Size: obj.Size, MD5: obj.MD5, AsFunc: asFunc, } } else { // "Directory". page.Objects[i] = &driver.ListObject{ Key: unescapeKey(obj.Prefix), IsDir: true, AsFunc: asFunc, } } } // GCS always returns "directories" at the end; sort them. sort.Slice(page.Objects, func(i, j int) bool { return page.Objects[i].Key < page.Objects[j].Key }) } return &page, nil } // As implements driver.As. func (b *bucket) As(i interface{}) bool { p, ok := i.(**storage.Client) if !ok { return false } *p = b.client return true } // As implements driver.ErrorAs. func (b *bucket) ErrorAs(err error, i interface{}) bool { switch v := err.(type) { case *googleapi.Error: if p, ok := i.(**googleapi.Error); ok { *p = v return true } } return false } // Attributes implements driver.Attributes. func (b *bucket) Attributes(ctx context.Context, key string) (*driver.Attributes, error) { key = escapeKey(key) bkt := b.client.Bucket(b.name) obj := bkt.Object(key) attrs, err := obj.Attrs(ctx) if err != nil { return nil, err } return &driver.Attributes{ CacheControl: attrs.CacheControl, ContentDisposition: attrs.ContentDisposition, ContentEncoding: attrs.ContentEncoding, ContentLanguage: attrs.ContentLanguage, ContentType: attrs.ContentType, Metadata: attrs.Metadata, ModTime: attrs.Updated, Size: attrs.Size, MD5: attrs.MD5, AsFunc: func(i interface{}) bool { p, ok := i.(*storage.ObjectAttrs) if !ok { return false } *p = *attrs return true }, }, nil } // NewRangeReader implements driver.NewRangeReader. 
func (b *bucket) NewRangeReader(ctx context.Context, key string, offset, length int64, opts *driver.ReaderOptions) (driver.Reader, error) { key = escapeKey(key) bkt := b.client.Bucket(b.name) obj := bkt.Object(key) r, err := obj.NewRangeReader(ctx, offset, length) if err != nil { return nil, err } modTime, _ := r.LastModified() return &reader{ body: r, attrs: driver.ReaderAttributes{ ContentType: r.ContentType(), ModTime: modTime, Size: r.Size(), }, raw: r, }, nil } // escapeKey does all required escaping for UTF-8 strings to work with GCS. func escapeKey(key string) string { return escape.HexEscape(key, func(r []rune, i int) bool { switch { // GCS doesn't handle these characters (determined via experimentation). case r[i] == 10 || r[i] == 13: return true // For "../", escape the trailing slash. case i > 1 && r[i] == '/' && r[i-1] == '.' && r[i-2] == '.': return true } return false }) } // unescapeKey reverses escapeKey. func unescapeKey(key string) string { return escape.HexUnescape(key) } // NewTypedWriter implements driver.NewTypedWriter. func (b *bucket) NewTypedWriter(ctx context.Context, key string, contentType string, opts *driver.WriterOptions) (driver.Writer, error) { key = escapeKey(key) bkt := b.client.Bucket(b.name) obj := bkt.Object(key) w := obj.NewWriter(ctx) w.CacheControl = opts.CacheControl w.ContentDisposition = opts.ContentDisposition w.ContentEncoding = opts.ContentEncoding w.ContentLanguage = opts.ContentLanguage w.ContentType = contentType w.ChunkSize = bufferSize(opts.BufferSize) w.Metadata = opts.Metadata w.MD5 = opts.ContentMD5 if opts.BeforeWrite != nil { asFunc := func(i interface{}) bool { p, ok := i.(**storage.Writer) if !ok { return false } *p = w return true } if err := opts.BeforeWrite(asFunc); err != nil { return nil, err } } return w, nil } // Copy implements driver.Copy. func (b *bucket) Copy(ctx context.Context, dstKey, srcKey string, opts *driver.CopyOptions) error { dstKey = escapeKey(dstKey) srcKey = escapeKey(srcKey) bkt := b.client.Bucket(b.name) copier := bkt.Object(dstKey).CopierFrom(bkt.Object(srcKey)) if opts.BeforeCopy != nil { asFunc := func(i interface{}) bool { switch v := i.(type) { case **storage.Copier: *v = copier return true } return false } if err := opts.BeforeCopy(asFunc); err != nil { return err } } _, err := copier.Run(ctx) return err } // Delete implements driver.Delete. func (b *bucket) Delete(ctx context.Context, key string) error { key = escapeKey(key) bkt := b.client.Bucket(b.name) obj := bkt.Object(key) return obj.Delete(ctx) } func (b *bucket) SignedURL(ctx context.Context, key string, dopts *driver.SignedURLOptions) (string, error) { if b.opts.GoogleAccessID == "" || (b.opts.PrivateKey == nil && b.opts.SignBytes == nil) { return "", errors.New("to use SignedURL, you must call OpenBucket with a valid Options.GoogleAccessID and exactly one of Options.PrivateKey or Options.SignBytes") } key = escapeKey(key) opts := &storage.SignedURLOptions{ Expires: time.Now().Add(dopts.Expiry), Method: "GET", GoogleAccessID: b.opts.GoogleAccessID, PrivateKey: b.opts.PrivateKey, SignBytes: b.opts.SignBytes, } return storage.SignedURL(b.name, key, opts) } func bufferSize(size int) int { if size == 0 { return googleapi.DefaultUploadChunkSize } else if size > 0 { return size } return 0 // disable buffering }
1
17,073
The double (and thus triple as seen below) pointer gives me pause. I could imagine providing some larger API surface to smooth it out, but it's likely not worth it.
google-go-cloud
go
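The `**storage.Writer` in the diff and the `*p = w` assignments in the `gcsblob.go` source above implement Go CDK's "As" escape hatch: the caller passes a pointer to a pointer, and the driver fills it in if it recognizes the requested type. Since this document's added examples use Java, here is a rough analogy that uses an explicit holder object in place of Go's extra level of indirection; all names here are invented for the sketch, not part of any real API.

```java
import java.util.function.Predicate;

final class AsDemo {
  /** Plays the role of Go's extra pointer level: the caller owns it, the driver fills it. */
  static final class Holder<T> { T value; }

  // Driver side: exposes its internal writer if the caller asks for the right type,
  // mirroring the asFunc closures in gcsblob.go (return false for unknown types).
  static Predicate<Object> asFuncFor(StringBuilder internalWriter) {
    return i -> {
      if (i instanceof Holder) {
        @SuppressWarnings("unchecked")
        Holder<StringBuilder> h = (Holder<StringBuilder>) i;
        h.value = internalWriter; // the Java analogue of "*p = w"
        return true;
      }
      return false;
    };
  }

  public static void main(String[] args) {
    StringBuilder w = new StringBuilder();
    Holder<StringBuilder> h = new Holder<>();
    if (asFuncFor(w).test(h)) {
      h.value.append("escape hatch works"); // caller now holds the raw writer
    }
    System.out.println(w); // prints: escape hatch works
  }
}
```

The reviewer's hesitation about double (and triple) pointers is essentially about this extra indirection; a small holder type is one way other languages smooth it over.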
@@ -1011,13 +1011,13 @@ describe('document', function() { }); describe('Errors', function() { - it('MongooseErrors should be instances of Error (gh-209)', function(done) { + it('MongooseError should be instances of Error (gh-209)', function(done) { const MongooseError = require('../lib/error'); const err = new MongooseError('Some message'); assert.ok(err instanceof Error); done(); }); - it('ValidationErrors should be instances of Error', function(done) { + it('ValidationError should be instances of Error', function(done) { const ValidationError = Document.ValidationError; const err = new ValidationError(new TestDocument); assert.ok(err instanceof Error);
1
'use strict'; /** * Module dependencies. */ const Document = require('../lib/document'); const EventEmitter = require('events').EventEmitter; const EmbeddedDocument = require('../lib/types/embedded'); const Query = require('../lib/query'); const _ = require('lodash'); const assert = require('assert'); const co = require('co'); const random = require('../lib/utils').random; const start = require('./common'); const validator = require('validator'); const Buffer = require('safe-buffer').Buffer; const mongoose = start.mongoose; const Schema = mongoose.Schema; const ObjectId = Schema.ObjectId; const DocumentObjectId = mongoose.Types.ObjectId; const SchemaType = mongoose.SchemaType; const ValidatorError = SchemaType.ValidatorError; const ValidationError = mongoose.Document.ValidationError; const MongooseError = mongoose.Error; /** * Test Document constructor. */ function TestDocument() { Document.apply(this, arguments); } /** * Inherits from Document. */ TestDocument.prototype.__proto__ = Document.prototype; for (const i in EventEmitter.prototype) { TestDocument[i] = EventEmitter.prototype[i]; } /** * Set a dummy schema to simulate compilation. */ const em = new Schema({title: String, body: String}); em.virtual('works').get(function() { return 'em virtual works'; }); const schema = new Schema({ test: String, oids: [ObjectId], numbers: [Number], nested: { age: Number, cool: ObjectId, deep: {x: String}, path: String, setr: String }, nested2: { nested: String, yup: { nested: Boolean, yup: String, age: Number } }, em: [em], date: Date }); TestDocument.prototype.$__setSchema(schema); schema.virtual('nested.agePlus2').get(function() { return this.nested.age + 2; }); schema.virtual('nested.setAge').set(function(v) { this.nested.age = v; }); schema.path('nested.path').get(function(v) { return (this.nested.age || '') + (v ? v : ''); }); schema.path('nested.setr').set(function(v) { return v + ' setter'; }); let dateSetterCalled = false; schema.path('date').set(function(v) { // should not have been cast to a Date yet if (v !== undefined) { assert.equal(typeof v, 'string'); } dateSetterCalled = true; return v; }); /** * Method subject to hooks. Simply fires the callback once the hooks are * executed. */ TestDocument.prototype.hooksTest = function(fn) { fn(null, arguments); }; const childSchema = new Schema({counter: Number}); const parentSchema = new Schema({ name: String, children: [childSchema] }); /** * Test. 
*/ describe('document', function() { let db; before(function() { db = start(); }); after(function(done) { db.close(done); }); describe('delete', function() { it('deletes the document', function() { const schema = new Schema({ x: String }); const Test = db.model('gh6940', schema); return co(function* () { const test = new Test({ x: 'test' }); const doc = yield test.save(); yield doc.delete(); const found = yield Test.findOne({ _id: doc._id }); assert.strictEqual(found, null); }); }); }); describe('updateOne', function() { let Test; before(function() { const schema = new Schema({ x: String, y: String }); Test = db.model('gh6940_2', schema); }); it('updates the document', function() { return co(function* () { const test = new Test({ x: 'test' }); const doc = yield test.save(); yield doc.updateOne({ y: 'test' }); const found = yield Test.findOne({ _id: doc._id }); assert.strictEqual(found.y, 'test'); }); }); it('returns a query', function() { const doc = new Test({ x: 'test' }); assert.ok(doc.updateOne() instanceof Test.Query); }); }); describe('replaceOne', function() { it('replaces the document', function() { const schema = new Schema({ x: String }); const Test = db.model('gh6940_3', schema); return co(function* () { const test = new Test({ x: 'test' }); const doc = yield test.save(); yield doc.replaceOne({ x: 'updated' }); const found = yield Test.findOne({ _id: doc._id }); assert.strictEqual(found.x, 'updated'); }); }); }); describe('shortcut getters', function() { it('return undefined for properties with a null/undefined parent object (gh-1326)', function(done) { const doc = new TestDocument; doc.init({nested: null}); assert.strictEqual(undefined, doc.nested.age); done(); }); it('work', function(done) { const doc = new TestDocument(); doc.init({ test: 'test', oids: [], nested: { age: 5, cool: DocumentObjectId.createFromHexString('4c6c2d6240ced95d0e00003c'), path: 'my path' } }); assert.equal(doc.test, 'test'); assert.ok(doc.oids instanceof Array); assert.equal(doc.nested.age, 5); assert.equal(String(doc.nested.cool), '4c6c2d6240ced95d0e00003c'); assert.equal(doc.nested.agePlus2, 7); assert.equal(doc.nested.path, '5my path'); doc.nested.setAge = 10; assert.equal(doc.nested.age, 10); doc.nested.setr = 'set it'; assert.equal(doc.getValue('nested.setr'), 'set it setter'); const doc2 = new TestDocument(); doc2.init({ test: 'toop', oids: [], nested: { age: 2, cool: DocumentObjectId.createFromHexString('4cf70857337498f95900001c'), deep: {x: 'yay'} } }); assert.equal(doc2.test, 'toop'); assert.ok(doc2.oids instanceof Array); assert.equal(doc2.nested.age, 2); // GH-366 assert.equal(doc2.nested.bonk, undefined); assert.equal(doc2.nested.nested, undefined); assert.equal(doc2.nested.test, undefined); assert.equal(doc2.nested.age.test, undefined); assert.equal(doc2.nested.age.nested, undefined); assert.equal(doc2.oids.nested, undefined); assert.equal(doc2.nested.deep.x, 'yay'); assert.equal(doc2.nested.deep.nested, undefined); assert.equal(doc2.nested.deep.cool, undefined); assert.equal(doc2.nested2.yup.nested, undefined); assert.equal(doc2.nested2.yup.nested2, undefined); assert.equal(doc2.nested2.yup.yup, undefined); assert.equal(doc2.nested2.yup.age, undefined); assert.equal(typeof doc2.nested2.yup, 'object'); doc2.nested2.yup = { age: 150, yup: 'Yesiree', nested: true }; assert.equal(doc2.nested2.nested, undefined); assert.equal(doc2.nested2.yup.nested, true); assert.equal(doc2.nested2.yup.yup, 'Yesiree'); assert.equal(doc2.nested2.yup.age, 150); doc2.nested2.nested = 'y'; 
assert.equal(doc2.nested2.nested, 'y'); assert.equal(doc2.nested2.yup.nested, true); assert.equal(doc2.nested2.yup.yup, 'Yesiree'); assert.equal(doc2.nested2.yup.age, 150); assert.equal(String(doc2.nested.cool), '4cf70857337498f95900001c'); assert.ok(doc.oids !== doc2.oids); done(); }); }); it('test shortcut setters', function(done) { const doc = new TestDocument(); doc.init({ test: 'Test', nested: { age: 5 } }); assert.equal(doc.isModified('test'), false); doc.test = 'Woot'; assert.equal(doc.test, 'Woot'); assert.equal(doc.isModified('test'), true); assert.equal(doc.isModified('nested.age'), false); doc.nested.age = 2; assert.equal(doc.nested.age, 2); assert.ok(doc.isModified('nested.age')); doc.nested = {path: 'overwrite the entire nested object'}; assert.equal(doc.nested.age, undefined); assert.equal(Object.keys(doc._doc.nested).length, 1); assert.equal(doc.nested.path, 'overwrite the entire nested object'); assert.ok(doc.isModified('nested')); done(); }); it('test accessor of id', function(done) { const doc = new TestDocument(); assert.ok(doc._id instanceof DocumentObjectId); done(); }); it('test shortcut of id hexString', function(done) { const doc = new TestDocument(); assert.equal(typeof doc.id, 'string'); done(); }); it('toObject options', function(done) { const doc = new TestDocument(); doc.init({ test: 'test', oids: [], em: [{title: 'asdf'}], nested: { age: 5, cool: DocumentObjectId.createFromHexString('4c6c2d6240ced95d0e00003c'), path: 'my path' }, nested2: {}, date: new Date }); let clone = doc.toObject({getters: true, virtuals: false}); assert.equal(clone.test, 'test'); assert.ok(clone.oids instanceof Array); assert.equal(clone.nested.age, 5); assert.equal(clone.nested.cool.toString(), '4c6c2d6240ced95d0e00003c'); assert.equal(clone.nested.path, '5my path'); assert.equal(clone.nested.agePlus2, undefined); assert.equal(clone.em[0].works, undefined); assert.ok(clone.date instanceof Date); clone = doc.toObject({virtuals: true}); assert.equal(clone.test, 'test'); assert.ok(clone.oids instanceof Array); assert.equal(clone.nested.age, 5); assert.equal(clone.nested.cool.toString(), '4c6c2d6240ced95d0e00003c'); assert.equal(clone.nested.path, 'my path'); assert.equal(clone.nested.agePlus2, 7); assert.equal(clone.em[0].works, 'em virtual works'); clone = doc.toObject({getters: true}); assert.equal(clone.test, 'test'); assert.ok(clone.oids instanceof Array); assert.equal(clone.nested.age, 5); assert.equal(clone.nested.cool.toString(), '4c6c2d6240ced95d0e00003c'); assert.equal(clone.nested.path, '5my path'); assert.equal(clone.nested.agePlus2, 7); assert.equal(clone.em[0].works, 'em virtual works'); // test toObject options doc.schema.options.toObject = {virtuals: true}; clone = doc.toObject({transform: false, virtuals: true}); assert.equal(clone.test, 'test'); assert.ok(clone.oids instanceof Array); assert.equal(clone.nested.age, 5); assert.equal(clone.nested.cool.toString(), '4c6c2d6240ced95d0e00003c'); assert.equal(clone.nested.path, 'my path'); assert.equal(clone.nested.agePlus2, 7); assert.equal(clone.em[0].title, 'asdf'); delete doc.schema.options.toObject; // minimize clone = doc.toObject({minimize: true}); assert.equal(clone.nested2, undefined); clone = doc.toObject({minimize: true, getters: true}); assert.equal(clone.nested2, undefined); clone = doc.toObject({minimize: false}); assert.equal(clone.nested2.constructor.name, 'Object'); assert.equal(Object.keys(clone.nested2).length, 1); clone = doc.toObject('2'); assert.equal(clone.nested2, undefined); doc.schema.options.toObject = 
{minimize: false}; clone = doc.toObject({transform: false, minimize: false}); assert.equal(clone.nested2.constructor.name, 'Object'); assert.equal(Object.keys(clone.nested2).length, 1); delete doc.schema.options.toObject; doc.schema.options.minimize = false; clone = doc.toObject(); assert.equal(clone.nested2.constructor.name, 'Object'); assert.equal(Object.keys(clone.nested2).length, 1); doc.schema.options.minimize = true; clone = doc.toObject(); assert.equal(clone.nested2, undefined); // transform doc.schema.options.toObject = {}; doc.schema.options.toObject.transform = function xform(doc, ret) { // ignore embedded docs if (typeof doc.ownerDocument === 'function') { return; } delete ret.em; delete ret.numbers; delete ret.oids; ret._id = ret._id.toString(); }; clone = doc.toObject(); assert.equal(doc.id, clone._id); assert.ok(undefined === clone.em); assert.ok(undefined === clone.numbers); assert.ok(undefined === clone.oids); assert.equal(clone.test, 'test'); assert.equal(clone.nested.age, 5); // transform with return value const out = {myid: doc._id.toString()}; doc.schema.options.toObject.transform = function(doc, ret) { // ignore embedded docs if (typeof doc.ownerDocument === 'function') { return; } return {myid: ret._id.toString()}; }; clone = doc.toObject(); assert.deepEqual(out, clone); // ignored transform with inline options clone = doc.toObject({x: 1, transform: false}); assert.ok(!('myid' in clone)); assert.equal(clone.test, 'test'); assert.ok(clone.oids instanceof Array); assert.equal(clone.nested.age, 5); assert.equal(clone.nested.cool.toString(), '4c6c2d6240ced95d0e00003c'); assert.equal(clone.nested.path, 'my path'); assert.equal(clone.em[0].constructor.name, 'Object'); // applied transform when inline transform is true clone = doc.toObject({x: 1}); assert.deepEqual(out, clone); // transform passed inline function xform(self, doc, opts) { opts.fields.split(' ').forEach(function(field) { delete doc[field]; }); } clone = doc.toObject({ transform: xform, fields: '_id em numbers oids nested' }); assert.equal(doc.test, 'test'); assert.ok(undefined === clone.em); assert.ok(undefined === clone.numbers); assert.ok(undefined === clone.oids); assert.ok(undefined === clone._id); assert.ok(undefined === clone.nested); // all done delete doc.schema.options.toObject; done(); }); it('toObject transform', function(done) { const schema = new Schema({ name: String, places: [{type: ObjectId, ref: 'toObject-transform-places'}] }); const schemaPlaces = new Schema({ identity: String }); schemaPlaces.set('toObject', { transform: function(doc, ret) { // here should be only toObject-transform-places documents assert.equal(doc.constructor.modelName, 'toObject-transform-places'); return ret; } }); const Test = db.model('toObject-transform', schema); const Places = db.model('toObject-transform-places', schemaPlaces); Places.create({identity: 'a'}, {identity: 'b'}, {identity: 'c'}, function(err, a, b, c) { Test.create({name: 'chetverikov', places: [a, b, c]}, function(err) { assert.ifError(err); Test.findOne({}).populate('places').exec(function(err, docs) { assert.ifError(err); docs.toObject({transform: true}); done(); }); }); }); }); it('saves even if `_id` is null (gh-6406)', function() { const schema = new Schema({ _id: Number, val: String }); const Model = db.model('gh6406', schema); return co(function*() { yield Model.updateOne({ _id: null }, { val: 'test' }, { upsert: true }); let doc = yield Model.findOne(); doc.val = 'test2'; // Should not throw yield doc.save(); doc = yield Model.findOne(); 
assert.strictEqual(doc._id, null); assert.equal(doc.val, 'test2'); }); }); it('allows you to skip validation on save (gh-2981)', function() { const schema = new Schema({ name: { type: String, required: true } }); const MyModel = db.model('gh2981', schema); const doc = new MyModel(); return doc.save({ validateBeforeSave: false }); }); it('doesnt use custom toObject options on save', function(done) { const schema = new Schema({ name: String, iWillNotBeDelete: Boolean, nested: { iWillNotBeDeleteToo: Boolean } }); schema.set('toObject', { transform: function(doc, ret) { delete ret.iWillNotBeDelete; delete ret.nested.iWillNotBeDeleteToo; return ret; } }); const Test = db.model('TestToObject', schema); Test.create({name: 'chetverikov', iWillNotBeDelete: true, 'nested.iWillNotBeDeleteToo': true}, function(err) { assert.ifError(err); Test.findOne({}, function(err, doc) { assert.ifError(err); assert.equal(doc._doc.iWillNotBeDelete, true); assert.equal(doc._doc.nested.iWillNotBeDeleteToo, true); done(); }); }); }); describe('toObject', function() { it('does not apply toObject functions of subdocuments to root document', function(done) { const subdocSchema = new Schema({ test: String, wow: String }); subdocSchema.options.toObject = {}; subdocSchema.options.toObject.transform = function(doc, ret) { delete ret.wow; }; const docSchema = new Schema({ foo: String, wow: Boolean, sub: [subdocSchema] }); const Doc = db.model('Doc', docSchema); Doc.create({ foo: 'someString', wow: true, sub: [{ test: 'someOtherString', wow: 'thisIsAString' }] }, function(err, doc) { const obj = doc.toObject({ transform: function(doc, ret) { ret.phew = 'new'; } }); assert.equal(obj.phew, 'new'); assert.ok(!doc.sub.wow); done(); }); }); it('handles child schema transforms', function(done) { const userSchema = new Schema({ name: String, email: String }); const topicSchema = new Schema({ title: String, email: String, followers: [userSchema] }); userSchema.options.toObject = { transform: function(doc, ret) { delete ret.email; } }; topicSchema.options.toObject = { transform: function(doc, ret) { ret.title = ret.title.toLowerCase(); } }; const Topic = db.model('gh2691', topicSchema, 'gh2691'); const topic = new Topic({ title: 'Favorite Foods', email: '[email protected]', followers: [{name: 'Val', email: '[email protected]'}] }); const output = topic.toObject({transform: true}); assert.equal(output.title, 'favorite foods'); assert.equal(output.email, '[email protected]'); assert.equal(output.followers[0].name, 'Val'); assert.equal(output.followers[0].email, undefined); done(); }); it('doesnt clobber child schema options when called with no params (gh-2035)', function(done) { const userSchema = new Schema({ firstName: String, lastName: String, password: String }); userSchema.virtual('fullName').get(function() { return this.firstName + ' ' + this.lastName; }); userSchema.set('toObject', {virtuals: false}); const postSchema = new Schema({ owner: {type: Schema.Types.ObjectId, ref: 'gh-2035-user'}, content: String }); postSchema.virtual('capContent').get(function() { return this.content.toUpperCase(); }); postSchema.set('toObject', {virtuals: true}); const User = db.model('gh-2035-user', userSchema, 'gh-2035-user'); const Post = db.model('gh-2035-post', postSchema, 'gh-2035-post'); const user = new User({firstName: 'Joe', lastName: 'Smith', password: 'password'}); user.save(function(err, savedUser) { assert.ifError(err); const post = new Post({owner: savedUser._id, content: 'lorem ipsum'}); post.save(function(err, savedPost) { 
assert.ifError(err); Post.findById(savedPost._id).populate('owner').exec(function(err, newPost) { assert.ifError(err); const obj = newPost.toObject(); assert.equal(obj.owner.fullName, undefined); done(); }); }); }); }); }); describe('toJSON', function() { it('toJSON options', function(done) { const doc = new TestDocument(); doc.init({ test: 'test', oids: [], em: [{title: 'asdf'}], nested: { age: 5, cool: DocumentObjectId.createFromHexString('4c6c2d6240ced95d0e00003c'), path: 'my path' }, nested2: {} }); // override to check if toJSON gets fired const path = TestDocument.prototype.schema.path('em'); path.casterConstructor.prototype.toJSON = function() { return {}; }; doc.schema.options.toJSON = {virtuals: true}; let clone = doc.toJSON(); assert.equal(clone.test, 'test'); assert.ok(clone.oids instanceof Array); assert.equal(clone.nested.age, 5); assert.equal(clone.nested.cool.toString(), '4c6c2d6240ced95d0e00003c'); assert.equal(clone.nested.path, 'my path'); assert.equal(clone.nested.agePlus2, 7); assert.equal(clone.em[0].constructor.name, 'Object'); assert.equal(Object.keys(clone.em[0]).length, 0); delete doc.schema.options.toJSON; delete path.casterConstructor.prototype.toJSON; doc.schema.options.toJSON = {minimize: false}; clone = doc.toJSON(); assert.equal(clone.nested2.constructor.name, 'Object'); assert.equal(Object.keys(clone.nested2).length, 1); clone = doc.toJSON('8'); assert.equal(clone.nested2.constructor.name, 'Object'); assert.equal(Object.keys(clone.nested2).length, 1); // gh-852 const arr = [doc]; let err = false; let str; try { str = JSON.stringify(arr); } catch (_) { err = true; } assert.equal(err, false); assert.ok(/nested2/.test(str)); assert.equal(clone.nested2.constructor.name, 'Object'); assert.equal(Object.keys(clone.nested2).length, 1); // transform doc.schema.options.toJSON = {}; doc.schema.options.toJSON.transform = function xform(doc, ret) { // ignore embedded docs if (typeof doc.ownerDocument === 'function') { return; } delete ret.em; delete ret.numbers; delete ret.oids; ret._id = ret._id.toString(); }; clone = doc.toJSON(); assert.equal(clone._id, doc.id); assert.ok(undefined === clone.em); assert.ok(undefined === clone.numbers); assert.ok(undefined === clone.oids); assert.equal(clone.test, 'test'); assert.equal(clone.nested.age, 5); // transform with return value const out = {myid: doc._id.toString()}; doc.schema.options.toJSON.transform = function(doc, ret) { // ignore embedded docs if (typeof doc.ownerDocument === 'function') { return; } return {myid: ret._id.toString()}; }; clone = doc.toJSON(); assert.deepEqual(out, clone); // ignored transform with inline options clone = doc.toJSON({x: 1, transform: false}); assert.ok(!('myid' in clone)); assert.equal(clone.test, 'test'); assert.ok(clone.oids instanceof Array); assert.equal(clone.nested.age, 5); assert.equal(clone.nested.cool.toString(), '4c6c2d6240ced95d0e00003c'); assert.equal(clone.nested.path, 'my path'); assert.equal(clone.em[0].constructor.name, 'Object'); // applied transform when inline transform is true clone = doc.toJSON({x: 1}); assert.deepEqual(out, clone); // transform passed inline function xform(self, doc, opts) { opts.fields.split(' ').forEach(function(field) { delete doc[field]; }); } clone = doc.toJSON({ transform: xform, fields: '_id em numbers oids nested' }); assert.equal(doc.test, 'test'); assert.ok(undefined === clone.em); assert.ok(undefined === clone.numbers); assert.ok(undefined === clone.oids); assert.ok(undefined === clone._id); assert.ok(undefined === clone.nested); // all done 
delete doc.schema.options.toJSON; done(); }); it('jsonifying an object', function(done) { const doc = new TestDocument({test: 'woot'}); const oidString = doc._id.toString(); // convert to json string const json = JSON.stringify(doc); // parse again const obj = JSON.parse(json); assert.equal(obj.test, 'woot'); assert.equal(obj._id, oidString); done(); }); it('jsonifying an object\'s populated items works (gh-1376)', function(done) { const userSchema = new Schema({name: String}); // includes virtual path when 'toJSON' userSchema.set('toJSON', {getters: true}); userSchema.virtual('hello').get(function() { return 'Hello, ' + this.name; }); const User = db.model('User', userSchema); const groupSchema = new Schema({ name: String, _users: [{type: Schema.ObjectId, ref: 'User'}] }); const Group = db.model('Group', groupSchema); User.create({name: 'Alice'}, {name: 'Bob'}, function(err, alice, bob) { assert.ifError(err); new Group({name: 'mongoose', _users: [alice, bob]}).save(function(err, group) { Group.findById(group).populate('_users').exec(function(err, group) { assert.ifError(err); assert.ok(group.toJSON()._users[0].hello); done(); }); }); }); }); }); describe('inspect', function() { it('inspect inherits schema options (gh-4001)', function(done) { const opts = { toObject: { virtuals: true }, toJSON: { virtuals: true } }; const taskSchema = mongoose.Schema({ name: { type: String, required: true } }, opts); taskSchema.virtual('title'). get(function() { return this.name; }). set(function(title) { this.name = title; }); const Task = db.model('gh4001', taskSchema); const doc = { name: 'task1', title: 'task999' }; Task.collection.insertOne(doc, function(error) { assert.ifError(error); Task.findById(doc._id, function(error, doc) { assert.ifError(error); assert.equal(doc.inspect().title, 'task1'); done(); }); }); }); it('does not apply transform to populated docs (gh-4213)', function(done) { const UserSchema = new Schema({ name: String }); const PostSchema = new Schema({ title: String, postedBy: { type: mongoose.Schema.Types.ObjectId, ref: 'gh4213' } }, { toObject: { transform: function(doc, ret) { delete ret._id; } }, toJSON: { transform: function(doc, ret) { delete ret._id; } } }); const User = db.model('gh4213', UserSchema); const Post = db.model('gh4213_0', PostSchema); const val = new User({ name: 'Val' }); const post = new Post({ title: 'Test', postedBy: val._id }); Post.create(post, function(error) { assert.ifError(error); User.create(val, function(error) { assert.ifError(error); Post.find({}). populate('postedBy'). exec(function(error, posts) { assert.ifError(error); assert.equal(posts.length, 1); assert.ok(posts[0].postedBy._id); done(); }); }); }); }); it('populate on nested path (gh-5703)', function() { const toySchema = new mongoose.Schema({ color: String }); const Toy = db.model('gh5703', toySchema); const childSchema = new mongoose.Schema({ name: String, values: { toy: { type: mongoose.Schema.Types.ObjectId, ref: 'gh5703' } } }); const Child = db.model('gh5703_0', childSchema); return Toy.create({ color: 'blue' }). then(function(toy) { return Child.create({ values: { toy: toy._id } }); }). then(function(child) { return Child.findById(child._id); }). then(function(child) { return child.values.populate('toy').execPopulate().then(function() { return child; }); }). 
then(function(child) { assert.equal(child.values.toy.color, 'blue'); }); }); }); describe.skip('#update', function() { it('returns a Query', function(done) { const mg = new mongoose.Mongoose; const M = mg.model('doc#update', {s: String}); const doc = new M; assert.ok(doc.update() instanceof Query); done(); }); it('calling update on document should relay to its model (gh-794)', function(done) { const Docs = new Schema({text: String}); const docs = db.model('docRelayUpdate', Docs); const d = new docs({text: 'A doc'}); let called = false; d.save(function() { const oldUpdate = docs.update; docs.update = function(query, operation) { assert.equal(Object.keys(query).length, 1); assert.equal(d._id, query._id); assert.equal(Object.keys(operation).length, 1); assert.equal(Object.keys(operation.$set).length, 1); assert.equal(operation.$set.text, 'A changed doc'); called = true; docs.update = oldUpdate; oldUpdate.apply(docs, arguments); }; d.update({$set: {text: 'A changed doc'}}, function(err) { assert.ifError(err); assert.equal(called, true); done(); }); }); }); }); it('toObject should not set undefined values to null', function(done) { const doc = new TestDocument(); const obj = doc.toObject(); delete obj._id; assert.deepEqual(obj, {numbers: [], oids: [], em: []}); done(); }); describe('Errors', function() { it('MongooseErrors should be instances of Error (gh-209)', function(done) { const MongooseError = require('../lib/error'); const err = new MongooseError('Some message'); assert.ok(err instanceof Error); done(); }); it('ValidationErrors should be instances of Error', function(done) { const ValidationError = Document.ValidationError; const err = new ValidationError(new TestDocument); assert.ok(err instanceof Error); done(); }); }); it('methods on embedded docs should work', function(done) { const ESchema = new Schema({name: String}); ESchema.methods.test = function() { return this.name + ' butter'; }; ESchema.statics.ten = function() { return 10; }; const E = db.model('EmbeddedMethodsAndStaticsE', ESchema); const PSchema = new Schema({embed: [ESchema]}); const P = db.model('EmbeddedMethodsAndStaticsP', PSchema); let p = new P({embed: [{name: 'peanut'}]}); assert.equal(typeof p.embed[0].test, 'function'); assert.equal(typeof E.ten, 'function'); assert.equal(p.embed[0].test(), 'peanut butter'); assert.equal(E.ten(), 10); // test push casting p = new P; p.embed.push({name: 'apple'}); assert.equal(typeof p.embed[0].test, 'function'); assert.equal(typeof E.ten, 'function'); assert.equal(p.embed[0].test(), 'apple butter'); done(); }); it('setting a positional path does not cast value to array', function(done) { const doc = new TestDocument; doc.init({numbers: [1, 3]}); assert.equal(doc.numbers[0], 1); assert.equal(doc.numbers[1], 3); doc.set('numbers.1', 2); assert.equal(doc.numbers[0], 1); assert.equal(doc.numbers[1], 2); done(); }); it('no maxListeners warning should occur', function(done) { let traced = false; const trace = console.trace; console.trace = function() { traced = true; console.trace = trace; }; const schema = new Schema({ title: String, embed1: [new Schema({name: String})], embed2: [new Schema({name: String})], embed3: [new Schema({name: String})], embed4: [new Schema({name: String})], embed5: [new Schema({name: String})], embed6: [new Schema({name: String})], embed7: [new Schema({name: String})], embed8: [new Schema({name: String})], embed9: [new Schema({name: String})], embed10: [new Schema({name: String})], embed11: [new Schema({name: String})] }); const S = 
db.model('noMaxListeners', schema); new S({title: 'test'}); assert.equal(traced, false); done(); }); it('unselected required fields should pass validation', function(done) { const Tschema = new Schema({ name: String, req: {type: String, required: true} }); const T = db.model('unselectedRequiredFieldValidation', Tschema); const t = new T({name: 'teeee', req: 'i am required'}); t.save(function(err) { assert.ifError(err); T.findById(t).select('name').exec(function(err, t) { assert.ifError(err); assert.equal(t.req, void 0); t.name = 'wooo'; t.save(function(err) { assert.ifError(err); T.findById(t).select('name').exec(function(err, t) { assert.ifError(err); t.req = undefined; t.save(function(err) { err = String(err); const invalid = /Path `req` is required./.test(err); assert.ok(invalid); t.req = 'it works again'; t.save(function(err) { assert.ifError(err); T.findById(t).select('_id').exec(function(err, t) { assert.ifError(err); t.save(function(err) { assert.ifError(err); done(); }); }); }); }); }); }); }); }); }); describe('#validate', function() { const collection = 'validateschema_' + random(); it('works (gh-891)', function(done) { let schema = null; let called = false; const validate = [function() { called = true; return true; }, 'BAM']; schema = new Schema({ prop: {type: String, required: true, validate: validate}, nick: {type: String, required: true} }); const M = db.model('validateSchema', schema, collection); const m = new M({prop: 'gh891', nick: 'validation test'}); m.save(function(err) { assert.ifError(err); assert.equal(called, true); called = false; M.findById(m, 'nick', function(err, m) { assert.equal(called, false); assert.ifError(err); m.nick = 'gh-891'; m.save(function(err) { assert.equal(called, false); assert.ifError(err); done(); }); }); }); }); it('can return a promise', function(done) { let schema = null; const validate = [function() { return true; }, 'BAM']; schema = new Schema({ prop: {type: String, required: true, validate: validate}, nick: {type: String, required: true} }); const M = db.model('validateSchemaPromise', schema, collection); const m = new M({prop: 'gh891', nick: 'validation test'}); const mBad = new M({prop: 'other'}); const promise = m.validate(); promise.then(function() { const promise2 = mBad.validate(); promise2.catch(function(err) { assert.ok(!!err); clearTimeout(timeout); done(); }); }); const timeout = setTimeout(function() { db.close(); throw new Error('Promise not fulfilled!'); }, 500); }); it('doesnt have stale cast errors (gh-2766)', function(done) { const testSchema = new Schema({name: String}); const M = db.model('gh2766', testSchema); const m = new M({_id: 'this is not a valid _id'}); assert.ok(!m.$isValid('_id')); assert.ok(m.validateSync().errors['_id'].name, 'CastError'); m._id = '000000000000000000000001'; assert.ok(m.$isValid('_id')); assert.ifError(m.validateSync()); m.validate(function(error) { assert.ifError(error); done(); }); }); it('cast errors persist across validate() calls (gh-2766)', function(done) { const db = start(); const testSchema = new Schema({name: String}); const M = db.model('gh2766', testSchema); const m = new M({_id: 'this is not a valid _id'}); assert.ok(!m.$isValid('_id')); m.validate(function(error) { assert.ok(error); assert.equal(error.errors['_id'].name, 'CastError'); m.validate(function(error) { assert.ok(error); assert.equal(error.errors['_id'].name, 'CastError'); const err1 = m.validateSync(); const err2 = m.validateSync(); assert.equal(err1.errors['_id'].name, 'CastError'); 
assert.equal(err2.errors['_id'].name, 'CastError'); db.close(done); }); }); }); it('returns a promise when there are no validators', function(done) { let schema = null; schema = new Schema({_id: String}); const M = db.model('validateSchemaPromise2', schema, collection); const m = new M(); const promise = m.validate(); promise.then(function() { clearTimeout(timeout); done(); }); const timeout = setTimeout(function() { db.close(); throw new Error('Promise not fulfilled!'); }, 500); }); describe('works on arrays', function() { it('with required', function(done) { const schema = new Schema({ name: String, arr: {type: [], required: true} }); const M = db.model('validateSchema-array1', schema, collection); const m = new M({name: 'gh1109-1', arr: null}); m.save(function(err) { assert.ok(/Path `arr` is required/.test(err)); m.arr = null; m.save(function(err) { assert.ok(/Path `arr` is required/.test(err)); m.arr = []; m.arr.push('works'); m.save(function(err) { assert.ifError(err); done(); }); }); }); }); it('with custom validator', function(done) { let called = false; function validator(val) { called = true; return val && val.length > 1; } const validate = [validator, 'BAM']; const schema = new Schema({ arr: {type: [], validate: validate} }); const M = db.model('validateSchema-array2', schema, collection); const m = new M({name: 'gh1109-2', arr: [1]}); assert.equal(called, false); m.save(function(err) { assert.equal(String(err), 'ValidationError: arr: BAM'); assert.equal(called, true); m.arr.push(2); called = false; m.save(function(err) { assert.equal(called, true); assert.ifError(err); done(); }); }); }); it('with both required + custom validator', function(done) { function validator(val) { return val && val.length > 1; } const validate = [validator, 'BAM']; const schema = new Schema({ arr: {type: [], required: true, validate: validate} }); const M = db.model('validateSchema-array3', schema, collection); const m = new M({name: 'gh1109-3', arr: null}); m.save(function(err) { assert.equal(err.errors.arr.message, 'Path `arr` is required.'); m.arr = [{nice: true}]; m.save(function(err) { assert.equal(String(err), 'ValidationError: arr: BAM'); m.arr.push(95); m.save(function(err) { assert.ifError(err); done(); }); }); }); }); }); it('validator should run only once gh-1743', function(done) { let count = 0; const Control = new Schema({ test: { type: String, validate: function(value, done) { count++; return done(true); } } }); const PostSchema = new Schema({ controls: [Control] }); const Post = db.model('post', PostSchema); const post = new Post({ controls: [{ test: 'xx' }] }); post.save(function() { assert.equal(count, 1); done(); }); }); it('validator should run only once per sub-doc gh-1743', function(done) { this.timeout(process.env.TRAVIS ? 
8000 : 4500); let count = 0; const db = start(); const Control = new Schema({ test: { type: String, validate: function(value, done) { count++; return done(true); } } }); const PostSchema = new Schema({ controls: [Control] }); const Post = db.model('post', PostSchema); const post = new Post({ controls: [{ test: 'xx' }, { test: 'yy' }] }); post.save(function() { assert.equal(count, post.controls.length); db.close(done); }); }); it('validator should run in parallel', function(done) { let count = 0; let startTime, endTime; const SchemaWithValidator = new Schema({ preference: { type: String, required: true, validate: { validator: function validator(value, done) { count++; if (count === 1) startTime = Date.now(); else if (count === 4) endTime = Date.now(); setTimeout(done.bind(null, true), 150); }, isAsync: true } } }); const MWSV = db.model('mwv', new Schema({subs: [SchemaWithValidator]})); const m = new MWSV({ subs: [{ preference: 'xx' }, { preference: 'yy' }, { preference: '1' }, { preference: '2' }] }); m.save(function(err) { assert.ifError(err); assert.equal(count, 4); assert(endTime - startTime < 150 * 4); // serial >= 150 * 4, parallel < 150 * 4 done(); }); }); }); it('#invalidate', function(done) { let InvalidateSchema = null; let Post = null; let post = null; InvalidateSchema = new Schema({prop: {type: String}}, {strict: false}); mongoose.model('InvalidateSchema', InvalidateSchema); Post = db.model('InvalidateSchema'); post = new Post(); post.set({baz: 'val'}); const _err = post.invalidate('baz', 'validation failed for path {PATH}', 'val', 'custom error'); assert.ok(_err instanceof ValidationError); post.save(function(err) { assert.ok(err instanceof MongooseError); assert.ok(err instanceof ValidationError); assert.ok(err.errors.baz instanceof ValidatorError); assert.equal(err.errors.baz.message, 'validation failed for path baz'); assert.equal(err.errors.baz.path, 'baz'); assert.equal(err.errors.baz.value, 'val'); assert.equal(err.errors.baz.kind, 'custom error'); post.save(function(err) { assert.strictEqual(err, null); done(); }); }); }); describe('#equals', function() { describe('should work', function() { let S; let N; let O; let B; let M; before(function() { S = db.model('equals-S', new Schema({_id: String})); N = db.model('equals-N', new Schema({_id: Number})); O = db.model('equals-O', new Schema({_id: Schema.ObjectId})); B = db.model('equals-B', new Schema({_id: Buffer})); M = db.model('equals-I', new Schema({name: String}, {_id: false})); }); it('with string _ids', function(done) { const s1 = new S({_id: 'one'}); const s2 = new S({_id: 'one'}); assert.ok(s1.equals(s2)); done(); }); it('with number _ids', function(done) { const n1 = new N({_id: 0}); const n2 = new N({_id: 0}); assert.ok(n1.equals(n2)); done(); }); it('with ObjectId _ids', function(done) { let id = new mongoose.Types.ObjectId; let o1 = new O({_id: id}); let o2 = new O({_id: id}); assert.ok(o1.equals(o2)); id = String(new mongoose.Types.ObjectId); o1 = new O({_id: id}); o2 = new O({_id: id}); assert.ok(o1.equals(o2)); done(); }); it('with Buffer _ids', function(done) { const n1 = new B({_id: 0}); const n2 = new B({_id: 0}); assert.ok(n1.equals(n2)); done(); }); it('with _id disabled (gh-1687)', function(done) { const m1 = new M; const m2 = new M; assert.doesNotThrow(function() { m1.equals(m2); }); done(); }); }); }); describe('setter', function() { describe('order', function() { it('is applied correctly', function(done) { const date = 'Thu Aug 16 2012 09:45:59 GMT-0700'; const d = new TestDocument(); dateSetterCalled 
= false; d.date = date; assert.ok(dateSetterCalled); dateSetterCalled = false; assert.ok(d._doc.date instanceof Date); assert.ok(d.date instanceof Date); assert.equal(+d.date, +new Date(date)); done(); }); }); it('works with undefined (gh-1892)', function(done) { const d = new TestDocument(); d.nested.setr = undefined; assert.equal(d.nested.setr, 'undefined setter'); dateSetterCalled = false; d.date = undefined; d.validate(function(err) { assert.ifError(err); assert.ok(dateSetterCalled); done(); }); }); describe('on nested paths', function() { describe('using set(path, object)', function() { it('overwrites the entire object', function(done) { let doc = new TestDocument(); doc.init({ test: 'Test', nested: { age: 5 } }); doc.set('nested', {path: 'overwrite the entire nested object'}); assert.equal(doc.nested.age, undefined); assert.equal(Object.keys(doc._doc.nested).length, 1); assert.equal(doc.nested.path, 'overwrite the entire nested object'); assert.ok(doc.isModified('nested')); // vs merging using doc.set(object) doc.set({test: 'Test', nested: {age: 4}}); assert.equal(doc.nested.path, '4overwrite the entire nested object'); assert.equal(doc.nested.age, 4); assert.equal(Object.keys(doc._doc.nested).length, 2); assert.ok(doc.isModified('nested')); doc = new TestDocument(); doc.init({ test: 'Test', nested: { age: 5 } }); // vs merging using doc.set(path, object, {merge: true}) doc.set('nested', {path: 'did not overwrite the nested object'}, { merge: true }); assert.equal(doc.nested.path, '5did not overwrite the nested object'); assert.equal(doc.nested.age, 5); assert.equal(Object.keys(doc._doc.nested).length, 3); assert.ok(doc.isModified('nested')); doc = new TestDocument(); doc.init({ test: 'Test', nested: { age: 5 } }); doc.set({test: 'Test', nested: {age: 5}}); assert.ok(!doc.isModified()); assert.ok(!doc.isModified('test')); assert.ok(!doc.isModified('nested')); assert.ok(!doc.isModified('nested.age')); doc.nested = {path: 'overwrite the entire nested object', age: 5}; assert.equal(doc.nested.age, 5); assert.equal(Object.keys(doc._doc.nested).length, 2); assert.equal(doc.nested.path, '5overwrite the entire nested object'); assert.ok(doc.isModified('nested')); doc.nested.deep = {x: 'Hank and Marie'}; assert.equal(Object.keys(doc._doc.nested).length, 3); assert.equal(doc.nested.path, '5overwrite the entire nested object'); assert.ok(doc.isModified('nested')); assert.equal(doc.nested.deep.x, 'Hank and Marie'); doc = new TestDocument(); doc.init({ test: 'Test', nested: { age: 5 } }); doc.set('nested.deep', {x: 'Hank and Marie'}); assert.equal(Object.keys(doc._doc.nested).length, 2); assert.equal(Object.keys(doc._doc.nested.deep).length, 1); assert.ok(doc.isModified('nested')); assert.ok(!doc.isModified('nested.path')); assert.ok(!doc.isModified('nested.age')); assert.ok(doc.isModified('nested.deep')); assert.equal(doc.nested.deep.x, 'Hank and Marie'); done(); }); it('allows positional syntax on mixed nested paths (gh-6738)', function() { const schema = new Schema({ nested: {} }); const M = mongoose.model('gh6738', schema); const doc = new M({ 'nested.x': 'foo', 'nested.y': 42, 'nested.a.b.c': { d: { e: { f: 'g' } } } }); assert.strictEqual(doc.nested.x, 'foo'); assert.strictEqual(doc.nested.y, 42); assert.strictEqual(doc.nested.a.b.c.d.e.f, 'g'); }); it('gh-1954', function(done) { const schema = new Schema({ schedule: [new Schema({open: Number, close: Number})] }); const M = mongoose.model('Blog', schema); const doc = new M({ schedule: [{ open: 1000, close: 1900 }] }); 
assert.ok(doc.schedule[0] instanceof EmbeddedDocument); doc.set('schedule.0.open', 1100); assert.ok(doc.schedule); assert.ok(doc.schedule.isMongooseDocumentArray); assert.ok(doc.schedule[0] instanceof EmbeddedDocument); assert.equal(doc.schedule[0].open, 1100); assert.equal(doc.schedule[0].close, 1900); done(); }); }); describe('when overwriting with a document instance', function() { it('does not cause StackOverflows (gh-1234)', function(done) { const doc = new TestDocument({nested: {age: 35}}); doc.nested = doc.nested; assert.doesNotThrow(function() { doc.nested.age; }); done(); }); }); }); }); describe('virtual', function() { describe('setter', function() { let val; let M; before(function(done) { const schema = new mongoose.Schema({v: Number}); schema.virtual('thang').set(function(v) { val = v; }); M = db.model('gh-1154', schema); done(); }); it('works with objects', function(done) { new M({thang: {}}); assert.deepEqual({}, val); done(); }); it('works with arrays', function(done) { new M({thang: []}); assert.deepEqual([], val); done(); }); it('works with numbers', function(done) { new M({thang: 4}); assert.deepEqual(4, val); done(); }); it('works with strings', function(done) { new M({thang: '3'}); assert.deepEqual('3', val); done(); }); }); }); describe('gh-2082', function() { it('works', function(done) { const Parent = db.model('gh2082', parentSchema, 'gh2082'); const parent = new Parent({name: 'Hello'}); parent.save(function(err, parent) { assert.ifError(err); parent.children.push({counter: 0}); parent.save(function(err, parent) { assert.ifError(err); parent.children[0].counter += 1; parent.save(function(err, parent) { assert.ifError(err); parent.children[0].counter += 1; parent.save(function(err) { assert.ifError(err); Parent.findOne({}, function(error, parent) { assert.ifError(error); assert.equal(parent.children[0].counter, 2); done(); }); }); }); }); }); }); }); describe('gh-1933', function() { it('works', function(done) { const M = db.model('gh1933', new Schema({id: String, field: Number}), 'gh1933'); M.create({}, function(error) { assert.ifError(error); M.findOne({}, function(error, doc) { assert.ifError(error); doc.__v = 123; doc.field = 5; // .push({ _id: '123', type: '456' }); doc.save(function(error) { assert.ifError(error); done(); }); }); }); }); }); describe('gh-1638', function() { it('works', function(done) { const ItemChildSchema = new mongoose.Schema({ name: {type: String, required: true, default: 'hello'} }); const ItemParentSchema = new mongoose.Schema({ children: [ItemChildSchema] }); const ItemParent = db.model('gh-1638-1', ItemParentSchema, 'gh-1638-1'); const ItemChild = db.model('gh-1638-2', ItemChildSchema, 'gh-1638-2'); const c1 = new ItemChild({name: 'first child'}); const c2 = new ItemChild({name: 'second child'}); const p = new ItemParent({ children: [c1, c2] }); p.save(function(error) { assert.ifError(error); c2.name = 'updated 2'; p.children = [c2]; p.save(function(error, doc) { assert.ifError(error); assert.equal(doc.children.length, 1); done(); }); }); }); }); describe('gh-2434', function() { it('will save the new value', function(done) { const ItemSchema = new mongoose.Schema({ st: Number, s: [] }); const Item = db.model('gh-2434', ItemSchema, 'gh-2434'); const item = new Item({st: 1}); item.save(function(error) { assert.ifError(error); item.st = 3; item.s = []; item.save(function(error) { assert.ifError(error); // item.st is 3 but may not be saved to DB Item.findById(item._id, function(error, doc) { assert.ifError(error); assert.equal(doc.st, 3); 
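// Re-reading from the DB (rather than trusting the in-memory doc) is what
// makes this regression test meaningful: gh-2434 was about a modified path
// being dropped from the save delta, so only a round trip proves it persisted.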
done(); }); }); }); }); }); it('properly calls queue functions (gh-2856)', function(done) { const personSchema = new mongoose.Schema({ name: String }); let calledName; personSchema.methods.fn = function() { calledName = this.name; }; personSchema.queue('fn'); const Person = db.model('gh2856', personSchema, 'gh2856'); new Person({name: 'Val'}); assert.equal(calledName, 'Val'); done(); }); describe('bug fixes', function() { it('applies toJSON transform correctly for populated docs (gh-2910) (gh-2990)', function(done) { const parentSchema = mongoose.Schema({ c: {type: mongoose.Schema.Types.ObjectId, ref: 'gh-2910-1'} }); let called = []; parentSchema.options.toJSON = { transform: function(doc, ret) { called.push(ret); return ret; } }; const childSchema = mongoose.Schema({ name: String }); let childCalled = []; childSchema.options.toJSON = { transform: function(doc, ret) { childCalled.push(ret); return ret; } }; const Child = db.model('gh-2910-1', childSchema); const Parent = db.model('gh-2910-0', parentSchema); Child.create({name: 'test'}, function(error, c) { Parent.create({c: c._id}, function(error, p) { Parent.findOne({_id: p._id}).populate('c').exec(function(error, p) { let doc = p.toJSON(); assert.equal(called.length, 1); assert.equal(called[0]._id.toString(), p._id.toString()); assert.equal(doc._id.toString(), p._id.toString()); assert.equal(childCalled.length, 1); assert.equal(childCalled[0]._id.toString(), c._id.toString()); called = []; childCalled = []; // JSON.stringify() passes field name, so make sure we don't treat // that as a param to toJSON (gh-2990) doc = JSON.parse(JSON.stringify({parent: p})).parent; assert.equal(called.length, 1); assert.equal(called[0]._id.toString(), p._id.toString()); assert.equal(doc._id.toString(), p._id.toString()); assert.equal(childCalled.length, 1); assert.equal(childCalled[0]._id.toString(), c._id.toString()); done(); }); }); }); }); it('single nested schema transform with save() (gh-5807)', function() { const embeddedSchema = new Schema({ test: String }); let called = false; embeddedSchema.options.toObject = { transform: function(doc, ret) { called = true; delete ret.test; return ret; } }; const topLevelSchema = new Schema({ embedded: embeddedSchema }); const MyModel = db.model('gh5807', topLevelSchema); return MyModel.create({}). then(function(doc) { doc.embedded = { test: '123' }; return doc.save(); }). then(function(doc) { return MyModel.findById(doc._id); }). 
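// The transform above deletes `test` from toObject() output, yet the saved
// doc keeps it: save() builds its update without running user-level toObject
// transforms, which is why `called` must still be false after the reload.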
then(function(doc) { assert.equal(doc.embedded.test, '123'); assert.ok(!called); }); }); it('setters firing with objects on real paths (gh-2943)', function(done) { const M = mongoose.model('gh2943', { myStr: { type: String, set: function(v) { return v.value; } }, otherStr: String }); const t = new M({myStr: {value: 'test'}}); assert.equal(t.myStr, 'test'); new M({otherStr: {value: 'test'}}); assert.ok(!t.otherStr); done(); }); describe('gh-2782', function() { it('should set data from a sub doc', function(done) { const schema1 = new mongoose.Schema({ data: { email: String } }); const schema2 = new mongoose.Schema({ email: String }); const Model1 = mongoose.model('gh-2782-1', schema1); const Model2 = mongoose.model('gh-2782-2', schema2); const doc1 = new Model1({'data.email': '[email protected]'}); assert.equal(doc1.data.email, '[email protected]'); const doc2 = new Model2(); doc2.set(doc1.data); assert.equal(doc2.email, '[email protected]'); done(); }); }); it('set data from subdoc keys (gh-3346)', function(done) { const schema1 = new mongoose.Schema({ data: { email: String } }); const Model1 = mongoose.model('gh3346', schema1); const doc1 = new Model1({'data.email': '[email protected]'}); assert.equal(doc1.data.email, '[email protected]'); const doc2 = new Model1({data: doc1.data}); assert.equal(doc2.data.email, '[email protected]'); done(); }); it('doesnt attempt to cast generic objects as strings (gh-3030)', function(done) { const M = mongoose.model('gh3030', { myStr: { type: String } }); const t = new M({myStr: {thisIs: 'anObject'}}); assert.ok(!t.myStr); t.validate(function(error) { assert.ok(error); done(); }); }); it('single embedded schemas 1 (gh-2689)', function(done) { const userSchema = new mongoose.Schema({ name: String, email: String }, {_id: false, id: false}); let userHookCount = 0; userSchema.pre('save', function(next) { ++userHookCount; next(); }); const eventSchema = new mongoose.Schema({ user: userSchema, name: String }); let eventHookCount = 0; eventSchema.pre('save', function(next) { ++eventHookCount; next(); }); const Event = db.model('gh2689', eventSchema); const e = new Event({name: 'test', user: {name: 123, email: 'val'}}); e.save(function(error) { assert.ifError(error); assert.strictEqual(e.user.name, '123'); assert.equal(eventHookCount, 1); assert.equal(userHookCount, 1); Event.findOne({user: {name: '123', email: 'val'}}, function(err, doc) { assert.ifError(err); assert.ok(doc); Event.findOne({user: {$in: [{name: '123', email: 'val'}]}}, function(err, doc) { assert.ifError(err); assert.ok(doc); done(); }); }); }); }); it('single embedded schemas with validation (gh-2689)', function(done) { const userSchema = new mongoose.Schema({ name: String, email: {type: String, required: true, match: /.+@.+/} }, {_id: false, id: false}); const eventSchema = new mongoose.Schema({ user: userSchema, name: String }); const Event = db.model('gh2689_1', eventSchema); const e = new Event({name: 'test', user: {}}); let error = e.validateSync(); assert.ok(error); assert.ok(error.errors['user.email']); assert.equal(error.errors['user.email'].kind, 'required'); e.user.email = 'val'; error = e.validateSync(); assert.ok(error); assert.ok(error.errors['user.email']); assert.equal(error.errors['user.email'].kind, 'regexp'); done(); }); it('single embedded parent() (gh-5134)', function(done) { const userSchema = new mongoose.Schema({ name: String, email: {type: String, required: true, match: /.+@.+/} }, {_id: false, id: false}); const eventSchema = new mongoose.Schema({ user: userSchema, 
name: String }); const Event = db.model('gh5134', eventSchema); const e = new Event({name: 'test', user: {}}); assert.strictEqual(e.user.parent(), e.user.ownerDocument()); done(); }); it('single embedded schemas with markmodified (gh-2689)', function(done) { const userSchema = new mongoose.Schema({ name: String, email: {type: String, required: true, match: /.+@.+/} }, {_id: false, id: false}); const eventSchema = new mongoose.Schema({ user: userSchema, name: String }); const Event = db.model('gh2689_2', eventSchema); const e = new Event({name: 'test', user: {email: 'a@b'}}); e.save(function(error, doc) { assert.ifError(error); assert.ok(doc); assert.ok(!doc.isModified('user')); assert.ok(!doc.isModified('user.email')); assert.ok(!doc.isModified('user.name')); doc.user.name = 'Val'; assert.ok(doc.isModified('user')); assert.ok(!doc.isModified('user.email')); assert.ok(doc.isModified('user.name')); const delta = doc.$__delta()[1]; assert.deepEqual(delta, { $set: {'user.name': 'Val'} }); doc.save(function(error) { assert.ifError(error); Event.findOne({_id: doc._id}, function(error, doc) { assert.ifError(error); assert.deepEqual(doc.user.toObject(), {email: 'a@b', name: 'Val'}); done(); }); }); }); }); it('single embedded schemas + update validators (gh-2689)', function(done) { const userSchema = new mongoose.Schema({ name: {type: String, default: 'Val'}, email: {type: String, required: true, match: /.+@.+/} }, {_id: false, id: false}); const eventSchema = new mongoose.Schema({ user: userSchema, name: String }); const Event = db.model('gh2689_3', eventSchema); const badUpdate = {$set: {'user.email': 'a'}}; const options = {runValidators: true}; Event.updateOne({}, badUpdate, options, function(error) { assert.ok(error); assert.equal(error.errors['user.email'].kind, 'regexp'); const nestedUpdate = {name: 'test'}; const options = {upsert: true, setDefaultsOnInsert: true}; Event.updateOne({}, nestedUpdate, options, function(error) { assert.ifError(error); Event.findOne({name: 'test'}, function(error, ev) { assert.ifError(error); assert.equal(ev.user.name, 'Val'); done(); }); }); }); }); it('single embedded schema update validators ignore _id (gh-6269)', function() { return co(function*() { const subDocSchema = new mongoose.Schema({ name: String }); const schema = new mongoose.Schema({ subDoc: subDocSchema, test: String }); const Model = db.model('gh6269', schema); const fakeDoc = new Model({}); yield Model.create({}); // toggle to false to see correct behavior // where subdoc is not created const setDefaultsFlag = true; const res = yield Model.findOneAndUpdate({ _id: fakeDoc._id }, { test: 'test' }, { setDefaultsOnInsert: setDefaultsFlag, upsert: true, new: true }); assert.equal(res.test, 'test'); assert.ok(!res.subDoc); }); }); }); describe('error processing (gh-2284)', function() { it('save errors', function(done) { const schema = new Schema({ name: { type: String, required: true } }); schema.post('save', function(error, doc, next) { assert.ok(doc instanceof Model); next(new Error('Catch all')); }); schema.post('save', function(error, doc, next) { assert.ok(doc instanceof Model); next(new Error('Catch all #2')); }); const Model = mongoose.model('gh2284', schema); Model.create({}, function(error) { assert.ok(error); assert.equal(error.message, 'Catch all #2'); done(); }); }); it('validate errors (gh-4885)', function(done) { const testSchema = new Schema({ title: { type: String, required: true } }); let called = 0; testSchema.post('validate', function(error, doc, next) { ++called; next(error); }); 
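// Note: a post hook declared with three parameters (error, doc, next) is
// treated by mongoose as error-handling middleware, so it only fires when
// validation actually failed. That is why `called` ends up at exactly 1 below.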
const Test = db.model('gh4885', testSchema); Test.create({}, function(error) { assert.ok(error); assert.equal(called, 1); done(); }); }); it('handles non-errors', function(done) { const schema = new Schema({ name: { type: String, required: true } }); schema.post('save', function(error, doc, next) { next(new Error('Catch all')); }); schema.post('save', function(error, doc, next) { next(new Error('Catch all #2')); }); const Model = db.model('gh2284_1', schema); Model.create({ name: 'test' }, function(error) { assert.ifError(error); done(); }); }); }); describe('bug fixes', function() { let db; before(function() { db = start(); }); after(function(done) { db.close(done); }); it('single embedded schemas with populate (gh-3501)', function(done) { const PopulateMeSchema = new Schema({}); const Child = db.model('gh3501', PopulateMeSchema); const SingleNestedSchema = new Schema({ populateMeArray: [{ type: Schema.Types.ObjectId, ref: 'gh3501' }] }); const parentSchema = new Schema({ singleNested: SingleNestedSchema }); const P = db.model('gh3501_1', parentSchema); Child.create([{}, {}], function(error, docs) { assert.ifError(error); const obj = { singleNested: {populateMeArray: [docs[0]._id, docs[1]._id]} }; P.create(obj, function(error, doc) { assert.ifError(error); P. findById(doc._id). populate('singleNested.populateMeArray'). exec(function(error, doc) { assert.ok(doc.singleNested.populateMeArray[0]._id); done(); }); }); }); }); it('single embedded schemas with methods (gh-3534)', function(done) { const personSchema = new Schema({name: String}); personSchema.methods.firstName = function() { return this.name.substr(0, this.name.indexOf(' ')); }; const bandSchema = new Schema({leadSinger: personSchema}); const Band = db.model('gh3534', bandSchema); const gnr = new Band({leadSinger: {name: 'Axl Rose'}}); assert.equal(gnr.leadSinger.firstName(), 'Axl'); done(); }); it('single embedded schemas with models (gh-3535)', function(done) { const personSchema = new Schema({name: String}); const Person = db.model('gh3535_0', personSchema); const bandSchema = new Schema({leadSinger: personSchema}); const Band = db.model('gh3535', bandSchema); const axl = new Person({name: 'Axl Rose'}); const gnr = new Band({leadSinger: axl}); gnr.save(function(error) { assert.ifError(error); assert.equal(gnr.leadSinger.name, 'Axl Rose'); done(); }); }); it('single embedded schemas with indexes (gh-3594)', function(done) { const personSchema = new Schema({name: {type: String, unique: true}}); const bandSchema = new Schema({leadSinger: personSchema}); assert.equal(bandSchema.indexes().length, 1); const index = bandSchema.indexes()[0]; assert.deepEqual(index[0], {'leadSinger.name': 1}); assert.ok(index[1].unique); done(); }); it('removing single embedded docs (gh-3596)', function(done) { const personSchema = new Schema({name: String}); const bandSchema = new Schema({guitarist: personSchema, name: String}); const Band = db.model('gh3596', bandSchema); const gnr = new Band({ name: 'Guns N\' Roses', guitarist: {name: 'Slash'} }); gnr.save(function(error, gnr) { assert.ifError(error); gnr.guitarist = undefined; gnr.save(function(error, gnr) { assert.ifError(error); assert.ok(!gnr.guitarist); done(); }); }); }); it('setting single embedded docs (gh-3601)', function(done) { const personSchema = new Schema({name: String}); const bandSchema = new Schema({guitarist: personSchema, name: String}); const Band = db.model('gh3601', bandSchema); const gnr = new Band({ name: 'Guns N\' Roses', guitarist: {name: 'Slash'} }); const velvetRevolver = 
new Band({ name: 'Velvet Revolver' }); velvetRevolver.guitarist = gnr.guitarist; velvetRevolver.save(function(error) { assert.ifError(error); assert.equal(velvetRevolver.guitarist, gnr.guitarist); done(); }); }); it('single embedded docs init obeys strict mode (gh-3642)', function(done) { const personSchema = new Schema({name: String}); const bandSchema = new Schema({guitarist: personSchema, name: String}); const Band = db.model('gh3642', bandSchema); const velvetRevolver = new Band({ name: 'Velvet Revolver', guitarist: {name: 'Slash', realName: 'Saul Hudson'} }); velvetRevolver.save(function(error) { assert.ifError(error); const query = {name: 'Velvet Revolver'}; Band.collection.findOne(query, function(error, band) { assert.ifError(error); assert.ok(!band.guitarist.realName); done(); }); }); }); it('single embedded docs post hooks (gh-3679)', function(done) { const postHookCalls = []; const personSchema = new Schema({name: String}); personSchema.post('save', function() { postHookCalls.push(this); }); const bandSchema = new Schema({guitarist: personSchema, name: String}); const Band = db.model('gh3679', bandSchema); const obj = {name: 'Guns N\' Roses', guitarist: {name: 'Slash'}}; Band.create(obj, function(error) { assert.ifError(error); setTimeout(function() { assert.equal(postHookCalls.length, 1); assert.equal(postHookCalls[0].name, 'Slash'); done(); }); }); }); it('single embedded docs .set() (gh-3686)', function(done) { const personSchema = new Schema({name: String, realName: String}); const bandSchema = new Schema({ guitarist: personSchema, name: String }); const Band = db.model('gh3686', bandSchema); const obj = { name: 'Guns N\' Roses', guitarist: {name: 'Slash', realName: 'Saul Hudson'} }; Band.create(obj, function(error, gnr) { gnr.set('guitarist.name', 'Buckethead'); gnr.save(function(error) { assert.ifError(error); assert.equal(gnr.guitarist.name, 'Buckethead'); assert.equal(gnr.guitarist.realName, 'Saul Hudson'); done(); }); }); }); it('single embedded docs with arrays pre hooks (gh-3680)', function(done) { const childSchema = new Schema({count: Number}); let preCalls = 0; childSchema.pre('save', function(next) { ++preCalls; next(); }); const SingleNestedSchema = new Schema({ children: [childSchema] }); const ParentSchema = new Schema({ singleNested: SingleNestedSchema }); const Parent = db.model('gh3680', ParentSchema); const obj = {singleNested: {children: [{count: 0}]}}; Parent.create(obj, function(error) { assert.ifError(error); assert.equal(preCalls, 1); done(); }); }); it('nested single embedded doc validation (gh-3702)', function(done) { const childChildSchema = new Schema({count: {type: Number, min: 1}}); const childSchema = new Schema({child: childChildSchema}); const parentSchema = new Schema({child: childSchema}); const Parent = db.model('gh3702', parentSchema); const obj = {child: {child: {count: 0}}}; Parent.create(obj, function(error) { assert.ok(error); assert.ok(/ValidationError/.test(error.toString())); done(); }); }); it('handles virtuals with dots correctly (gh-3618)', function(done) { const testSchema = new Schema({nested: {type: Object, default: {}}}); testSchema.virtual('nested.test').get(function() { return true; }); const Test = db.model('gh3618', testSchema); const test = new Test(); let doc = test.toObject({getters: true, virtuals: true}); delete doc._id; delete doc.id; assert.deepEqual(doc, {nested: {test: true}}); doc = test.toObject({getters: false, virtuals: true}); delete doc._id; delete doc.id; assert.deepEqual(doc, {nested: {test: true}}); 
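// Both snapshots above include the virtual: `virtuals: true` alone is enough,
// and `getters: false` suppresses only path-level getters, not virtuals.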
done(); }); it('handles pushing with numeric keys (gh-3623)', function(done) { const schema = new Schema({ array: [{ 1: { date: Date }, 2: { date: Date }, 3: { date: Date } }] }); const MyModel = db.model('gh3623', schema); const doc = {array: [{2: {}}]}; MyModel.collection.insertOne(doc, function(error) { assert.ifError(error); MyModel.findOne({_id: doc._id}, function(error, doc) { assert.ifError(error); doc.array.push({2: {}}); doc.save(function(error) { assert.ifError(error); done(); }); }); }); }); it('execPopulate (gh-3753)', function(done) { const childSchema = new Schema({ name: String }); const parentSchema = new Schema({ name: String, children: [{type: ObjectId, ref: 'gh3753'}] }); const Child = db.model('gh3753', childSchema); const Parent = db.model('gh3753_0', parentSchema); Child.create({name: 'Luke Skywalker'}, function(error, child) { assert.ifError(error); const doc = {name: 'Darth Vader', children: [child._id]}; Parent.create(doc, function(error, doc) { Parent.findOne({_id: doc._id}, function(error, doc) { assert.ifError(error); assert.ok(doc); doc.populate('children').execPopulate().then(function(doc) { assert.equal(doc.children.length, 1); assert.equal(doc.children[0].name, 'Luke Skywalker'); done(); }); }); }); }); }); it('handles 0 for numeric subdoc ids (gh-3776)', function(done) { const personSchema = new Schema({ _id: Number, name: String, age: Number, friends: [{type: Number, ref: 'gh3776'}] }); const Person = db.model('gh3776', personSchema); const people = [ {_id: 0, name: 'Alice'}, {_id: 1, name: 'Bob'} ]; Person.create(people, function(error, people) { assert.ifError(error); const alice = people[0]; alice.friends.push(people[1]); alice.save(function(error) { assert.ifError(error); done(); }); }); }); it('handles conflicting names (gh-3867)', function(done) { const testSchema = new Schema({ name: { type: String, required: true }, things: [{ name: { type: String, required: true } }] }); const M = mongoose.model('gh3867', testSchema); const doc = M({ things: [{}] }); const fields = Object.keys(doc.validateSync().errors).sort(); assert.deepEqual(fields, ['name', 'things.0.name']); done(); }); it('populate with lean (gh-3873)', function(done) { const companySchema = new mongoose.Schema({ name: String, description: String, userCnt: { type: Number, default: 0, select: false } }); const userSchema = new mongoose.Schema({ name: String, company: { type: mongoose.Schema.Types.ObjectId, ref: 'gh3873' } }); const Company = db.model('gh3873', companySchema); const User = db.model('gh3873_0', userSchema); const company = new Company({ name: 'IniTech', userCnt: 1 }); const user = new User({ name: 'Peter', company: company._id }); company.save(function(error) { assert.ifError(error); user.save(function(error) { assert.ifError(error); next(); }); }); function next() { const pop = { path: 'company', select: 'name', options: { lean: true } }; User.find({}).populate(pop).exec(function(error, docs) { assert.ifError(error); assert.equal(docs.length, 1); assert.strictEqual(docs[0].company.userCnt, undefined); done(); }); } }); it('init single nested subdoc with select (gh-3880)', function(done) { const childSchema = new mongoose.Schema({ name: { type: String }, friends: [{ type: String }] }); const parentSchema = new mongoose.Schema({ name: { type: String }, child: childSchema }); const Parent = db.model('gh3880', parentSchema); const p = new Parent({ name: 'Mufasa', child: { name: 'Simba', friends: ['Pumbaa', 'Timon', 'Nala'] } }); p.save(function(error) { assert.ifError(error); 
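// Selecting only 'name child.name' below projects away 'child.friends', so
// the initialized single nested subdoc must leave `friends` undefined instead
// of filling in the array default.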
const fields = 'name child.name'; Parent.findById(p._id).select(fields).exec(function(error, doc) { assert.ifError(error); assert.strictEqual(doc.child.friends, void 0); done(); }); }); }); it('single nested subdoc isModified() (gh-3910)', function(done) { let called = 0; const ChildSchema = new Schema({ name: String }); ChildSchema.pre('save', function(next) { assert.ok(this.isModified('name')); ++called; next(); }); const ParentSchema = new Schema({ name: String, child: ChildSchema }); const Parent = db.model('gh3910', ParentSchema); const p = new Parent({ name: 'Darth Vader', child: { name: 'Luke Skywalker' } }); p.save(function(error) { assert.ifError(error); assert.strictEqual(called, 1); done(); }); }); it('pre and post as schema keys (gh-3902)', function(done) { const schema = new mongoose.Schema({ pre: String, post: String }, { versionKey: false }); const MyModel = db.model('gh3902', schema); MyModel.create({ pre: 'test', post: 'test' }, function(error, doc) { assert.ifError(error); assert.deepEqual(_.omit(doc.toObject(), '_id'), { pre: 'test', post: 'test' }); done(); }); }); it('manual population and isNew (gh-3982)', function(done) { const NestedModelSchema = new mongoose.Schema({ field: String }); const NestedModel = db.model('gh3982', NestedModelSchema); const ModelSchema = new mongoose.Schema({ field: String, array: [{ type: mongoose.Schema.ObjectId, ref: 'gh3982', required: true }] }); const Model = db.model('gh3982_0', ModelSchema); const nestedModel = new NestedModel({ 'field': 'nestedModel' }); nestedModel.save(function(error, nestedModel) { assert.ifError(error); Model.create({ array: [nestedModel._id] }, function(error, doc) { assert.ifError(error); Model.findById(doc._id).populate('array').exec(function(error, doc) { assert.ifError(error); doc.array.push(nestedModel); assert.strictEqual(doc.isNew, false); assert.strictEqual(doc.array[0].isNew, false); assert.strictEqual(doc.array[1].isNew, false); assert.strictEqual(nestedModel.isNew, false); done(); }); }); }); }); it('manual population with refPath (gh-7070)', function() { const ChildModelSchema = new mongoose.Schema({ name: String }); const ChildModel = db.model('gh7070_Child', ChildModelSchema); const ParentModelSchema = new mongoose.Schema({ model: String, childId: { type: mongoose.ObjectId, refPath: 'model' }, otherId: mongoose.ObjectId }); const ParentModel = db.model('gh7070', ParentModelSchema); return co(function*() { const child = yield ChildModel.create({ name: 'test' }); let parent = yield ParentModel.create({ model: 'gh7070_Child', childId: child._id }); parent = yield ParentModel.findOne(); parent.childId = child; parent.otherId = child; assert.equal(parent.childId.name, 'test'); assert.ok(parent.otherId instanceof mongoose.Types.ObjectId); }); }); it('doesnt skipId for single nested subdocs (gh-4008)', function(done) { const childSchema = new Schema({ name: String }); const parentSchema = new Schema({ child: childSchema }); const Parent = db.model('gh4008', parentSchema); Parent.create({ child: { name: 'My child' } }, function(error, doc) { assert.ifError(error); Parent.collection.findOne({ _id: doc._id }, function(error, doc) { assert.ifError(error); assert.ok(doc.child._id); done(); }); }); }); it('single embedded docs with $near (gh-4014)', function(done) { const schema = new mongoose.Schema({ placeName: String }); const geoSchema = new mongoose.Schema({ type: { type: String, enum: 'Point', default: 'Point' }, coordinates: { type: [Number], default: [0, 0] } }); schema.add({ geo: geoSchema }); 
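// $near requires a geospatial index; without the 2dsphere index built on the
// next line, the where('geo').near(...) query would be rejected by the server.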
schema.index({ geo: '2dsphere' }); const MyModel = db.model('gh4014', schema); MyModel.on('index', function(err) { assert.ifError(err); MyModel. where('geo').near({ center: [50, 50], spherical: true }). exec(function(err) { assert.ifError(err); done(); }); }); }); it('skip validation if required returns false (gh-4094)', function(done) { const schema = new Schema({ div: { type: Number, required: function() { return false; }, validate: function(v) { return !!v; } } }); const Model = db.model('gh4094', schema); const m = new Model(); assert.ifError(m.validateSync()); done(); }); it('ability to overwrite array default (gh-4109)', function(done) { const schema = new Schema({ names: { type: [String], default: void 0 } }); const Model = db.model('gh4109', schema); const m = new Model(); assert.ok(!m.names); m.save(function(error, m) { assert.ifError(error); Model.collection.findOne({ _id: m._id }, function(error, doc) { assert.ifError(error); assert.ok(!('names' in doc)); done(); }); }); }); it('validation works when setting array index (gh-3816)', function(done) { const mySchema = new mongoose.Schema({ items: [ { month: Number, date: Date } ] }); const Test = db.model('test', mySchema); const a = [ { month: 0, date: new Date() }, { month: 1, date: new Date() } ]; Test.create({ items: a }, function(error, doc) { assert.ifError(error); Test.findById(doc._id).exec(function(error, doc) { assert.ifError(error); assert.ok(doc); doc.items[0] = { month: 5, date : new Date() }; doc.markModified('items'); doc.save(function(error) { assert.ifError(error); done(); }); }); }); }); it('validateSync works when setting array index nested (gh-5389)', function(done) { const childSchema = new mongoose.Schema({ _id: false, name: String, age: Number }); const schema = new mongoose.Schema({ name: String, children: [childSchema] }); const Model = db.model('gh5389', schema); Model. create({ name: 'test', children: [ { name: 'test-child', age: 24 } ] }). then(function(doc) { return Model.findById(doc._id); }). then(function(doc) { doc.children[0] = { name: 'updated-child', age: 53 }; const errors = doc.validateSync(); assert.ok(!errors); done(); }). 
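// Assigning a plain object to `children[0]` casts it to a subdocument, so
// validateSync() sees properly typed paths and reports no errors. Any earlier
// rejection in this chain falls through to done() via the catch below.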
catch(done); }); it('single embedded with defaults have $parent (gh-4115)', function(done) { const ChildSchema = new Schema({ name: { type: String, 'default': 'child' } }); const ParentSchema = new Schema({ child: { type: ChildSchema, 'default': {} } }); const Parent = db.model('gh4115', ParentSchema); const p = new Parent(); assert.equal(p.child.$parent, p); done(); }); it('removing parent doc calls remove hooks on subdocs (gh-2348) (gh-4566)', function(done) { const ChildSchema = new Schema({ name: String }); const called = {}; ChildSchema.pre('remove', function(next) { called[this.name] = true; next(); }); const ParentSchema = new Schema({ children: [ChildSchema], child: ChildSchema }); const Parent = db.model('gh2348', ParentSchema); const doc = { children: [{ name: 'Jacen' }, { name: 'Jaina' }], child: { name: 'Anakin' } }; Parent.create(doc, function(error, doc) { assert.ifError(error); doc.remove(function(error, doc) { assert.ifError(error); assert.deepEqual(called, { Jacen: true, Jaina: true, Anakin: true }); const arr = doc.children.toObject().map(function(v) { return v.name; }); assert.deepEqual(arr, ['Jacen', 'Jaina']); assert.equal(doc.child.name, 'Anakin'); done(); }); }); }); it('strings of length 12 are valid oids (gh-3365)', function(done) { const schema = new Schema({ myId: mongoose.Schema.Types.ObjectId }); const M = db.model('gh3365', schema); const doc = new M({ myId: 'blablablabla' }); doc.validate(function(error) { assert.ifError(error); done(); }); }); it('set() empty obj unmodifies subpaths (gh-4182)', function(done) { const omeletteSchema = new Schema({ topping: { meat: { type: String, enum: ['bacon', 'sausage'] }, cheese: Boolean } }); const Omelette = db.model('gh4182', omeletteSchema); const doc = new Omelette({ topping: { meat: 'bacon', cheese: true } }); doc.topping = {}; doc.save(function(error) { assert.ifError(error); assert.strictEqual(doc.topping.meat, void 0); done(); }); }); it('emits cb errors on model for save (gh-3499)', function(done) { const testSchema = new Schema({ name: String }); const Test = db.model('gh3499', testSchema); Test.on('error', function(error) { assert.equal(error.message, 'fail!'); done(); }); new Test({}).save(function() { throw new Error('fail!'); }); }); it('emits cb errors on model for save with hooks (gh-3499)', function(done) { const testSchema = new Schema({ name: String }); testSchema.pre('save', function(next) { next(); }); testSchema.post('save', function(doc, next) { next(); }); const Test = db.model('gh3499_0', testSchema); Test.on('error', function(error) { assert.equal(error.message, 'fail!'); done(); }); new Test({}).save(function() { throw new Error('fail!'); }); }); it('emits cb errors on model for find() (gh-3499)', function(done) { const testSchema = new Schema({ name: String }); const Test = db.model('gh3499_1', testSchema); Test.on('error', function(error) { assert.equal(error.message, 'fail!'); done(); }); Test.find({}, function() { throw new Error('fail!'); }); }); it('emits cb errors on model for find() + hooks (gh-3499)', function(done) { const testSchema = new Schema({ name: String }); testSchema.post('find', function(results, next) { assert.equal(results.length, 0); next(); }); const Test = db.model('gh3499_2', testSchema); Test.on('error', function(error) { assert.equal(error.message, 'fail!'); done(); }); Test.find({}, function() { throw new Error('fail!'); }); }); it('clears subpaths when removing single nested (gh-4216)', function(done) { const RecurrenceSchema = new Schema({ frequency: Number, 
interval: { type: String, enum: ['days', 'weeks', 'months', 'years'] } }, { _id: false }); const EventSchema = new Schema({ name: { type: String, trim: true }, recurrence: RecurrenceSchema }); const Event = db.model('gh4216', EventSchema); const ev = new Event({ name: 'test', recurrence: { frequency: 2, interval: 'days' } }); ev.recurrence = null; ev.save(function(error) { assert.ifError(error); done(); }); }); it('using validator.isEmail as a validator (gh-4064) (gh-4084)', function(done) { const schema = new Schema({ email: { type: String, validate: validator.isEmail } }); const MyModel = db.model('gh4064', schema); MyModel.create({ email: 'invalid' }, function(error) { assert.ok(error); assert.ok(error.errors['email']); done(); }); }); it('setting path to empty object works (gh-4218)', function() { const schema = new Schema({ object: { nested: { field1: { type: Number, default: 1 } } } }); const MyModel = db.model('gh4218', schema); return co(function*() { let doc = yield MyModel.create({}); doc.object.nested = {}; yield doc.save(); doc = yield MyModel.collection.findOne({ _id: doc._id }); assert.deepEqual(doc.object.nested, {}); }); }); it('setting path to object with strict and no paths in the schema (gh-6436) (gh-4218)', function() { const schema = new Schema({ object: { nested: { field1: { type: Number, default: 1 } } } }); const MyModel = db.model('gh6436', schema); return co(function*() { let doc = yield MyModel.create({}); doc.object.nested = { field2: 'foo' }; // `field2` not in the schema yield doc.save(); doc = yield MyModel.collection.findOne({ _id: doc._id }); assert.deepEqual(doc.object.nested, {}); }); }); it('minimize + empty object (gh-4337)', function(done) { const SomeModelSchema = new mongoose.Schema({}, { minimize: false }); const SomeModel = mongoose.model('somemodel', SomeModelSchema); try { new SomeModel({}); } catch (error) { assert.ifError(error); } done(); }); describe('modifiedPaths', function() { it('doesnt markModified child paths if parent is modified (gh-4224)', function(done) { const childSchema = new Schema({ name: String }); const parentSchema = new Schema({ child: childSchema }); const Parent = db.model('gh4224', parentSchema); Parent.create({ child: { name: 'Jacen' } }, function(error, doc) { assert.ifError(error); doc.child = { name: 'Jaina' }; doc.child.name = 'Anakin'; assert.deepEqual(doc.modifiedPaths(), ['child']); assert.ok(doc.isModified('child.name')); done(); }); }); it('includeChildren option (gh-6134)', function(done) { const personSchema = new mongoose.Schema({ name: { type: String }, colors: { primary: { type: String, default: 'white', enum: ['blue', 'green', 'red', 'purple', 'yellow'] } } }); const Person = db.model('Person', personSchema); const luke = new Person({ name: 'Luke', colors: { primary: 'blue' } }); assert.deepEqual(luke.modifiedPaths(), ['name', 'colors', 'colors.primary']); const obiwan = new Person({ name: 'Obi-Wan' }); obiwan.colors.primary = 'blue'; assert.deepEqual(obiwan.modifiedPaths(), ['name', 'colors', 'colors.primary']); const anakin = new Person({ name: 'Anakin' }); anakin.colors = { primary: 'blue' }; assert.deepEqual(anakin.modifiedPaths({ includeChildren: true }), ['name', 'colors', 'colors.primary']); done(); }); it('includeChildren option with arrays (gh-5904)', function(done) { const teamSchema = new mongoose.Schema({ name: String, colors: { primary: { type: String, enum: ['blue', 'green', 'red', 'purple', 'yellow', 'white', 'black'] } }, members: [{ name: String, }] }); const Team = db.model('gh5904', 
teamSchema); const jedis = new Team({ name: 'Jedis', colors: { primary: 'blue' }, members: [{ name: 'luke' }] }); const paths = jedis.modifiedPaths({ includeChildren: true }); assert.deepEqual(paths, [ 'name', 'colors', 'colors.primary', 'members', 'members.0', 'members.0.name' ]); done(); }); it('1 level down nested paths get marked modified on initial set (gh-7313) (gh-6944)', function() { const testSchema = new Schema({ name: { first: String, last: String, }, relatives: { aunt: { name: String, }, uncle: { name: String, }, }, }); const M = db.model('gh7313', testSchema); const doc = new M({ name: { first: 'A', last: 'B' }, relatives: { aunt: { name: 'foo' }, uncle: { name: 'bar' } } }); assert.ok(doc.modifiedPaths().indexOf('name.first') !== -1); assert.ok(doc.modifiedPaths().indexOf('name.last') !== -1); assert.ok(doc.modifiedPaths().indexOf('relatives.aunt') !== -1); assert.ok(doc.modifiedPaths().indexOf('relatives.uncle') !== -1); return Promise.resolve(); }); }); it('single nested isNew (gh-4369)', function(done) { const childSchema = new Schema({ name: String }); const parentSchema = new Schema({ child: childSchema }); const Parent = db.model('gh4369', parentSchema); let remaining = 2; const doc = new Parent({ child: { name: 'Jacen' } }); doc.child.on('isNew', function(val) { assert.ok(!val); assert.ok(!doc.child.isNew); --remaining || done(); }); doc.save(function(error, doc) { assert.ifError(error); assert.ok(!doc.child.isNew); --remaining || done(); }); }); it('deep default array values (gh-4540)', function(done) { const schema = new Schema({ arr: [{ test: { type: Array, default: ['test'] } }] }); assert.doesNotThrow(function() { db.model('gh4540', schema); }); done(); }); it('default values with subdoc array (gh-4390)', function(done) { const childSchema = new Schema({ name: String }); const parentSchema = new Schema({ child: [childSchema] }); parentSchema.path('child').default([{ name: 'test' }]); const Parent = db.model('gh4390', parentSchema); Parent.create({}, function(error, doc) { assert.ifError(error); const arr = doc.toObject().child.map(function(doc) { assert.ok(doc._id); delete doc._id; return doc; }); assert.deepEqual(arr, [{ name: 'test' }]); done(); }); }); it('handles invalid dates (gh-4404)', function(done) { const testSchema = new Schema({ date: Date }); const Test = db.model('gh4404', testSchema); Test.create({ date: new Date('invalid date') }, function(error) { assert.ok(error); assert.equal(error.errors['date'].name, 'CastError'); done(); }); }); it('setting array subpath (gh-4472)', function(done) { const ChildSchema = new mongoose.Schema({ name: String, age: Number }, { _id: false }); const ParentSchema = new mongoose.Schema({ data: { children: [ChildSchema] } }); const Parent = db.model('gh4472', ParentSchema); const p = new Parent(); p.set('data.children.0', { name: 'Bob', age: 900 }); assert.deepEqual(p.toObject().data.children, [{ name: 'Bob', age: 900 }]); done(); }); it('ignore paths (gh-4480)', function() { const TestSchema = new Schema({ name: { type: String, required: true } }); const Test = db.model('gh4480', TestSchema); return co(function*() { yield Test.create({ name: 'val' }); let doc = yield Test.findOne(); doc.name = null; doc.$ignore('name'); yield doc.save(); doc = yield Test.findById(doc._id); assert.equal(doc.name, 'val'); }); }); it('ignore subdocs paths (gh-4480) (gh-6152)', function() { const childSchema = new Schema({ name: { type: String, required: true } }); const testSchema = new Schema({ child: childSchema, children: [childSchema] 
}); const Test = db.model('gh6152', testSchema); return co(function*() { yield Test.create({ child: { name: 'testSingle' }, children: [{ name: 'testArr' }] }); let doc = yield Test.findOne(); doc.child.name = null; doc.child.$ignore('name'); yield doc.save(); doc = yield Test.findById(doc._id); assert.equal(doc.child.name, 'testSingle'); doc.children[0].name = null; doc.children[0].$ignore('name'); yield doc.save(); doc = yield Test.findById(doc._id); assert.equal(doc.children[0].name, 'testArr'); }); }); it('composite _ids (gh-4542)', function(done) { const schema = new Schema({ _id: { key1: String, key2: String }, content: String }); const Model = db.model('gh4542', schema); const object = new Model(); object._id = {key1: 'foo', key2: 'bar'}; object.save(). then(function(obj) { obj.content = 'Hello'; return obj.save(); }). then(function(obj) { return Model.findOne({ _id: obj._id }); }). then(function(obj) { assert.equal(obj.content, 'Hello'); done(); }). catch(done); }); it('validateSync with undefined and conditional required (gh-4607)', function(done) { const schema = new mongoose.Schema({ type: mongoose.SchemaTypes.Number, conditional: { type: mongoose.SchemaTypes.String, required: function() { return this.type === 1; }, maxlength: 128 } }); const Model = db.model('gh4607', schema); assert.doesNotThrow(function() { new Model({ type: 2, conditional: void 0 }).validateSync(); }); done(); }); it('conditional required on single nested (gh-4663)', function(done) { const childSchema = new Schema({ name: String }); const schema = new Schema({ child: { type: childSchema, required: function() { assert.equal(this.child.name, 'test'); } } }); const M = db.model('gh4663', schema); new M({ child: { name: 'test' } }).validateSync(); done(); }); it('setting full path under single nested schema works (gh-4578) (gh-4528)', function(done) { const ChildSchema = new mongoose.Schema({ age: Number }); const ParentSchema = new mongoose.Schema({ age: Number, family: { child: ChildSchema } }); const M = db.model('gh4578', ParentSchema); M.create({ age: 45 }, function(error, doc) { assert.ifError(error); assert.ok(!doc.family.child); doc.set('family.child.age', 15); assert.ok(doc.family.child.schema); assert.ok(doc.isModified('family.child')); assert.ok(doc.isModified('family.child.age')); assert.equal(doc.family.child.toObject().age, 15); done(); }); }); it('setting a nested path retains nested modified paths (gh-5206)', function(done) { const testSchema = new mongoose.Schema({ name: String, surnames: { docarray: [{ name: String }] } }); const Cat = db.model('gh5206', testSchema); const kitty = new Cat({ name: 'Test', surnames: { docarray: [{ name: 'test1' }, { name: 'test2' }] } }); kitty.save(function(error) { assert.ifError(error); kitty.surnames = { docarray: [{ name: 'test1' }, { name: 'test2' }, { name: 'test3' }] }; assert.deepEqual(kitty.modifiedPaths(), ['surnames', 'surnames.docarray']); done(); }); }); it('toObject() does not depopulate top level (gh-3057)', function(done) { const Cat = db.model('gh3057', { name: String }); const Human = db.model('gh3057_0', { name: String, petCat: { type: mongoose.Schema.Types.ObjectId, ref: 'gh3057' } }); const kitty = new Cat({ name: 'Zildjian' }); const person = new Human({ name: 'Val', petCat: kitty }); assert.equal(kitty.toObject({ depopulate: true }).name, 'Zildjian'); assert.ok(!person.toObject({ depopulate: true }).petCat.name); done(); }); it('toObject() respects schema-level depopulate (gh-6313)', function(done) { const personSchema = Schema({ name: 
String, car: { type: Schema.Types.ObjectId, ref: 'gh6313_Car' } }); personSchema.set('toObject', { depopulate: true }); const carSchema = Schema({ name: String }); const Car = db.model('gh6313_Car', carSchema); const Person = db.model('gh6313_Person', personSchema); const car = new Car({ name: 'Ford' }); const person = new Person({ name: 'John', car: car }); assert.equal(person.toObject().car.toHexString(), car._id.toHexString()); done(); }); it('single nested doc conditional required (gh-4654)', function(done) { const ProfileSchema = new Schema({ firstName: String, lastName: String }); function validator() { assert.equal(this.email, 'test'); return true; } const UserSchema = new Schema({ email: String, profile: { type: ProfileSchema, required: [validator, 'profile required'] } }); const User = db.model('gh4654', UserSchema); User.create({ email: 'test' }, function(error) { assert.equal(error.errors['profile'].message, 'profile required'); done(); }); }); it('handles setting single nested schema to equal value (gh-4676)', function(done) { const companySchema = new mongoose.Schema({ _id: false, name: String, description: String }); const userSchema = new mongoose.Schema({ name: String, company: companySchema }); const User = db.model('gh4676', userSchema); const user = new User({ company: { name: 'Test' } }); user.save(function(error) { assert.ifError(error); user.company.description = 'test'; assert.ok(user.isModified('company')); user.company = user.company; assert.ok(user.isModified('company')); done(); }); }); it('handles setting single nested doc to null after setting (gh-4766)', function(done) { const EntitySchema = new Schema({ company: { type: String, required: true }, name: { type: String, required: false }, email: { type: String, required: false } }, { _id: false, id: false }); const ShipmentSchema = new Schema({ entity: { shipper: { type: EntitySchema, required: false }, manufacturer: { type: EntitySchema, required: false } } }); const Shipment = db.model('gh4766', ShipmentSchema); const doc = new Shipment({ entity: { shipper: null, manufacturer: { company: 'test', name: 'test', email: 'test@email' } } }); doc.save(). then(function() { return Shipment.findById(doc._id); }). then(function(shipment) { shipment.entity = shipment.entity; shipment.entity.manufacturer = null; return shipment.save(); }). then(function() { done(); }). catch(done); }); it('buffers with subtypes as ids (gh-4506)', function(done) { const uuid = require('uuid'); const UserSchema = new mongoose.Schema({ _id: { type: Buffer, default: function() { return mongoose.Types.Buffer(uuid.parse(uuid.v4())).toObject(4); }, required: true }, email: { type: String, unique: true, lowercase: true, required: true }, name: String }); const User = db.model('gh4506', UserSchema); const user = new User({ email: '[email protected]', name: 'My name' }); user.save(). then(function() { return User.findOne({ email: '[email protected]' }); }). then(function(user) { user.name = 'other'; return user.save(); }). then(function() { return User.findOne({ email: '[email protected]' }); }). then(function(doc) { assert.equal(doc.name, 'other'); done(); }). 
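// Round-tripping twice shows the Buffer _id (BSON binary subtype 4, i.e. a
// UUID) survives casting: the second save() must update the existing document
// by its buffer _id rather than inserting a new one.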
catch(done); }); it('embedded docs dont mark parent as invalid (gh-4681)', function(done) { const NestedSchema = new mongoose.Schema({ nestedName: { type: String, required: true }, createdAt: { type: Date, required: true } }); const RootSchema = new mongoose.Schema({ rootName: String, nested: { type: [ NestedSchema ] } }); const Root = db.model('gh4681', RootSchema); const root = new Root({ rootName: 'root', nested: [ { } ] }); root.save(function(error) { assert.ok(error); assert.deepEqual(Object.keys(error.errors).sort(), ['nested.0.createdAt', 'nested.0.nestedName']); done(); }); }); it('should depopulate the shard key when saving (gh-4658)', function(done) { const ChildSchema = new mongoose.Schema({ name: String }); const ChildModel = db.model('gh4658', ChildSchema); const ParentSchema = new mongoose.Schema({ name: String, child: { type: Schema.Types.ObjectId, ref: 'gh4658' } }, {shardKey: {child: 1, _id: 1}}); const ParentModel = db.model('gh4658_0', ParentSchema); ChildModel.create({ name: 'Luke' }). then(function(child) { const p = new ParentModel({ name: 'Vader' }); p.child = child; return p.save(); }). then(function(p) { p.name = 'Anakin'; return p.save(); }). then(function(p) { return ParentModel.findById(p); }). then(function(doc) { assert.equal(doc.name, 'Anakin'); done(); }). catch(done); }); it('handles setting virtual subpaths (gh-4716)', function(done) { const childSchema = new Schema({ name: { type: String, default: 'John' }, favorites: { color: { type: String, default: 'Blue' } } }); const parentSchema = new Schema({ name: { type: String }, children: { type: [childSchema], default: [{}] } }); parentSchema.virtual('favorites').set(function(v) { return this.children[0].set('favorites', v); }).get(function() { return this.children[0].get('favorites'); }); const Parent = db.model('gh4716', parentSchema); const p = new Parent({ name: 'Anakin' }); p.set('children.0.name', 'Leah'); p.set('favorites.color', 'Red'); assert.equal(p.children[0].favorites.color, 'Red'); done(); }); it('handles selected nested elements with defaults (gh-4739)', function(done) { const userSchema = new Schema({ preferences: { sleep: { type: Boolean, default: false }, test: { type: Boolean, default: true } }, name: String }); const User = db.model('User', userSchema); const user = { name: 'test' }; User.collection.insertOne(user, function(error) { assert.ifError(error); User.findById(user, { 'preferences.sleep': 1, name: 1 }, function(error, user) { assert.ifError(error); assert.strictEqual(user.preferences.sleep, false); assert.ok(!user.preferences.test); done(); }); }); }); it('handles mark valid in subdocs correctly (gh-4778)', function(done) { const SubSchema = new mongoose.Schema({ field: { nestedField: { type: mongoose.Schema.ObjectId, required: false } } }, { _id: false, id: false }); const Model2Schema = new mongoose.Schema({ sub: { type: SubSchema, required: false } }); const Model2 = db.model('gh4778', Model2Schema); const doc = new Model2({ sub: {} }); doc.sub.field.nestedField = { }; doc.sub.field.nestedField = '574b69d0d9daf106aaa62974'; assert.ok(!doc.validateSync()); done(); }); it('timestamps set to false works (gh-7074)', function() { const schema = new Schema({ name: String }, { timestamps: false }); const Test = db.model('gh7074', schema); return co(function*() { const doc = yield Test.create({ name: 'test' }); assert.strictEqual(doc.updatedAt, undefined); assert.strictEqual(doc.createdAt, undefined); }); }); it('timestamps with nested paths (gh-5051)', function(done) { const schema = 
new Schema({ props: {} }, { timestamps: { createdAt: 'props.createdAt', updatedAt: 'props.updatedAt' } }); const M = db.model('gh5051', schema); const now = Date.now(); M.create({}, function(error, doc) { assert.ok(doc.props.createdAt); assert.ok(doc.props.createdAt instanceof Date); assert.ok(doc.props.createdAt.valueOf() >= now); assert.ok(doc.props.updatedAt); assert.ok(doc.props.updatedAt instanceof Date); assert.ok(doc.props.updatedAt.valueOf() >= now); done(); }); }); it('Declaring defaults in your schema with timestamps defined (gh-6024)', function(done) { const schemaDefinition = { name: String, misc: { hometown: String, isAlive: { type: Boolean, default: true } } }; const schemaWithTimestamps = new Schema(schemaDefinition, {timestamps: {createdAt: 'misc.createdAt'}}); const PersonWithTimestamps = db.model('Person_timestamps', schemaWithTimestamps); const dude = new PersonWithTimestamps({ name: 'Keanu', misc: {hometown: 'Beirut'} }); assert.equal(dude.misc.isAlive, true); done(); }); it('supports $where in pre save hook (gh-4004)', function(done) { const Promise = global.Promise; const schema = new Schema({ name: String }, { timestamps: true, versionKey: null }); schema.pre('save', function(next) { this.$where = { updatedAt: this.updatedAt }; next(); }); schema.post('save', function(error, res, next) { if (error instanceof MongooseError.DocumentNotFoundError) { error = new Error('Somebody else updated the document!'); } next(error); }); const MyModel = db.model('gh4004', schema); MyModel.create({ name: 'test' }). then(function() { return Promise.all([ MyModel.findOne(), MyModel.findOne() ]); }). then(function(docs) { docs[0].name = 'test2'; return Promise.all([ docs[0].save(), Promise.resolve(docs[1]) ]); }). then(function(docs) { docs[1].name = 'test3'; return docs[1].save(); }). then(function() { done(new Error('Should not get here')); }). catch(function(error) { assert.equal(error.message, 'Somebody else updated the document!'); done(); }); }); it('toObject() with buffer and minimize (gh-4800)', function(done) { const TestSchema = new mongoose.Schema({ buf: Buffer }, { toObject: { virtuals: true, getters: true } }); const Test = db.model('gh4800', TestSchema); Test.create({ buf: Buffer.from('abcd') }). then(function(doc) { return Test.findById(doc._id); }). then(function(doc) { // Should not throw require('util').inspect(doc); done(); }). catch(done); }); it('buffer subtype prop (gh-5530)', function(done) { const TestSchema = new mongoose.Schema({ uuid: { type: Buffer, subtype: 4 } }); const Test = db.model('gh5530', TestSchema); const doc = new Test({ uuid: 'test1' }); assert.equal(doc.uuid._subtype, 4); done(); }); it('runs validate hooks on single nested subdocs if not directly modified (gh-3884)', function(done) { const childSchema = new Schema({ name: { type: String }, friends: [{ type: String }] }); let count = 0; childSchema.pre('validate', function(next) { ++count; next(); }); const parentSchema = new Schema({ name: { type: String }, child: childSchema }); const Parent = db.model('gh3884', parentSchema); const p = new Parent({ name: 'Mufasa', child: { name: 'Simba', friends: ['Pumbaa', 'Timon', 'Nala'] } }); p.save(). then(function(p) { assert.equal(count, 1); p.child.friends.push('Rafiki'); return p.save(); }). then(function() { assert.equal(count, 2); done(); }). 
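// Pushing onto child.friends marks the single nested subdoc modified, so its
// 'validate' hooks run again on the second save(): count goes from 1 to 2
// even though `child` itself was never reassigned.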
catch(done);
  });

  it('runs validate hooks on array subdocs if not directly modified (gh-5861)', function(done) {
    const childSchema = new Schema({
      name: { type: String },
      friends: [{ type: String }]
    });

    let count = 0;

    childSchema.pre('validate', function(next) {
      ++count;
      next();
    });

    const parentSchema = new Schema({
      name: { type: String },
      children: [childSchema]
    });

    const Parent = db.model('gh5861', parentSchema);

    const p = new Parent({
      name: 'Mufasa',
      children: [{ name: 'Simba', friends: ['Pumbaa', 'Timon', 'Nala'] }]
    });

    p.save().
      then(function(p) {
        assert.equal(count, 1);
        p.children[0].friends.push('Rafiki');
        return p.save();
      }).
      then(function() {
        assert.equal(count, 2);
        done();
      }).
      catch(done);
  });

  it('does not run schema type validator on single nested if not directly modified (gh-5885)', function() {
    let childValidateCalls = 0;
    const childSchema = new Schema({
      name: String,
      otherProp: {
        type: String,
        validate: () => {
          ++childValidateCalls;
          return true;
        }
      }
    });

    let validateCalls = 0;
    const parentSchema = new Schema({
      child: {
        type: childSchema,
        validate: () => {
          ++validateCalls;
          return true;
        }
      }
    });

    return co(function*() {
      const Parent = db.model('gh5885', parentSchema);

      const doc = yield Parent.create({ child: { name: 'test', otherProp: 'test' } });
      assert.equal(childValidateCalls, 1);
      assert.equal(validateCalls, 1);

      childValidateCalls = 0;
      validateCalls = 0;

      doc.set('child.name', 'test2');
      yield doc.validate();

      assert.equal(childValidateCalls, 0);
      assert.equal(validateCalls, 0);
    });
  });

  it('does not overwrite when setting nested (gh-4793)', function(done) {
    const grandchildSchema = new mongoose.Schema();
    grandchildSchema.method({
      foo: function() { return 'bar'; }
    });
    const Grandchild = db.model('gh4793_0', grandchildSchema);

    const childSchema = new mongoose.Schema({ grandchild: grandchildSchema });
    const Child = mongoose.model('gh4793_1', childSchema);

    const parentSchema = new mongoose.Schema({ children: [childSchema] });
    const Parent = mongoose.model('gh4793_2', parentSchema);

    const grandchild = new Grandchild();
    const child = new Child({grandchild: grandchild});

    assert.equal(child.grandchild.foo(), 'bar');

    const p = new Parent({children: [child]});

    assert.equal(child.grandchild.foo(), 'bar');
    assert.equal(p.children[0].grandchild.foo(), 'bar');

    done();
  });

  it('hooks/middleware for custom methods (gh-6385) (gh-7456)', function() {
    const mySchema = new Schema({
      name: String
    });

    mySchema.methods.foo = function(cb) {
      return cb(null, this.name);
    };
    mySchema.methods.bar = function() {
      return this.name;
    };
    mySchema.methods.baz = function(arg) {
      return Promise.resolve(arg);
    };

    let preFoo = 0;
    let postFoo = 0;
    mySchema.pre('foo', function() { ++preFoo; });
    mySchema.post('foo', function() { ++postFoo; });

    let preBaz = 0;
    let postBaz = 0;
    mySchema.pre('baz', function() { ++preBaz; });
    mySchema.post('baz', function() { ++postBaz; });

    const MyModel = db.model('gh6385', mySchema);

    return co(function*() {
      const doc = new MyModel({ name: 'test' });
      assert.equal(doc.bar(), 'test');

      assert.equal(preFoo, 0);
      assert.equal(postFoo, 0);

      assert.equal(yield cb => doc.foo(cb), 'test');
      assert.equal(preFoo, 1);
      assert.equal(postFoo, 1);

      assert.equal(preBaz, 0);
      assert.equal(postBaz, 0);

      assert.equal(yield doc.baz('foobar'), 'foobar');
      assert.equal(preBaz, 1);
      assert.equal(postBaz, 1);
    });
  });

  it('custom methods with promises (gh-6385)', function() {
    const mySchema = new Schema({
      name: String
    });

    mySchema.methods.foo = function() {
      return Promise.resolve(this.name + ' foo');
    };
    mySchema.methods.bar =
function() { return this.name + ' bar'; }; let preFoo = 0; let preBar = 0; mySchema.pre('foo', function() { ++preFoo; }); mySchema.pre('bar', function() { ++preBar; }); const MyModel = db.model('gh6385_1', mySchema); return co(function*() { const doc = new MyModel({ name: 'test' }); assert.equal(preFoo, 0); assert.equal(preBar, 0); let foo = doc.foo(); let bar = doc.bar(); assert.ok(foo instanceof Promise); assert.ok(bar instanceof Promise); foo = yield foo; bar = yield bar; assert.equal(preFoo, 1); assert.equal(preBar, 1); assert.equal(foo, 'test foo'); assert.equal(bar, 'test bar'); }); }); it('toString() as custom method (gh-6538)', function(done) { const commentSchema = new Schema({ title: String }); commentSchema.methods.toString = function() { return `${this.constructor.modelName}(${this.title})`; }; const Comment = db.model('gh6538_Comment', commentSchema); const c = new Comment({ title: 'test' }); assert.strictEqual('gh6538_Comment(test)', `${c}`); done(); }); it('setting to discriminator (gh-4935)', function(done) { const Buyer = db.model('gh4935_0', new Schema({ name: String, vehicle: { type: Schema.Types.ObjectId, ref: 'gh4935' } })); const Vehicle = db.model('gh4935', new Schema({ name: String })); const Car = Vehicle.discriminator('gh4935_1', new Schema({ model: String })); const eleanor = new Car({ name: 'Eleanor', model: 'Shelby Mustang GT' }); const nick = new Buyer({ name: 'Nicolas', vehicle: eleanor }); assert.ok(!!nick.vehicle); assert.ok(nick.vehicle === eleanor); assert.ok(nick.vehicle instanceof Car); assert.equal(nick.vehicle.name, 'Eleanor'); done(); }); it('handles errors in sync validators (gh-2185)', function(done) { const schema = new Schema({ name: { type: String, validate: function() { throw new Error('woops!'); } } }); const M = db.model('gh2185', schema); const error = (new M({ name: 'test' })).validateSync(); assert.ok(error); assert.equal(error.errors['name'].reason.message, 'woops!'); new M({ name: 'test'}).validate(function(error) { assert.ok(error); assert.equal(error.errors['name'].reason.message, 'woops!'); done(); }); }); it('allows hook as a schema key (gh-5047)', function(done) { const schema = new mongoose.Schema({ name: String, hook: { type: String } }); const Model = db.model('Model', schema); Model.create({ hook: 'test '}, function(error) { assert.ifError(error); done(); }); }); it('save errors with callback and promise work (gh-5216)', function(done) { const schema = new mongoose.Schema({}); const Model = db.model('gh5216', schema); const _id = new mongoose.Types.ObjectId(); const doc1 = new Model({ _id: _id }); const doc2 = new Model({ _id: _id }); let remaining = 2; Model.on('error', function(error) { assert.ok(error); --remaining || done(); }); doc1.save(). then(function() { return doc2.save(); }). 
catch(function(error) { assert.ok(error); --remaining || done(); }); }); it('post hooks on child subdocs run after save (gh-5085)', function(done) { const ChildModelSchema = new mongoose.Schema({ text: { type: String } }); ChildModelSchema.post('save', function(doc) { doc.text = 'bar'; }); const ParentModelSchema = new mongoose.Schema({ children: [ChildModelSchema] }); const Model = db.model('gh5085', ParentModelSchema); Model.create({ children: [{ text: 'test' }] }, function(error) { assert.ifError(error); Model.findOne({}, function(error, doc) { assert.ifError(error); assert.equal(doc.children.length, 1); assert.equal(doc.children[0].text, 'test'); done(); }); }); }); it('post hooks on array child subdocs run after save (gh-5085) (gh-6926)', function() { const subSchema = new Schema({ val: String }); subSchema.post('save', function() { return Promise.reject(new Error('Oops')); }); const schema = new Schema({ sub: subSchema }); const Test = db.model('gh6926', schema); const test = new Test({ sub: { val: 'test' } }); return test.save(). then(() => assert.ok(false), err => assert.equal(err.message, 'Oops')). then(() => Test.findOne()). then(doc => assert.equal(doc.sub.val, 'test')); }); it('nested docs toObject() clones (gh-5008)', function(done) { const schema = new mongoose.Schema({ sub: { height: Number } }); const Model = db.model('gh5008', schema); const doc = new Model({ sub: { height: 3 } }); assert.equal(doc.sub.height, 3); const leanDoc = doc.sub.toObject(); assert.equal(leanDoc.height, 3); doc.sub.height = 55; assert.equal(doc.sub.height, 55); assert.equal(leanDoc.height, 3); done(); }); it('toObject() with null (gh-5143)', function(done) { const schema = new mongoose.Schema({ customer: { name: { type: String, required: false } } }); const Model = db.model('gh5143', schema); const model = new Model(); model.customer = null; assert.strictEqual(model.toObject().customer, null); assert.strictEqual(model.toObject({ getters: true }).customer, null); done(); }); it('handles array subdocs with single nested subdoc default (gh-5162)', function(done) { const RatingsItemSchema = new mongoose.Schema({ value: Number }, { versionKey: false, _id: false }); const RatingsSchema = new mongoose.Schema({ ratings: { type: RatingsItemSchema, default: { id: 1, value: 0 } }, _id: false }); const RestaurantSchema = new mongoose.Schema({ menu: { type: [RatingsSchema] } }); const Restaurant = db.model('gh5162', RestaurantSchema); // Should not throw const r = new Restaurant(); assert.deepEqual(r.toObject().menu, []); done(); }); it('iterating through nested doc keys (gh-5078)', function(done) { const schema = new Schema({ nested: { test1: String, test2: String } }); schema.virtual('tests').get(function() { return _.map(this.nested, function(v) { return v; }); }); const M = db.model('gh5078', schema); const doc = new M({ nested: { test1: 'a', test2: 'b' } }); assert.deepEqual(doc.toObject({ virtuals: true }).tests, ['a', 'b']); // Should not throw require('util').inspect(doc); JSON.stringify(doc); done(); }); it('deeply nested virtual paths (gh-5250)', function(done) { const TestSchema = new Schema({}); TestSchema. virtual('a.b.c'). get(function() { return this.v; }). 
set(function(value) { this.v = value; }); const TestModel = db.model('gh5250', TestSchema); const t = new TestModel({'a.b.c': 5}); assert.equal(t.a.b.c, 5); done(); }); it('JSON.stringify nested errors (gh-5208)', function(done) { const AdditionalContactSchema = new Schema({ contactName: { type: String, required: true }, contactValue: { type: String, required: true } }); const ContactSchema = new Schema({ name: { type: String, required: true }, email: { type: String, required: true }, additionalContacts: [AdditionalContactSchema] }); const EmergencyContactSchema = new Schema({ contactName: { type: String, required: true }, contact: ContactSchema }); const EmergencyContact = db.model('EmergencyContact', EmergencyContactSchema); const contact = new EmergencyContact({ contactName: 'Electrical Service', contact: { name: 'John Smith', email: '[email protected]', additionalContacts: [ { contactName: 'skype' // Forgotten value } ] } }); contact.validate(function(error) { assert.ok(error); assert.ok(error.errors['contact']); assert.ok(error.errors['contact.additionalContacts.0.contactValue']); // This `JSON.stringify()` should not throw assert.ok(JSON.stringify(error).indexOf('contactValue') !== -1); done(); }); }); it('handles errors in subdoc pre validate (gh-5215)', function(done) { const childSchema = new mongoose.Schema({}); childSchema.pre('validate', function(next) { next(new Error('child pre validate')); }); const parentSchema = new mongoose.Schema({ child: childSchema }); const Parent = db.model('gh5215', parentSchema); Parent.create({ child: {} }, function(error) { assert.ok(error); assert.ok(error.errors['child']); assert.equal(error.errors['child'].message, 'child pre validate'); done(); }); }); it('custom error types (gh-4009)', function(done) { const CustomError = function() {}; const testSchema = new mongoose.Schema({ num: { type: Number, required: { ErrorConstructor: CustomError }, min: 5 } }); const Test = db.model('gh4009', testSchema); Test.create({}, function(error) { assert.ok(error); assert.ok(error.errors['num']); assert.ok(error.errors['num'] instanceof CustomError); Test.create({ num: 1 }, function(error) { assert.ok(error); assert.ok(error.errors['num']); assert.ok(error.errors['num'].constructor.name, 'ValidatorError'); assert.ok(!(error.errors['num'] instanceof CustomError)); done(); }); }); }); it('saving a doc with nested string array (gh-5282)', function(done) { const testSchema = new mongoose.Schema({ strs: [[String]] }); const Test = db.model('gh5282', testSchema); const t = new Test({ strs: [['a', 'b']] }); t.save(function(error, t) { assert.ifError(error); assert.deepEqual(t.toObject().strs, [['a', 'b']]); done(); }); }); it('push() onto a nested doc array (gh-6398)', function() { const schema = new mongoose.Schema({ name: String, array: [[{key: String, value: Number}]] }); const Model = db.model('gh6398', schema); return co(function*() { yield Model.create({ name: 'small', array: [[{ key: 'answer', value: 42 }]] }); let doc = yield Model.findOne(); assert.ok(doc); doc.array[0].push({ key: 'lucky', value: 7 }); yield doc.save(); doc = yield Model.findOne(); assert.equal(doc.array.length, 1); assert.equal(doc.array[0].length, 2); assert.equal(doc.array[0][1].key, 'lucky'); }); }); it('push() onto a triple nested doc array (gh-6602) (gh-6398)', function() { const schema = new mongoose.Schema({ array: [[[{key: String, value: Number}]]] }); const Model = db.model('gh6602', schema); return co(function*() { yield Model.create({ array: [[[{ key: 'answer', value: 42 }]]] 
}); let doc = yield Model.findOne(); assert.ok(doc); doc.array[0][0].push({ key: 'lucky', value: 7 }); yield doc.save(); doc = yield Model.findOne(); assert.equal(doc.array.length, 1); assert.equal(doc.array[0].length, 1); assert.equal(doc.array[0][0].length, 2); assert.equal(doc.array[0][0][1].key, 'lucky'); }); }); it('null _id (gh-5236)', function(done) { const childSchema = new mongoose.Schema({}); const M = db.model('gh5236', childSchema); const m = new M({ _id: null }); m.save(function(error, doc) { assert.equal(doc._id, null); done(); }); }); it('setting populated path with typeKey (gh-5313)', function(done) { const personSchema = Schema({ name: {$type: String}, favorite: { $type: Schema.Types.ObjectId, ref: 'gh5313' }, books: [{ $type: Schema.Types.ObjectId, ref: 'gh5313' }] }, { typeKey: '$type' }); const bookSchema = Schema({ title: String }); const Book = mongoose.model('gh5313', bookSchema); const Person = mongoose.model('gh5313_0', personSchema); const book1 = new Book({ title: 'The Jungle Book' }); const book2 = new Book({ title: '1984' }); const person = new Person({ name: 'Bob', favorite: book1, books: [book1, book2] }); assert.equal(person.books[0].title, 'The Jungle Book'); assert.equal(person.books[1].title, '1984'); done(); }); it('save twice with write concern (gh-5294)', function(done) { const schema = new mongoose.Schema({ name: String }, { safe: { w: 'majority', wtimeout: 1e4 } }); const M = db.model('gh5294', schema); M.create({ name: 'Test' }, function(error, doc) { assert.ifError(error); doc.name = 'test2'; doc.save(function(error) { assert.ifError(error); done(); }); }); }); it('undefined field with conditional required (gh-5296)', function(done) { const schema = Schema({ name: { type: String, maxlength: 63, required: function() { return false; } } }); const Model = db.model('gh5296', schema); Model.create({ name: undefined }, function(error) { assert.ifError(error); done(); }); }); it('dotted virtuals in toObject (gh-5473)', function(done) { const schema = new mongoose.Schema({}, { toObject: { virtuals: true }, toJSON: { virtuals: true } }); schema.virtual('test.a').get(function() { return 1; }); schema.virtual('test.b').get(function() { return 2; }); const Model = mongoose.model('gh5473', schema); const m = new Model({}); assert.deepEqual(m.toJSON().test, { a: 1, b: 2 }); assert.deepEqual(m.toObject().test, { a: 1, b: 2 }); assert.equal(m.toObject({ virtuals: false }).test, void 0); done(); }); it('dotted virtuals in toObject (gh-5506)', function(done) { const childSchema = new Schema({ name: String, _id: false }); const parentSchema = new Schema({ child: { type: childSchema, default: {} } }); const Parent = db.model('gh5506', parentSchema); const p = new Parent({ child: { name: 'myName' } }); p.save(). then(function() { return Parent.findOne(); }). then(function(doc) { doc.child = {}; return doc.save(); }). then(function() { return Parent.findOne(); }). then(function(doc) { assert.deepEqual(doc.toObject().child, {}); done(); }). 
catch(done); }); it('parent props not in child (gh-5470)', function(done) { const employeeSchema = new mongoose.Schema({ name: { first: String, last: String }, department: String }); const Employee = mongoose.model('Test', employeeSchema); const employee = new Employee({ name: { first: 'Ron', last: 'Swanson' }, department: 'Parks and Recreation' }); const ownPropertyNames = Object.getOwnPropertyNames(employee.name); assert.ok(ownPropertyNames.indexOf('department') === -1, ownPropertyNames.join(',')); assert.ok(ownPropertyNames.indexOf('first') !== -1, ownPropertyNames.join(',')); assert.ok(ownPropertyNames.indexOf('last') !== -1, ownPropertyNames.join(',')); done(); }); it('modifying array with existing ids (gh-5523)', function(done) { const friendSchema = new mongoose.Schema( { _id: String, name: String, age: Number, dob: Date }, { _id: false }); const socialSchema = new mongoose.Schema( { friends: [friendSchema] }, { _id: false }); const userSchema = new mongoose.Schema({ social: { type: socialSchema, required: true } }); const User = db.model('gh5523', userSchema); const user = new User({ social: { friends: [ { _id: 'val', age: 28 } ] } }); user.social.friends = [{ _id: 'val', name: 'Val' }]; assert.deepEqual(user.toObject().social.friends[0], { _id: 'val', name: 'Val' }); user.save(function(error) { assert.ifError(error); User.findOne({ _id: user._id }, function(error, doc) { assert.ifError(error); assert.deepEqual(doc.toObject().social.friends[0], { _id: 'val', name: 'Val' }); done(); }); }); }); it('consistent setter context for single nested (gh-5363)', function(done) { const contentSchema = new Schema({ blocks: [{ type: String }], summary: { type: String } }); // Subdocument setter const contexts = []; contentSchema.path('blocks').set(function(srcBlocks) { if (!this.ownerDocument().isNew) { contexts.push(this.toObject()); } return srcBlocks; }); const noteSchema = new Schema({ title: { type: String, required: true }, body: { type: contentSchema } }); const Note = db.model('gh5363', noteSchema); const note = new Note({ title: 'Lorem Ipsum Dolor', body: { summary: 'Summary Test', blocks: ['html'] } }); note.save(). then(function(note) { assert.equal(contexts.length, 0); note.set('body', { summary: 'New Summary', blocks: ['gallery', 'html'] }); return note.save(); }). then(function() { assert.equal(contexts.length, 1); assert.deepEqual(contexts[0].blocks, ['html']); done(); }). catch(done); }); it('deeply nested subdocs and markModified (gh-5406)', function(done) { const nestedValueSchema = new mongoose.Schema({ _id: false, value: Number }); const nestedPropertySchema = new mongoose.Schema({ _id: false, active: Boolean, nestedValue: nestedValueSchema }); const nestedSchema = new mongoose.Schema({ _id: false, nestedProperty: nestedPropertySchema, nestedTwoProperty: nestedPropertySchema }); const optionsSchema = new mongoose.Schema({ _id: false, nestedField: nestedSchema }); const TestSchema = new mongoose.Schema({ fieldOne: String, options: optionsSchema }); const Test = db.model('gh5406', TestSchema); const doc = new Test({ fieldOne: 'Test One', options: { nestedField: { nestedProperty: { active: true, nestedValue: { value: 42 } } } } }); doc. save(). then(function(doc) { doc.options.nestedField.nestedTwoProperty = { active: true, nestedValue: { value: 1337 } }; assert.ok(doc.isModified('options')); return doc.save(); }). then(function(doc) { return Test.findById(doc._id); }). 
then(function(doc) { assert.equal(doc.options.nestedField.nestedTwoProperty.nestedValue.value, 1337); done(); }). catch(done); }); it('single nested subdoc post remove hooks (gh-5388)', function(done) { const contentSchema = new Schema({ blocks: [{ type: String }], summary: { type: String } }); let called = 0; contentSchema.post('remove', function() { ++called; }); const noteSchema = new Schema({ body: { type: contentSchema } }); const Note = db.model('gh5388', noteSchema); const note = new Note({ title: 'Lorem Ipsum Dolor', body: { summary: 'Summary Test', blocks: ['html'] } }); note.save(function(error) { assert.ifError(error); note.remove(function(error) { assert.ifError(error); setTimeout(function() { assert.equal(called, 1); done(); }, 50); }); }); }); it('push populated doc onto empty array triggers manual population (gh-5504)', function(done) { const ReferringSchema = new Schema({ reference: [{ type: Schema.Types.ObjectId, ref: 'gh5504' }] }); const Referrer = db.model('gh5504', ReferringSchema); const referenceA = new Referrer(); const referenceB = new Referrer(); const referrerA = new Referrer({reference: [referenceA]}); const referrerB = new Referrer(); const referrerC = new Referrer(); const referrerD = new Referrer(); const referrerE = new Referrer(); referrerA.reference.push(referenceB); assert.ok(referrerA.reference[0] instanceof Referrer); assert.ok(referrerA.reference[1] instanceof Referrer); referrerB.reference.push(referenceB); assert.ok(referrerB.reference[0] instanceof Referrer); referrerC.reference.unshift(referenceB); assert.ok(referrerC.reference[0] instanceof Referrer); referrerD.reference.splice(0, 0, referenceB); assert.ok(referrerD.reference[0] instanceof Referrer); referrerE.reference.addToSet(referenceB); assert.ok(referrerE.reference[0] instanceof Referrer); done(); }); it('single nested conditional required scope (gh-5569)', function(done) { const scopes = []; const ThingSchema = new mongoose.Schema({ undefinedDisallowed: { type: String, required: function() { scopes.push(this); return this.undefinedDisallowed === undefined; }, default: null } }); const SuperDocumentSchema = new mongoose.Schema({ thing: { type: ThingSchema, default: function() { return {}; } } }); const SuperDocument = db.model('gh5569', SuperDocumentSchema); let doc = new SuperDocument(); doc.thing.undefinedDisallowed = null; doc.save(function(error) { assert.ifError(error); doc = new SuperDocument(); doc.thing.undefinedDisallowed = undefined; doc.save(function(error) { assert.ok(error); assert.ok(error.errors['thing.undefinedDisallowed']); done(); }); }); }); it('single nested setters only get called once (gh-5601)', function(done) { const vals = []; const ChildSchema = new mongoose.Schema({ number: { type: String, set: function(v) { vals.push(v); return v; } }, _id: false }); ChildSchema.set('toObject', { getters: true, minimize: false }); const ParentSchema = new mongoose.Schema({ child: { type: ChildSchema, default: {} } }); const Parent = db.model('gh5601', ParentSchema); const p = new Parent(); p.child = { number: '555.555.0123' }; assert.equal(vals.length, 1); assert.equal(vals[0], '555.555.0123'); done(); }); it('single getters only get called once (gh-7442)', function() { let called = 0; const childSchema = new Schema({ value: { type: String, get: function(v) { ++called; return v; } } }); const schema = new Schema({ name: childSchema }); const Model = db.model('gh7442', schema); const doc = new Model({ 'name.value': 'test' }); called = 0; doc.toObject({ getters: true }); 
assert.equal(called, 1); doc.toObject({ getters: false }); assert.equal(called, 1); return Promise.resolve(); }); it('setting doc array to array of top-level docs works (gh-5632)', function(done) { const MainSchema = new Schema({ name: { type: String }, children: [{ name: { type: String } }] }); const RelatedSchema = new Schema({ name: { type: String } }); const Model = db.model('gh5632', MainSchema); const RelatedModel = db.model('gh5632_0', RelatedSchema); RelatedModel.create({ name: 'test' }, function(error, doc) { assert.ifError(error); Model.create({ name: 'test1', children: [doc] }, function(error, m) { assert.ifError(error); m.children = [doc]; m.save(function(error) { assert.ifError(error); assert.equal(m.children.length, 1); assert.equal(m.children[0].name, 'test'); done(); }); }); }); }); it('Using set as a schema path (gh-1939)', function(done) { const testSchema = new Schema({ set: String }); const Test = db.model('gh1939', testSchema); const t = new Test({ set: 'test 1' }); assert.equal(t.set, 'test 1'); t.save(function(error) { assert.ifError(error); t.set = 'test 2'; t.save(function(error) { assert.ifError(error); assert.equal(t.set, 'test 2'); done(); }); }); }); it('handles array defaults correctly (gh-5780)', function(done) { const testSchema = new Schema({ nestedArr: { type: [[Number]], default: [[0, 1]] } }); const Test = db.model('gh5780', testSchema); const t = new Test({}); assert.deepEqual(t.toObject().nestedArr, [[0, 1]]); t.nestedArr.push([1, 2]); const t2 = new Test({}); assert.deepEqual(t2.toObject().nestedArr, [[0, 1]]); done(); }); it('sets path to the empty string on save after query (gh-6477)', function() { const schema = new Schema({ name: String, s: { type: String, default: '' } }); const Test = db.model('gh6477_2', schema); const test = new Test; assert.strictEqual(test.s, ''); return co(function* () { // use native driver directly to insert an empty doc yield Test.collection.insertOne({}); // udate the doc with the expectation that default booleans will be saved. const found = yield Test.findOne({}); found.name = 'Max'; yield found.save(); // use native driver directly to check doc for saved string const final = yield Test.collection.findOne({}); assert.strictEqual(final.name, 'Max'); assert.strictEqual(final.s, ''); }); }); it('sets path to the default boolean on save after query (gh-6477)', function() { const schema = new Schema({ name: String, f: { type: Boolean, default: false }, t: { type: Boolean, default: true } }); const Test = db.model('gh6477', schema); return co(function* () { // use native driver directly to kill the fields yield Test.collection.insertOne({}); // udate the doc with the expectation that default booleans will be saved. 
const found = yield Test.findOne({}); found.name = 'Britney'; yield found.save(); // use native driver directly to check doc for saved string const final = yield Test.collection.findOne({}); assert.strictEqual(final.name, 'Britney'); assert.strictEqual(final.t, true); assert.strictEqual(final.f, false); }); }); it('virtuals with no getters return undefined (gh-6223)', function(done) { const personSchema = new mongoose.Schema({ name: { type: String }, children: [{ name: { type: String } }] }, { toObject: { getters: true, virtuals: true }, toJSON: { getters: true, virtuals: true }, id: false }); personSchema.virtual('favoriteChild').set(function(v) { return this.set('children.0', v); }); personSchema.virtual('heir').get(function() { return this.get('children.0'); }); const Person = db.model('gh6223', personSchema); const person = new Person({ name: 'Anakin' }); assert.strictEqual(person.favoriteChild, void 0); assert.ok(!('favoriteChild' in person.toJSON())); assert.ok(!('favoriteChild' in person.toObject())); done(); }); it('add default getter/setter (gh-6262)', function(done) { const testSchema = new mongoose.Schema({}); testSchema.virtual('totalValue'); const Test = db.model('gh6262', testSchema); assert.equal(Test.schema.virtuals.totalValue.getters.length, 1); assert.equal(Test.schema.virtuals.totalValue.setters.length, 1); const doc = new Test(); doc.totalValue = 5; assert.equal(doc.totalValue, 5); done(); }); it('nested virtuals + nested toJSON (gh-6294)', function() { const schema = mongoose.Schema({ nested: { prop: String } }, { _id: false, id: false }); schema.virtual('nested.virtual').get(() => 'test 2'); schema.set('toJSON', { virtuals: true }); const MyModel = db.model('gh6294', schema); const doc = new MyModel({ nested: { prop: 'test 1' } }); assert.deepEqual(doc.toJSON(), { nested: { prop: 'test 1', virtual: 'test 2' } }); assert.deepEqual(doc.nested.toJSON(), { prop: 'test 1', virtual: 'test 2' }); }); it('Disallows writing to __proto__ and other special properties', function(done) { const schema = new mongoose.Schema({ name: String }, { strict: false }); const Model = db.model('prototest', schema); const doc = new Model({ '__proto__.x': 'foo' }); assert.strictEqual(Model.x, void 0); doc.set('__proto__.y', 'bar'); assert.strictEqual(Model.y, void 0); doc.set('constructor.prototype.z', 'baz'); assert.strictEqual(Model.z, void 0); done(); }); it('save() depopulates pushed arrays (gh-6048)', function() { const blogPostSchema = new Schema({ comments: [{ type: mongoose.Schema.Types.ObjectId, ref: 'gh6048_0' }] }); const BlogPost = db.model('gh6048', blogPostSchema); const commentSchema = new Schema({ text: String }); const Comment = db.model('gh6048_0', commentSchema); return co(function*() { let blogPost = yield BlogPost.create({}); const comment = yield Comment.create({ text: 'Hello' }); blogPost = yield BlogPost.findById(blogPost); blogPost.comments.push(comment); yield blogPost.save(); const savedBlogPost = yield BlogPost.collection. 
findOne({ _id: blogPost._id }); assert.equal(savedBlogPost.comments.length, 1); assert.equal(savedBlogPost.comments[0].constructor.name, 'ObjectID'); assert.equal(savedBlogPost.comments[0].toString(), blogPost.comments[0]._id.toString()); }); }); it('Handles setting populated path set via `Document#populate()` (gh-7302)', function() { const authorSchema = new Schema({ name: String }); const bookSchema = new Schema({ author: { type: mongoose.Schema.Types.ObjectId, ref: 'gh7302_Author' } }); const Author = db.model('gh7302_Author', authorSchema); const Book = db.model('gh7302_Book', bookSchema); return Author.create({ name: 'Victor Hugo' }). then(function(author) { return Book.create({ author: author._id }); }). then(function() { return Book.findOne(); }). then(function(doc) { return doc.populate('author').execPopulate(); }). then(function(doc) { doc.author = {}; assert.ok(!doc.author.name); assert.ifError(doc.validateSync()); }); }); it('Single nested subdocs using discriminator can be modified (gh-5693)', function(done) { const eventSchema = new Schema({ message: String }, { discriminatorKey: 'kind', _id: false }); const trackSchema = new Schema({ event: eventSchema }); trackSchema.path('event').discriminator('Clicked', new Schema({ element: String }, { _id: false })); trackSchema.path('event').discriminator('Purchased', new Schema({ product: String }, { _id: false })); const MyModel = db.model('gh5693', trackSchema); const doc = new MyModel({ event: { message: 'Test', kind: 'Clicked', element: 'Amazon Link' } }); doc.save(function(error) { assert.ifError(error); assert.equal(doc.event.message, 'Test'); assert.equal(doc.event.kind, 'Clicked'); assert.equal(doc.event.element, 'Amazon Link'); doc.set('event', { kind: 'Purchased', product: 'Professional AngularJS' }); doc.save(function(error) { assert.ifError(error); assert.equal(doc.event.kind, 'Purchased'); assert.equal(doc.event.product, 'Professional AngularJS'); assert.ok(!doc.event.element); assert.ok(!doc.event.message); done(); }); }); }); it('required function only gets called once (gh-6801)', function() { let reqCount = 0; const childSchema = new Schema({ name: { type: String, required: function() { reqCount++; return true; } } }); const Child = mongoose.model('gh6801_Child', childSchema); const parentSchema = new Schema({ name: String, child: childSchema }); const Parent = mongoose.model('gh6801_Parent', parentSchema); const child = new Child(/* name is required */); const parent = new Parent({ child: child }); return parent.validate().then( () => assert.ok(false), error => { assert.equal(reqCount, 1); assert.ok(error.errors['child.name']); } ); }); it('required function called again after save() (gh-6892)', function() { const schema = new mongoose.Schema({ field: { type: String, default: null, required: function() { return this && this.field === undefined; } } }); const Model = db.model('gh6892', schema); return co(function*() { yield Model.create({}); const doc1 = yield Model.findOne({}).select({_id: 1}); yield doc1.save(); // Should not throw yield Model.create({}); }); }); it('doc array: set then remove (gh-3511)', function(done) { const ItemChildSchema = new mongoose.Schema({ name: { type: String, required: true } }); const ItemParentSchema = new mongoose.Schema({ children: [ItemChildSchema] }); const ItemParent = db.model('gh3511', ItemParentSchema); const p = new ItemParent({ children: [{ name: 'test1' }, { name: 'test2' }] }); p.save(function(error) { assert.ifError(error); ItemParent.findById(p._id, function(error, doc) { 
assert.ifError(error); assert.ok(doc); assert.equal(doc.children.length, 2); doc.children[1].name = 'test3'; doc.children.remove(doc.children[0]); doc.save(function(error) { assert.ifError(error); ItemParent.findById(doc._id, function(error, doc) { assert.ifError(error); assert.equal(doc.children.length, 1); assert.equal(doc.children[0].name, 'test3'); done(); }); }); }); }); }); it('modifying unselected nested object (gh-5800)', function() { const MainSchema = new mongoose.Schema({ a: { b: {type: String, default: 'some default'}, c: {type: Number, default: 0}, d: {type: String} }, e: {type: String} }); MainSchema.pre('save', function(next) { if (this.isModified()) { this.set('a.c', 100, Number); } next(); }); const Main = db.model('gh5800', MainSchema); const doc = { a: { b: 'not the default', d: 'some value' }, e: 'e' }; return Main.create(doc). then(function(doc) { assert.equal(doc.a.b, 'not the default'); assert.equal(doc.a.d, 'some value'); return Main.findOne().select('e'); }). then(function(doc) { doc.e = 'e modified'; return doc.save(); }). then(function() { return Main.findOne(); }). then(function(doc) { assert.equal(doc.a.b, 'not the default'); assert.equal(doc.a.d, 'some value'); }); }); it('set() underneath embedded discriminator (gh-6482)', function() { const mediaSchema = new Schema({ file: String }, { discriminatorKey: 'kind', _id: false }); const photoSchema = new Schema({ position: String }); const pageSchema = new Schema({ media: mediaSchema }); pageSchema.path('media').discriminator('photo', photoSchema); const Page = db.model('gh6482_Page', pageSchema); return co(function*() { let doc = yield Page.create({ media: { kind: 'photo', file: 'cover.jpg', position: 'left' } }); // Using positional args syntax doc.set('media.position', 'right'); assert.equal(doc.media.position, 'right'); yield doc.save(); doc = yield Page.findById(doc._id); assert.equal(doc.media.position, 'right'); // Using object syntax doc.set({ 'media.position': 'left' }); assert.equal(doc.media.position, 'left'); yield doc.save(); doc = yield Page.findById(doc._id); assert.equal(doc.media.position, 'left'); }); }); it('set() underneath array embedded discriminator (gh-6526)', function() { const mediaSchema = new Schema({ file: String }, { discriminatorKey: 'kind', _id: false }); const photoSchema = new Schema({ position: String }); const pageSchema = new Schema({ media: [mediaSchema] }); pageSchema.path('media').discriminator('photo', photoSchema); const Page = db.model('gh6526_Page', pageSchema); return co(function*() { let doc = yield Page.create({ media: [{ kind: 'photo', file: 'cover.jpg', position: 'left' }] }); // Using positional args syntax doc.set('media.0.position', 'right'); assert.equal(doc.media[0].position, 'right'); yield doc.save(); doc = yield Page.findById(doc._id); assert.equal(doc.media[0].position, 'right'); }); }); it('consistent context for nested docs (gh-5347)', function(done) { const contexts = []; const childSchema = new mongoose.Schema({ phoneNumber: { type: String, required: function() { contexts.push(this); return this.notifications.isEnabled; } }, notifications: { isEnabled: { type: Boolean, required: true } } }); const parentSchema = new mongoose.Schema({ name: String, children: [childSchema] }); const Parent = db.model('gh5347', parentSchema); Parent.create({ name: 'test', children: [ { phoneNumber: '123', notifications: { isEnabled: true } } ] }, function(error, doc) { assert.ifError(error); const child = doc.children.id(doc.children[0]._id); child.phoneNumber = '345'; 
assert.equal(contexts.length, 1); doc.save(function(error) { assert.ifError(error); assert.equal(contexts.length, 2); assert.ok(contexts[0].toObject().notifications.isEnabled); assert.ok(contexts[1].toObject().notifications.isEnabled); done(); }); }); }); it('accessing arrays in setters on initial document creation (gh-6155)', function(done) { const artistSchema = new mongoose.Schema({ name: { type: String, set: function(v) { const sp = v.split(' '); for (let i = 0; i < sp.length; ++i) { this.keywords.push(sp[i]); } return v; } }, keywords: [String] }); const Artist = db.model('gh6155', artistSchema); const artist = new Artist({ name: 'Motley Crue' }); assert.deepEqual(artist.toObject().keywords, ['Motley', 'Crue']); done(); }); it('handles 2nd level nested field with null child (gh-6187)', function(done) { const NestedSchema = new Schema({ parent: new Schema({ name: String, child: { name: String } }, { strict: false }) }); const NestedModel = db.model('Nested', NestedSchema); const n = new NestedModel({ parent: { name: 'foo', child: null // does not fail if undefined } }); assert.equal(n.parent.name, 'foo'); done(); }); it('does not call default function on init if value set (gh-6410)', function() { let called = 0; function generateRandomID() { called++; return called; } const TestDefaultsWithFunction = db.model('gh6410', new Schema({ randomID: {type: Number, default: generateRandomID} })); const post = new TestDefaultsWithFunction; assert.equal(post.get('randomID'), 1); assert.equal(called, 1); return co(function*() { yield post.save(); yield TestDefaultsWithFunction.findById(post._id); assert.equal(called, 1); }); }); it('convertToFalse and convertToTrue (gh-6758)', function() { const TestSchema = new Schema({ b: Boolean }); const Test = db.model('gh6758', TestSchema); mongoose.Schema.Types.Boolean.convertToTrue.add('aye'); mongoose.Schema.Types.Boolean.convertToFalse.add('nay'); const doc1 = new Test({ b: 'aye' }); const doc2 = new Test({ b: 'nay' }); assert.strictEqual(doc1.b, true); assert.strictEqual(doc2.b, false); return doc1.save(). then(() => Test.findOne({ b: { $exists: 'aye' } })). then(doc => assert.ok(doc)). 
then(() => { mongoose.Schema.Types.Boolean.convertToTrue.delete('aye'); mongoose.Schema.Types.Boolean.convertToFalse.delete('nay'); }); }); it('doesnt double-call getters when using get() (gh-6779)', function() { const schema = new Schema({ nested: { arr: [{ key: String }] } }); schema.path('nested.arr.0.key').get(v => { return 'foobar' + v; }); const M = db.model('gh6779', schema); const test = new M(); test.nested.arr.push({ key: 'value' }); test.nested.arr.push({ key: 'value2' }); assert.equal(test.get('nested.arr.0.key'), 'foobarvalue'); assert.equal(test.get('nested.arr.1.key'), 'foobarvalue2'); return Promise.resolve(); }); it('returns doubly nested field in inline sub schema when using get() (gh-6925)', function() { const child = new Schema({ nested: { key: String } }); const parent = new Schema({ child: child }); const M = db.model('gh6925', parent); const test = new M({ child: { nested: { key: 'foobarvalue' } } }); assert.equal(test.get('child.nested.key'), 'foobarvalue'); return Promise.resolve(); }); it('defaults should see correct isNew (gh-3793)', function() { let isNew = []; const TestSchema = new mongoose.Schema({ test: { type: Date, default: function() { isNew.push(this.isNew); if (this.isNew) { return Date.now(); } return void 0; } } }); const TestModel = db.model('gh3793', TestSchema); return co(function*() { yield Promise.resolve(db); yield TestModel.collection.insertOne({}); let doc = yield TestModel.findOne({}); assert.strictEqual(doc.test, void 0); assert.deepEqual(isNew, [false]); isNew = []; doc = yield TestModel.create({}); assert.ok(doc.test instanceof Date); assert.deepEqual(isNew, [true]); }); }); it('modify multiple subdoc paths (gh-4405)', function(done) { const ChildObjectSchema = new Schema({ childProperty1: String, childProperty2: String, childProperty3: String }); const ParentObjectSchema = new Schema({ parentProperty1: String, parentProperty2: String, child: ChildObjectSchema }); const Parent = db.model('gh4405', ParentObjectSchema); const p = new Parent({ parentProperty1: 'abc', parentProperty2: '123', child: { childProperty1: 'a', childProperty2: 'b', childProperty3: 'c' } }); p.save(function(error) { assert.ifError(error); Parent.findById(p._id, function(error, p) { assert.ifError(error); p.parentProperty1 = 'foo'; p.parentProperty2 = 'bar'; p.child.childProperty1 = 'ping'; p.child.childProperty2 = 'pong'; p.child.childProperty3 = 'weee'; p.save(function(error) { assert.ifError(error); Parent.findById(p._id, function(error, p) { assert.ifError(error); assert.equal(p.child.childProperty1, 'ping'); assert.equal(p.child.childProperty2, 'pong'); assert.equal(p.child.childProperty3, 'weee'); done(); }); }); }); }); }); it('doesnt try to cast populated embedded docs (gh-6390)', function() { const otherSchema = new Schema({ name: String }); const subSchema = new Schema({ my: String, other: { type: Schema.Types.ObjectId, refPath: 'sub.my' } }); const schema = new Schema({ name: String, sub: subSchema }); const Other = db.model('gh6390', otherSchema); const Test = db.model('6h6390_2', schema); const other = new Other({ name: 'Nicole' }); const test = new Test({ name: 'abc', sub: { my: 'gh6390', other: other._id } }); return co(function* () { yield other.save(); yield test.save(); const doc = yield Test.findOne({}).populate('sub.other'); assert.strictEqual('Nicole', doc.sub.other.name); }); }); }); describe('clobbered Array.prototype', function() { afterEach(function() { delete Array.prototype.remove; }); it('handles clobbered Array.prototype.remove (gh-6431)', 
function(done) { Object.defineProperty(Array.prototype, 'remove', { value: 42, configurable: true, writable: false }); const schema = new Schema({ arr: [{ name: String }] }); const MyModel = db.model('gh6431', schema); const doc = new MyModel(); assert.deepEqual(doc.toObject().arr, []); done(); }); it('calls array validators again after save (gh-6818)', function() { const schema = new Schema({ roles: { type: [{ name: String, folders: { type: [{ folderId: String }], validate: v => assert.ok(v.length === new Set(v.map(el => el.folderId)).size, 'Duplicate') } }] } }); const Model = db.model('gh6818', schema); return co(function*() { yield Model.create({ roles: [ { name: 'admin' }, { name: 'mod', folders: [{ folderId: 'foo' }] } ] }); const doc = yield Model.findOne(); doc.roles[1].folders.push({ folderId: 'bar' }); yield doc.save(); doc.roles[1].folders[1].folderId = 'foo'; let threw = false; try { yield doc.save(); } catch (error) { threw = true; assert.equal(error.errors['roles.1.folders'].reason.message, 'Duplicate'); } assert.ok(threw); }); }); it('set single nested to num throws ObjectExpectedError (gh-6710) (gh-6753)', function() { const schema = new Schema({ nested: new Schema({ num: Number }) }); const Test = mongoose.model('gh6710', schema); const doc = new Test({ nested: { num: 123 } }); doc.nested = 123; return doc.validate(). then(() => { throw new Error('Should have errored'); }). catch(err => { assert.ok(err.message.indexOf('Cast to Embedded') !== -1, err.message); assert.equal(err.errors['nested'].reason.name, 'ObjectExpectedError'); const doc = new Test({ nested: { num: 123 } }); doc.nested = []; return doc.validate(); }). then(() => { throw new Error('Should have errored'); }). catch(err => { assert.ok(err.message.indexOf('Cast to Embedded') !== -1, err.message); assert.equal(err.errors['nested'].reason.name, 'ObjectExpectedError'); }); }); it('set array to false throws ObjectExpectedError (gh-7242)', function() { const Child = new mongoose.Schema({}); const Parent = new mongoose.Schema({ children: [Child] }); const ParentModel = db.model('gh7242', Parent); const doc = new ParentModel({ children: false }); return doc.save().then( () => assert.ok(false), err => { assert.ok(err.errors['children']); assert.equal(err.errors['children'].name, 'ObjectParameterError'); } ); }); }); it('does not save duplicate items after two saves (gh-6900)', function() { const M = db.model('gh6900', {items: [{name: String}]}); const doc = new M(); doc.items.push({ name: '1' }); return co(function*() { yield doc.save(); doc.items.push({ name: '2' }); yield doc.save(); const found = yield M.findById(doc.id); assert.equal(found.items.length, 2); }); }); it('validateSync() on embedded doc (gh-6931)', function() { const innerSchema = new mongoose.Schema({ innerField: { type: mongoose.Schema.Types.ObjectId, required: true } }); const schema = new mongoose.Schema({ field: { type: mongoose.Schema.Types.ObjectId, required: true }, inner: [innerSchema] }); const Model = db.model('gh6931', schema); return co(function*() { const doc2 = new Model(); doc2.field = mongoose.Types.ObjectId(); doc2.inner.push({ innerField: mongoose.Types.ObjectId() }); doc2.inner[0].innerField = ''; let err = doc2.inner[0].validateSync(); assert.ok(err); assert.ok(err.errors['innerField']); err = yield doc2.inner[0].validate().then(() => assert.ok(false), err => err); assert.ok(err); assert.ok(err.errors['innerField']); }); }); it('retains user-defined key order with nested docs (gh-6944)', function() { const schema = new Schema({ 
_id: String, foo: String, bar: { a: String } }); const Model = db.model('gh6944', schema); const doc = new Model({ _id: 'test', foo: 'hello', bar: { a: 'world' } }); // Same order as in the initial set above assert.deepEqual(Object.keys(doc._doc), ['_id', 'foo', 'bar']); return Promise.resolve(); }); it('does not mark modified if setting nested subdoc to same value (gh-7048)', function() { const BarSchema = new Schema({ bar: String }, { _id: false }); const FooNestedSchema = new Schema({ foo: BarSchema }); const Model = db.model('gh7048', FooNestedSchema); return co(function*() { const doc = yield Model.create({ foo: { bar: 'test' } }); doc.set({ foo: { bar: 'test' } }); assert.deepEqual(doc.modifiedPaths(), []); doc.set('foo.bar', 'test'); assert.deepEqual(doc.modifiedPaths(), []); }); }); it('allow saving validation error in db (gh-7127)', function() { return co(function*() { const schema = new Schema({ error: mongoose.Schema.Types.Mixed, name: { type: String, required: true } }); const Model = db.model('gh7127', schema); const doc = new Model(); const error = yield doc.validate().catch(error => error); doc.name = 'foo'; doc.error = error; yield doc.save(); const fromDb = yield Model.findOne(); assert.ok(fromDb.error.errors.name); }); }); it('storeSubdocValidationError (gh-6802)', function() { return co(function*() { const GrandchildSchema = new Schema({ name: { type: String, required: true } }, { storeSubdocValidationError: false }); const ChildSchema = new Schema({ name: String, child: GrandchildSchema }, { storeSubdocValidationError: false }); const ParentSchema = new Schema({ name: String, child: ChildSchema }); const Parent = db.model('gh6802', ParentSchema); const parent = new Parent({ child: { child: {} } }); let err = yield parent.validate().then(() => null, err => err); assert.ok(err); assert.ok(err.errors['child.child.name']); assert.ok(!err.errors['child']); assert.ok(!err.errors['child.child']); err = parent.validateSync(); assert.ok(err); assert.ok(err.errors['child.child.name']); assert.ok(!err.errors['child']); assert.ok(!err.errors['child.child']); }); }); it('handles mixed arrays with all syntaxes (gh-7109)', function() { const schema = new Schema({ arr1: [Schema.Types.Mixed], arr2: [{}], arr3: [Object] }); const Test = db.model('gh7109', schema); const test = new Test({ arr1: ['test1', { two: 'three' }, [4, 'five', 6]], arr2: ['test2', { three: 'four' }, [5, 'six', 7]], arr3: ['test3', { four: 'five' }, [6, 'seven', 8]] }); assert.ok(test.validateSync() == null, test.validateSync()); return Promise.resolve(); }); it('supports validator.isUUID as a custom validator (gh-7145)', function() { const schema = new Schema({ name: { type: String, validate: [validator.isUUID, 'invalid name'] } }); const Test = db.model('gh7145', schema); const doc = new Test({ name: 'not-a-uuid' }); const error = doc.validateSync(); assert.ok(error instanceof Error); assert.ok(/invalid name/.test(error.message)); return co(function*() { const error = yield doc.validate().then(() => null, err => err); assert.ok(error instanceof Error); assert.ok(/invalid name/.test(error.message)); }); }); it('propsParameter option (gh-7145)', function() { const schema = new Schema({ name: { type: String, validate: { validator: (v, props) => props.validator != null, propsParameter: true } } }); const Test = db.model('gh7145_0', schema); const doc = new Test({ name: 'foo' }); const error = doc.validateSync(); assert.ok(error == null, error); return co(function*() { const error = yield doc.validate().then(() => 
null, err => err); assert.ok(error == null, error); }); }); it('surfaces errors in subdoc pre validate (gh-7187)', function() { const InnerSchema = new Schema({ name: String }); InnerSchema.pre('validate', function() { throw new Error('Oops!'); }); const TestSchema = new Schema({ subdocs: [InnerSchema] }); const Test = db.model('gh7187', TestSchema); return Test.create({ subdocs: [{ name: 'foo' }] }).then( () => { throw new Error('Fail'); }, err => { assert.ok(err.message.indexOf('Oops!') !== -1, err.message); } ); }); it('runs setter only once when doing .set() underneath single nested (gh-7196)', function() { let called = []; const InnerSchema = new Schema({ name: String, withSetter: { type: String, set: function(v) { called.push(this); return v; } } }); const TestSchema = new Schema({ nested: InnerSchema }); const Model = db.model('gh7196', TestSchema); const doc = new Model({ nested: { name: 'foo' } }); // Make sure setter only gets called once called = []; doc.set('nested.withSetter', 'bar'); assert.equal(called.length, 1); assert.equal(called[0].name, 'foo'); return Promise.resolve(); }); it('should enable key with dot(.) on mixed types with checkKeys (gh-7144)', function() { const s = new Schema({ raw: { type: Schema.Types.Mixed } }); const M = db.model('gh7144', s); const raw = { 'foo.bar': 'baz' }; return co(function*() { let doc = yield M.create([{ raw: raw }], { checkKeys: false }). then(res => res[0]); assert.deepEqual(doc.raw, raw); doc = yield M.findOneAndUpdate({}, { raw: { 'a.b': 2 } }, { new: true }); assert.deepEqual(doc.raw, { 'a.b': 2 }); }); }); it('doesnt mark array as modified on init if embedded schema has default (gh-7227)', function() { const subSchema = new mongoose.Schema({ users: { type: [{ name: { type: String } }], // This test ensures the whole array won't be modified on init because // of this default default: [{ name: 'test' }] } }); const schema = new mongoose.Schema({ sub: [subSchema] }); const Model = db.model('gh7227', schema); return co(function*() { let doc = new Model({ name: 'test', sub: [{}] }); yield doc.save(); assert.ok(!doc.isModified()); doc = yield Model.findOne(); assert.ok(!doc.isModified()); }); }); it('casts defaults for doc arrays (gh-7337)', function() { const accountSchema = new mongoose.Schema({ roles: { type: [{ otherProperties: { example: Boolean, }, name: String, }], default: function() { return [ { otherProperties: { example: true }, name: 'First' }, { otherProperties: { example: false }, name: 'Second' } ]; } } }); const Account = db.model('gh7337', accountSchema); return co(function*() { yield Account.create({}); const doc = yield Account.findOne(); assert.ok(doc.roles[0]._id); assert.ok(doc.roles[1]._id); }); }); it('updateOne() hooks (gh-7133) (gh-7423)', function() { const schema = new mongoose.Schema({ name: String }); let queryCount = 0; let docCount = 0; let docPostCount = 0; let docRegexCount = 0; let docPostRegexCount = 0; schema.pre('updateOne', () => ++queryCount); schema.pre('updateOne', { document: true, query: false }, () => ++docCount); schema.post('updateOne', { document: true, query: false }, () => ++docPostCount); schema.pre(/^updateOne$/, { document: true, query: false }, () => ++docRegexCount); schema.post(/^updateOne$/, { document: true, query: false }, () => ++docPostRegexCount); let removeCount1 = 0; let removeCount2 = 0; schema.pre('remove', () => ++removeCount1); schema.pre('remove', { document: true, query: false }, () => ++removeCount2); const Model = db.model('gh7133', schema); return co(function*() { 
const doc = new Model({ name: 'test' }); yield doc.save(); assert.equal(queryCount, 0); assert.equal(docCount, 0); assert.equal(docPostCount, 0); assert.equal(docRegexCount, 0); assert.equal(docPostRegexCount, 0); yield doc.updateOne({ name: 'test2' }); assert.equal(queryCount, 1); assert.equal(docCount, 1); assert.equal(docPostCount, 1); assert.equal(docRegexCount, 1); assert.equal(docPostRegexCount, 1); assert.equal(removeCount1, 0); assert.equal(removeCount2, 0); yield doc.remove(); assert.equal(removeCount1, 1); assert.equal(removeCount2, 1); }); }); it('doesnt mark single nested doc date as modified if setting with string (gh-7264)', function() { const subSchema = new mongoose.Schema({ date2: Date }); const schema = new mongoose.Schema({ date1: Date, sub: subSchema }); const Model = db.model('gh7264', schema); return co(function*() { const date = '2018-11-22T09:00:00.000Z'; const doc = yield Model.create({ date1: date, sub: { date2: date } }); assert.deepEqual(doc.modifiedPaths(), []); doc.set('date1', date); doc.set('sub.date2', date); assert.deepEqual(doc.modifiedPaths(), []); }); }); it('handles null `fields` param to constructor (gh-7271)', function() { const ActivityBareSchema = new Schema({ _id: { type: Schema.Types.ObjectId, ref: 'Activity', }, name: String }); const EventSchema = new Schema({ activity: ActivityBareSchema, name: String }); const data = { name: 'Test', activity: { _id: '5bf606f6471b6056b3f2bfc9', name: 'Activity name' }, }; const Event = db.model('gh7271', EventSchema); const event = new Event(data, null); assert.equal(event.activity.name, 'Activity name'); return event.validate(); }); it('flattenMaps option for toObject() (gh-7274)', function() { let schema = new Schema({ test: { type: Map, of: String, default: new Map() } }, { versionKey: false }); let Test = db.model('gh7274', schema); let mapTest = new Test({}); mapTest.test.set('key1', 'value1'); assert.equal(mapTest.toObject({ flattenMaps: true }).test.key1, 'value1'); schema = new Schema({ test: { type: Map, of: String, default: new Map() } }, { versionKey: false }); schema.set('toObject', { flattenMaps: true }); db.deleteModel('gh7274'); Test = db.model('gh7274', schema); mapTest = new Test({}); mapTest.test.set('key1', 'value1'); assert.equal(mapTest.toObject({}).test.key1, 'value1'); return Promise.resolve(); }); it('`collection` property with strict: false (gh-7276)', function() { const schema = new Schema({}, { strict: false, versionKey: false }); const Model = db.model('gh7276', schema); return co(function*() { let doc = new Model({ test: 'foo', collection: 'bar' }); yield doc.save(); assert.equal(doc.collection, 'bar'); doc = yield Model.findOne(); assert.equal(doc.toObject().collection, 'bar'); }); }); it('should validateSync() all elements in doc array (gh-6746)', function() { const Model = db.model('gh6746', new Schema({ colors: [{ name: { type: String, required: true }, hex: { type: String, required: true } }] })); const model = new Model({ colors: [ { name: 'steelblue' }, { hex: '#4682B4' } ] }); const errors = model.validateSync().errors; const keys = Object.keys(errors).sort(); assert.deepEqual(keys, ['colors.0.hex', 'colors.1.name']); }); it('handles fake constructor (gh-7290)', function() { const TestSchema = new Schema({ test: String }); const TestModel = db.model('gh7290', TestSchema); const badQuery = { test: { length: 1e10, constructor: { name: 'Array' } } }; return co(function*() { let err = yield TestModel.findOne(badQuery).then(() => null, e => e); assert.equal(err.name, 
'CastError', err.stack); err = yield TestModel.updateOne(badQuery, { name: 'foo' }). then(() => null, err => err); assert.equal(err.name, 'CastError', err.stack); err = yield TestModel.updateOne({}, badQuery).then(() => null, e => e); assert.equal(err.name, 'CastError', err.stack); err = yield TestModel.deleteOne(badQuery).then(() => null, e => e); assert.equal(err.name, 'CastError', err.stack); }); }); it('handles fake __proto__ (gh-7290)', function() { const TestSchema = new Schema({ test: String, name: String }); const TestModel = db.model('gh7290_proto', TestSchema); const badQuery = JSON.parse('{"test":{"length":1000000000,"__proto__":[]}}'); return co(function*() { let err = yield TestModel.findOne(badQuery).then(() => null, e => e); assert.equal(err.name, 'CastError', err.stack); err = yield TestModel.updateOne(badQuery, { name: 'foo' }). then(() => null, err => err); assert.equal(err.name, 'CastError', err.stack); err = yield TestModel.updateOne({}, badQuery).then(() => null, e => e); assert.equal(err.name, 'CastError', err.stack); err = yield TestModel.deleteOne(badQuery).then(() => null, e => e); assert.equal(err.name, 'CastError', err.stack); }); }); it('doesnt crash if nested path with `get()` (gh-7316)', function() { const schema = new mongoose.Schema({ http: { get: Number } }); const Model = db.model('gh7316', schema); return Model.create({ http: { get: 400 } }); // Should succeed }); it('copies atomics from existing document array when setting doc array (gh-7472)', function() { const Dog = db.model('gh7472', new mongoose.Schema({ name: String, toys: [{ name: String }] })); return co(function*() { const dog = new Dog({ name: 'Dash' }); dog.toys.push({ name: '1' }); dog.toys.push({ name: '2' }); dog.toys.push({ name: '3' }); yield dog.save(); for (const toy of ['4', '5', '6']) { dog.toys = dog.toys || []; dog.toys.push({ name: toy, count: 1 }); } yield dog.save(); const fromDb = yield Dog.findOne(); assert.deepEqual(fromDb.toys.map(t => t.name), ['1', '2', '3', '4', '5', '6']); }); }); it('doesnt fail with custom update function (gh-7342)', function() { const catalogSchema = new mongoose.Schema({ name: String, sub: new Schema({ name: String }) }, { runSettersOnQuery: true }); catalogSchema.methods.update = function(data) { for (const key in data) { this[key] = data[key]; } return this.save(); }; const Catalog = db.model('gh7342', catalogSchema); return co(function*() { let doc = yield Catalog.create({ name: 'test', sub: { name: 'foo' } }); doc = yield doc.update({ name: 'test2' }); assert.equal(doc.name, 'test2'); }); }); });
1
13,988
The test name as written was grammatically correct, but now it isn't. 'MongooseError' and 'instance' should both be singular or both be plural.
Automattic-mongoose
js
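A brief illustration of the review comment on this row (id 13,988): the point is grammatical number agreement in a Mocha test name. The test names below are hypothetical — this row's patch is not included in this excerpt — and assume Mocha's global it(), as used throughout the test file above:

// Inconsistent: plural subject ('MongooseErrors') with a singular complement ('is an instance').
it('MongooseErrors is an instance of Error', () => {});

// Consistent, as the reviewer suggests: both singular, or both plural.
it('MongooseError is an instance of Error', () => {});
it('MongooseErrors are instances of Error', () => {});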
@@ -125,8 +125,8 @@ public class SmartStoreInspectorActivity extends Activity { private void readExtras() { Bundle bundle = getIntent().getExtras(); - isGlobal = bundle.getBoolean(IS_GLOBAL_STORE, false); - dbName = bundle.getString(DB_NAME, DBOpenHelper.DEFAULT_DB_NAME); + isGlobal = bundle == null ? false : bundle.getBoolean(IS_GLOBAL_STORE, false); + dbName = bundle == null ? DBOpenHelper.DEFAULT_DB_NAME : bundle.getString(DB_NAME, DBOpenHelper.DEFAULT_DB_NAME); } /**
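Note on the patch above: getIntent().getExtras() returns null whenever an Android activity is launched by an Intent that carries no extras, so the unguarded readExtras() in the old file below throws a NullPointerException in that case; the patch falls back to the defaults instead. A minimal sketch of the same guard in isolation, assuming only the standard Android Activity/Bundle/Intent APIs (the class name and the "default" database name are hypothetical stand-ins; the real file uses DBOpenHelper.DEFAULT_DB_NAME):

import android.app.Activity;
import android.os.Bundle;

public class ExtrasGuardActivity extends Activity {

    // Extras keys mirror the patched file.
    private static final String IS_GLOBAL_STORE = "isGlobalStore";
    private static final String DB_NAME = "dbName";
    // Hypothetical stand-in for DBOpenHelper.DEFAULT_DB_NAME.
    private static final String DEFAULT_DB_NAME = "default";

    private boolean isGlobal;
    private String dbName;

    private void readExtras() {
        // getExtras() returns null when the launching Intent has no extras,
        // so it must be null-checked before the per-key defaults can apply.
        Bundle bundle = getIntent().getExtras();
        isGlobal = bundle == null ? false : bundle.getBoolean(IS_GLOBAL_STORE, false);
        dbName = bundle == null ? DEFAULT_DB_NAME : bundle.getString(DB_NAME, DEFAULT_DB_NAME);
    }
}

For the boolean, bundle != null && bundle.getBoolean(IS_GLOBAL_STORE, false) is an equivalent, slightly more idiomatic form of the same guard.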
1
/*
 * Copyright (c) 2014, salesforce.com, inc.
 * All rights reserved.
 * Redistribution and use of this software in source and binary forms, with or
 * without modification, are permitted provided that the following conditions
 * are met:
 * - Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * - Neither the name of salesforce.com, inc. nor the names of its contributors
 * may be used to endorse or promote products derived from this software without
 * specific prior written permission of salesforce.com, inc.
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
package com.salesforce.androidsdk.smartstore.ui;

import android.app.Activity;
import android.app.AlertDialog;
import android.content.Intent;
import android.os.Bundle;
import android.text.SpannableString;
import android.text.Spanned;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.animation.GridLayoutAnimationController;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.GridView;
import android.widget.MultiAutoCompleteTextView;
import android.widget.MultiAutoCompleteTextView.Tokenizer;

import com.salesforce.androidsdk.smartstore.R;
import com.salesforce.androidsdk.smartstore.app.SalesforceSDKManagerWithSmartStore;
import com.salesforce.androidsdk.smartstore.store.DBOpenHelper;
import com.salesforce.androidsdk.smartstore.store.QuerySpec;
import com.salesforce.androidsdk.smartstore.store.SmartSqlHelper;
import com.salesforce.androidsdk.smartstore.store.SmartStore;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.LinkedList;
import java.util.List;

public class SmartStoreInspectorActivity extends Activity {

    // Keys for extras bundle
    private static final String IS_GLOBAL_STORE = "isGlobalStore";
    private static final String DB_NAME = "dbName";

    // Default page size / index
    private static final int DEFAULT_PAGE_SIZE = 10;
    private static final int DEFAULT_PAGE_INDEX = 0;

    // Store
    private String dbName;
    private boolean isGlobal;
    private SmartStore smartStore;

    // View elements
    private MultiAutoCompleteTextView queryText;
    private EditText pageSizeText;
    private EditText pageIndexText;
    private GridView resultGrid;

    // Test support
    private String lastAlertTitle;
    private String lastAlertMessage;
    private JSONArray lastResults;

    /**
     * Create intent to bring up inspector
     * @param parentActivity
     * @param isGlobal pass true to get an inspector for the default global smartstore
     *                 pass false to get an inspector for the default user smartstore
     * @param dbName
     * @return
     */
    public static Intent getIntent(Activity parentActivity, boolean isGlobal, String dbName) {
        final Bundle bundle = new Bundle();
        bundle.putBoolean(IS_GLOBAL_STORE, isGlobal);
        bundle.putString(DB_NAME, dbName);
        final Intent intent = new Intent(parentActivity, SmartStoreInspectorActivity.class);
        intent.putExtras(bundle);
        return intent;
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        readExtras();
        setContentView(R.layout.sf__inspector);
        queryText = (MultiAutoCompleteTextView) findViewById(R.id.sf__inspector_query_text);
        pageSizeText = (EditText) findViewById(R.id.sf__inspector_pagesize_text);
        pageIndexText = (EditText) findViewById(R.id.sf__inspector_pageindex_text);
        resultGrid = (GridView) findViewById(R.id.sf__inspector_result_grid);
    }

    @Override
    protected void onResume() {
        super.onResume();
        final SalesforceSDKManagerWithSmartStore manager = SalesforceSDKManagerWithSmartStore.getInstance();
        smartStore = isGlobal ? manager.getGlobalSmartStore(dbName)
                : manager.getSmartStore(dbName, manager.getUserAccountManager().getCurrentUser(), null);
        setupAutocomplete(queryText);
    }

    private void readExtras() {
        Bundle bundle = getIntent().getExtras();
        isGlobal = bundle.getBoolean(IS_GLOBAL_STORE, false);
        dbName = bundle.getString(DB_NAME, DBOpenHelper.DEFAULT_DB_NAME);
    }

    /**
     * Called when "Clear" button is clicked
     *
     * @param v
     */
    public void onClearClick(View v) {
        reset();
    }

    /**
     * Reset activity to its original state
     */
    public void reset() {
        queryText.setText("");
        pageSizeText.setText("");
        pageIndexText.setText("");
        resultGrid.setAdapter(null);
        lastAlertTitle = null;
        lastAlertMessage = null;
        lastResults = null;
    }

    /**
     * @return title of last alert shown (used by tests)
     */
    public String getLastAlertTitle() {
        return lastAlertTitle;
    }

    /**
     * @return message of last alert shown (used by tests)
     */
    public String getLastAlertMessage() {
        return lastAlertMessage;
    }

    /**
     * @return last results shown (used by tests)
     */
    public JSONArray getLastResults() {
        return lastResults;
    }

    /**
     * Called when "Run" button is clicked
     *
     * @param v
     */
    public void onRunClick(View v) {
        runQuery();
    }

    /**
     * Called when "Soups" button is clicked
     *
     * @param v
     */
    public void onSoupsClick(View v) {
        List<String> names = smartStore.getAllSoupNames();
        if (names.size() > 10) {
            queryText.setText(getString(R.string.sf__inspector_soups_query));
        } else {
            StringBuilder sb = new StringBuilder();
            boolean first = true;
            for (String name : names) {
                if (!first) sb.append(" union ");
                sb.append("select '");
                sb.append(name);
                sb.append("', count(*) from {");
                sb.append(name);
                sb.append("}");
                first = false;
            }
            queryText.setText(sb.toString());
        }
        runQuery();
    }

    /**
     * Called when "Indices" button is clicked
     *
     * @param v
     */
    public void onIndicesClick(View v) {
        queryText.setText(getString(R.string.sf__inspector_indices_query));
        runQuery();
    }

    /**
     * Helper method that builds query spec from typed query, runs it and
     * updates result grid
     */
    private void runQuery() {
        try {
            String query = queryText.getText().toString();
            if (query.length() == 0) {
                showAlert(null, getString(R.string.sf__inspector_no_query_specified));
                return;
            }
            int pageSize = getInt(pageSizeText, DEFAULT_PAGE_SIZE);
            int pageIndex = getInt(pageIndexText, DEFAULT_PAGE_INDEX);
            QuerySpec querySpec = QuerySpec.buildSmartQuerySpec(query, pageSize);
            showResult(smartStore.query(querySpec, pageIndex));
        } catch (Exception e) {
            showAlert(e.getClass().getSimpleName(), e.getMessage());
        }
    }

    /**
     * Helper function to get integer typed in a text field Returns defaultValue
     * if no integer were typed
     *
     * @param textField
     * @param defaultValue
     * @return
     */
    private int getInt(EditText textField, int defaultValue) {
        String s = textField.getText().toString();
        if (s.length() == 0) {
            return defaultValue;
        } else {
            return Integer.parseInt(s);
        }
    }

    /**
     * Helper method to show an alert
     *
     * @param e
     */
    private void showAlert(String title, String message) {
        lastAlertTitle = title;
        lastAlertMessage = message;
        new AlertDialog.Builder(this).setTitle(title).setMessage(message).show();
    }

    /**
     * Helper method to populate result grid with query result set (expected to
     * be a JSONArray of JSONArray's)
     *
     * @param result
     * @throws JSONException
     */
    private void showResult(JSONArray result) throws JSONException {
        lastResults = result;
        ArrayAdapter<String> adapter = new ArrayAdapter<String>(this, R.layout.sf__inspector_result_cell);
        if (result.length() == 0) {
            showAlert(null, getString(R.string.sf__inspector_no_rows_returned));
        }
        for (int j = 0; j < result.length(); j++) {
            JSONArray row = result.getJSONArray(j);
            for (int i = 0; i < row.length(); i++) {
                Object val = row.get(i);
                adapter.add(val instanceof JSONObject ? ((JSONObject) val).toString(2) : val.toString());
            }
        }
        int numColumns = (result.length() > 0 ? result.getJSONArray(0).length() : 0);
        resultGrid.setNumColumns(numColumns);
        resultGrid.setAdapter(adapter);
        animateGridView(resultGrid);
    }

    /**
     * Helper method to attach animation to grid view
     *
     * @param gridView
     */
    private void animateGridView(GridView gridView) {
        Animation animation = AnimationUtils.loadAnimation(this, android.R.anim.fade_in);
        GridLayoutAnimationController animationController =
                new GridLayoutAnimationController(animation, 0f, 0.1f);
        gridView.setLayoutAnimation(animationController);
        animationController.start();
    }

    /**
     * Helper method to setup auto-complete for query input field
     *
     * @param textView
     */
    private void setupAutocomplete(MultiAutoCompleteTextView textView) {
        ArrayAdapter<String> adapter = new ArrayAdapter<String>(this, android.R.layout.simple_dropdown_item_1line);

        // Adding {soupName} and {soupName:specialField}
        List<String> names = new LinkedList<String>();
        names.addAll(smartStore.getAllSoupNames());
        for (String name : names) {
            adapter.add("{" + name + "}");
            adapter.add("{" + name + ":" + SmartSqlHelper.SOUP + "}");
            adapter.add("{" + name + ":" + SmartStore.SOUP_ENTRY_ID + "}");
            adapter.add("{" + name + ":" + SmartStore.SOUP_LAST_MODIFIED_DATE + "}");
        }

        // Adding {soupName:indexedPath}
        try {
            JSONArray result = smartStore.query(QuerySpec.buildSmartQuerySpec(
                    "SELECT soupName, path FROM soup_index_map", 1000), 0);
            for (int j = 0; j < result.length(); j++) {
                JSONArray row = result.getJSONArray(j);
                adapter.add("{" + row.getString(0) + ":" + row.getString(1) + "}");
            }
        } catch (JSONException e) {
            Log.e("SmartStoreInspector", "getIndices", e);
        }

        // Adding some SQL keywords
        adapter.add("select");
        adapter.add("from");
        adapter.add("where");
        adapter.add("order by");
        adapter.add("asc");
        adapter.add("desc");
        adapter.add("group by");

        textView.setAdapter(adapter);
        textView.setTokenizer(new QueryTokenizer());
    }
}

/**
 * Tokenized used by query auto-complete field
 *
 * @author wmathurin
 *
 */
class QueryTokenizer implements Tokenizer {

    public int findTokenStart(CharSequence text, int cursor) {
        int i = cursor;
        while (i > 0 && text.charAt(i - 1) != ' ') {
            i--;
        }
        return i;
    }

    public int findTokenEnd(CharSequence text, int cursor) {
        int i
= cursor; int len = text.length(); while (i < len) { if (text.charAt(i) == ' ') { return i; } else { i++; } } return len; } public CharSequence terminateToken(CharSequence text) { int i = text.length(); while (i > 0 && text.charAt(i - 1) == ' ') { i--; } if (i > 0 && text.charAt(i - 1) == ' ') { return text; } else { if (text instanceof Spanned) { SpannableString sp = new SpannableString(text + " "); TextUtils.copySpansFrom((Spanned) text, 0, text.length(), Object.class, sp, 0); return sp; } else { return text; } } } }
1
14,765
SmartStoreInspectorTest was throwing an NPE here: `getIntent().getExtras()` returns null when the activity is launched without extras, so both reads need the null guard (see the sketch after this record).
forcedotcom-SalesforceMobileSDK-Android
java
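A minimal sketch of the null-guard this patch applies. The helper class is hypothetical and exists only to make the fragment self-contained; the extras keys and the `DBOpenHelper.DEFAULT_DB_NAME` default come from the file above:

```java
import android.os.Bundle;
import com.salesforce.androidsdk.smartstore.store.DBOpenHelper;

// Mirrors the patched readExtras(): getIntent().getExtras() may return null
// when the activity is launched without extras, so guard before reading.
final class InspectorExtras {
    static final String IS_GLOBAL_STORE = "isGlobalStore";
    static final String DB_NAME = "dbName";

    static boolean isGlobal(Bundle bundle) {
        return bundle == null ? false : bundle.getBoolean(IS_GLOBAL_STORE, false);
    }

    static String dbName(Bundle bundle) {
        return bundle == null
                ? DBOpenHelper.DEFAULT_DB_NAME
                : bundle.getString(DB_NAME, DBOpenHelper.DEFAULT_DB_NAME);
    }
}
```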
@@ -134,9 +134,13 @@ public abstract class DeleteFilter<T> { Iterable<CloseableIterable<Record>> deleteRecords = Iterables.transform(deletes, delete -> openDeletes(delete, deleteSchema)); - StructLikeSet deleteSet = Deletes.toEqualitySet( - // copy the delete records because they will be held in a set - CloseableIterable.transform(CloseableIterable.concat(deleteRecords), Record::copy), + + // copy the delete records because they will be held in a set + CloseableIterable<Record> records = CloseableIterable.transform(CloseableIterable.concat(deleteRecords), + Record::copy); + + StructLikeSet deleteSet = Deletes.toEqualitySet(CloseableIterable.transform(records, + record -> new InternalRecordWrapper(deleteSchema.asStruct()).wrap(record)), deleteSchema.asStruct()); Predicate<T> isInDeleteSet = record -> deleteSet.contains(projectRow.wrap(asStructLike(record)));
1
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iceberg.data; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Predicate; import org.apache.iceberg.Accessor; import org.apache.iceberg.DataFile; import org.apache.iceberg.DeleteFile; import org.apache.iceberg.FileContent; import org.apache.iceberg.FileScanTask; import org.apache.iceberg.MetadataColumns; import org.apache.iceberg.Schema; import org.apache.iceberg.StructLike; import org.apache.iceberg.avro.Avro; import org.apache.iceberg.data.avro.DataReader; import org.apache.iceberg.data.parquet.GenericParquetReaders; import org.apache.iceberg.deletes.Deletes; import org.apache.iceberg.expressions.Expressions; import org.apache.iceberg.io.CloseableIterable; import org.apache.iceberg.io.InputFile; import org.apache.iceberg.parquet.Parquet; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; import org.apache.iceberg.relocated.com.google.common.collect.Iterables; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.relocated.com.google.common.collect.Multimap; import org.apache.iceberg.relocated.com.google.common.collect.Multimaps; import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.types.TypeUtil; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.Filter; import org.apache.iceberg.util.StructLikeSet; import org.apache.iceberg.util.StructProjection; import org.apache.parquet.Preconditions; public abstract class DeleteFilter<T> { private static final long DEFAULT_SET_FILTER_THRESHOLD = 100_000L; private static final Schema POS_DELETE_SCHEMA = new Schema( MetadataColumns.DELETE_FILE_PATH, MetadataColumns.DELETE_FILE_POS); private final long setFilterThreshold; private final DataFile dataFile; private final List<DeleteFile> posDeletes; private final List<DeleteFile> eqDeletes; private final Schema requiredSchema; private final Accessor<StructLike> posAccessor; protected DeleteFilter(FileScanTask task, Schema tableSchema, Schema requestedSchema) { this.setFilterThreshold = DEFAULT_SET_FILTER_THRESHOLD; this.dataFile = task.file(); ImmutableList.Builder<DeleteFile> posDeleteBuilder = ImmutableList.builder(); ImmutableList.Builder<DeleteFile> eqDeleteBuilder = ImmutableList.builder(); for (DeleteFile delete : task.deletes()) { switch (delete.content()) { case POSITION_DELETES: posDeleteBuilder.add(delete); break; case EQUALITY_DELETES: eqDeleteBuilder.add(delete); break; default: throw new UnsupportedOperationException("Unknown delete file content: " + delete.content()); } } this.posDeletes = posDeleteBuilder.build(); this.eqDeletes = 
eqDeleteBuilder.build(); this.requiredSchema = fileProjection(tableSchema, requestedSchema, posDeletes, eqDeletes); this.posAccessor = requiredSchema.accessorForField(MetadataColumns.ROW_POSITION.fieldId()); } public Schema requiredSchema() { return requiredSchema; } Accessor<StructLike> posAccessor() { return posAccessor; } protected abstract StructLike asStructLike(T record); protected abstract InputFile getInputFile(String location); protected long pos(T record) { return (Long) posAccessor.get(asStructLike(record)); } public CloseableIterable<T> filter(CloseableIterable<T> records) { return applyEqDeletes(applyPosDeletes(records)); } private List<Predicate<T>> applyEqDeletes() { List<Predicate<T>> isInDeleteSets = Lists.newArrayList(); if (eqDeletes.isEmpty()) { return isInDeleteSets; } Multimap<Set<Integer>, DeleteFile> filesByDeleteIds = Multimaps.newMultimap(Maps.newHashMap(), Lists::newArrayList); for (DeleteFile delete : eqDeletes) { filesByDeleteIds.put(Sets.newHashSet(delete.equalityFieldIds()), delete); } for (Map.Entry<Set<Integer>, Collection<DeleteFile>> entry : filesByDeleteIds.asMap().entrySet()) { Set<Integer> ids = entry.getKey(); Iterable<DeleteFile> deletes = entry.getValue(); Schema deleteSchema = TypeUtil.select(requiredSchema, ids); // a projection to select and reorder fields of the file schema to match the delete rows StructProjection projectRow = StructProjection.create(requiredSchema, deleteSchema); Iterable<CloseableIterable<Record>> deleteRecords = Iterables.transform(deletes, delete -> openDeletes(delete, deleteSchema)); StructLikeSet deleteSet = Deletes.toEqualitySet( // copy the delete records because they will be held in a set CloseableIterable.transform(CloseableIterable.concat(deleteRecords), Record::copy), deleteSchema.asStruct()); Predicate<T> isInDeleteSet = record -> deleteSet.contains(projectRow.wrap(asStructLike(record))); isInDeleteSets.add(isInDeleteSet); } return isInDeleteSets; } public CloseableIterable<T> findEqualityDeleteRows(CloseableIterable<T> records) { // Predicate to test whether a row has been deleted by equality deletions. Predicate<T> deletedRows = applyEqDeletes().stream() .reduce(Predicate::or) .orElse(t -> false); Filter<T> deletedRowsFilter = new Filter<T>() { @Override protected boolean shouldKeep(T item) { return deletedRows.test(item); } }; return deletedRowsFilter.filter(records); } private CloseableIterable<T> applyEqDeletes(CloseableIterable<T> records) { // Predicate to test whether a row should be visible to user after applying equality deletions. 
Predicate<T> remainingRows = applyEqDeletes().stream() .map(Predicate::negate) .reduce(Predicate::and) .orElse(t -> true); Filter<T> remainingRowsFilter = new Filter<T>() { @Override protected boolean shouldKeep(T item) { return remainingRows.test(item); } }; return remainingRowsFilter.filter(records); } private CloseableIterable<T> applyPosDeletes(CloseableIterable<T> records) { if (posDeletes.isEmpty()) { return records; } List<CloseableIterable<Record>> deletes = Lists.transform(posDeletes, this::openPosDeletes); // if there are fewer deletes than a reasonable number to keep in memory, use a set if (posDeletes.stream().mapToLong(DeleteFile::recordCount).sum() < setFilterThreshold) { return Deletes.filter( records, this::pos, Deletes.toPositionSet(dataFile.path(), CloseableIterable.concat(deletes))); } return Deletes.streamingFilter(records, this::pos, Deletes.deletePositions(dataFile.path(), deletes)); } private CloseableIterable<Record> openPosDeletes(DeleteFile file) { return openDeletes(file, POS_DELETE_SCHEMA); } private CloseableIterable<Record> openDeletes(DeleteFile deleteFile, Schema deleteSchema) { InputFile input = getInputFile(deleteFile.path().toString()); switch (deleteFile.format()) { case AVRO: return Avro.read(input) .project(deleteSchema) .reuseContainers() .createReaderFunc(DataReader::create) .build(); case PARQUET: Parquet.ReadBuilder builder = Parquet.read(input) .project(deleteSchema) .reuseContainers() .createReaderFunc(fileSchema -> GenericParquetReaders.buildReader(deleteSchema, fileSchema)); if (deleteFile.content() == FileContent.POSITION_DELETES) { builder.filter(Expressions.equal(MetadataColumns.DELETE_FILE_PATH.name(), dataFile.path())); } return builder.build(); case ORC: default: throw new UnsupportedOperationException(String.format( "Cannot read deletes, %s is not a supported format: %s", deleteFile.format().name(), deleteFile.path())); } } private static Schema fileProjection(Schema tableSchema, Schema requestedSchema, List<DeleteFile> posDeletes, List<DeleteFile> eqDeletes) { if (posDeletes.isEmpty() && eqDeletes.isEmpty()) { return requestedSchema; } Set<Integer> requiredIds = Sets.newLinkedHashSet(); if (!posDeletes.isEmpty()) { requiredIds.add(MetadataColumns.ROW_POSITION.fieldId()); } for (DeleteFile eqDelete : eqDeletes) { requiredIds.addAll(eqDelete.equalityFieldIds()); } requiredIds.add(MetadataColumns.IS_DELETED.fieldId()); Set<Integer> missingIds = Sets.newLinkedHashSet( Sets.difference(requiredIds, TypeUtil.getProjectedIds(requestedSchema))); if (missingIds.isEmpty()) { return requestedSchema; } // TODO: support adding nested columns. this will currently fail when finding nested columns to add List<Types.NestedField> columns = Lists.newArrayList(requestedSchema.columns()); for (int fieldId : missingIds) { if (fieldId == MetadataColumns.ROW_POSITION.fieldId() || fieldId == MetadataColumns.IS_DELETED.fieldId()) { continue; // add _pos and _deleted at the end } Types.NestedField field = tableSchema.asStruct().field(fieldId); Preconditions.checkArgument(field != null, "Cannot find required field for ID %s", fieldId); columns.add(field); } if (missingIds.contains(MetadataColumns.ROW_POSITION.fieldId())) { columns.add(MetadataColumns.ROW_POSITION); } if (missingIds.contains(MetadataColumns.IS_DELETED.fieldId())) { columns.add(MetadataColumns.IS_DELETED); } return new Schema(columns); } }
1
42,111
We prefer line wrapping that keeps arguments to the same method aligned, rather than aligning arguments at different levels. Here, the lambda that creates an `InternalRecordWrapper` is an argument to `transform`, but it is aligned with `deleteSchema.asStruct()`, which is an argument to the outer `toEqualitySet` call. Instead, can you add a newline for each argument to `toEqualitySet`? If the line with the lambda is too long, you can also break it onto a new line indented past the start of the `transform` line, so it is clear that it is an argument to `transform` and not to `toEqualitySet` (see the sketch after this record).
apache-iceberg
java
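A formatting sketch of the wrapping the comment asks for, reusing the names from the patch; this is an excerpt of a method body, not a compilable unit on its own:

```java
// Each argument to toEqualitySet gets its own line; the over-long transform()
// call continues on a line indented past "transform", so the lambda visibly
// belongs to transform rather than to toEqualitySet.
StructLikeSet deleteSet = Deletes.toEqualitySet(
    CloseableIterable.transform(records,
        record -> new InternalRecordWrapper(deleteSchema.asStruct()).wrap(record)),
    deleteSchema.asStruct());
```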
@@ -173,6 +173,10 @@ import ServerConnections from '../ServerConnections'; value: session.TranscodingInfo.TranscodeReasons.map(translateReason).join('<br/>') }); } + sessionStats.push({ + label: globalize.translate('LabelHardwareEncoding'), + value: session.TranscodingInfo.HardwareAccelerationType + }); } return sessionStats;
1
import { Events } from 'jellyfin-apiclient'; import '../../elements/emby-button/paper-icon-button-light'; import globalize from '../../scripts/globalize'; import layoutManager from '../layoutManager'; import { playbackManager } from '../playback/playbackmanager'; import playMethodHelper from '../playback/playmethodhelper'; import SyncPlay from '../../components/syncPlay/core'; import './playerstats.scss'; import ServerConnections from '../ServerConnections'; /* eslint-disable indent */ function init(instance) { const parent = document.createElement('div'); parent.classList.add('playerStats'); if (layoutManager.tv) { parent.classList.add('playerStats-tv'); } parent.classList.add('hide'); let button; if (layoutManager.tv) { button = ''; } else { button = '<button type="button" is="paper-icon-button-light" class="playerStats-closeButton"><span class="material-icons close"></span></button>'; } const contentClass = layoutManager.tv ? 'playerStats-content playerStats-content-tv' : 'playerStats-content'; parent.innerHTML = '<div class="' + contentClass + '">' + button + '<div class="playerStats-stats"></div></div>'; button = parent.querySelector('.playerStats-closeButton'); if (button) { button.addEventListener('click', onCloseButtonClick.bind(instance)); } document.body.appendChild(parent); instance.element = parent; } function onCloseButtonClick() { this.enabled(false); } function renderStats(elem, categories) { elem.querySelector('.playerStats-stats').innerHTML = categories.map(function (category) { let categoryHtml = ''; const stats = category.stats; if (stats.length && category.name) { categoryHtml += '<div class="playerStats-stat playerStats-stat-header">'; categoryHtml += '<div class="playerStats-stat-label">'; categoryHtml += category.name; categoryHtml += '</div>'; categoryHtml += '<div class="playerStats-stat-value">'; categoryHtml += category.subText || ''; categoryHtml += '</div>'; categoryHtml += '</div>'; } for (let i = 0, length = stats.length; i < length; i++) { categoryHtml += '<div class="playerStats-stat">'; const stat = stats[i]; categoryHtml += '<div class="playerStats-stat-label">'; categoryHtml += stat.label; categoryHtml += '</div>'; categoryHtml += '<div class="playerStats-stat-value">'; categoryHtml += stat.value; categoryHtml += '</div>'; categoryHtml += '</div>'; } return categoryHtml; }).join(''); } function getSession(instance, player) { const now = new Date().getTime(); if ((now - (instance.lastSessionTime || 0)) < 10000) { return Promise.resolve(instance.lastSession); } const apiClient = ServerConnections.getApiClient(playbackManager.currentItem(player).ServerId); return apiClient.getSessions({ deviceId: apiClient.deviceId() }).then(function (sessions) { instance.lastSession = sessions[0] || {}; instance.lastSessionTime = new Date().getTime(); return Promise.resolve(instance.lastSession); }, function () { return Promise.resolve({}); }); } function translateReason(reason) { return globalize.translate('' + reason); } function getTranscodingStats(session, player, displayPlayMethod) { const sessionStats = []; let videoCodec; let audioCodec; let totalBitrate; let audioChannels; if (session.TranscodingInfo) { videoCodec = session.TranscodingInfo.VideoCodec; audioCodec = session.TranscodingInfo.AudioCodec; totalBitrate = session.TranscodingInfo.Bitrate; audioChannels = session.TranscodingInfo.AudioChannels; } if (videoCodec) { sessionStats.push({ label: globalize.translate('LabelVideoCodec'), value: session.TranscodingInfo.IsVideoDirect ? 
(videoCodec.toUpperCase() + ' (direct)') : videoCodec.toUpperCase() }); } if (audioCodec) { sessionStats.push({ label: globalize.translate('LabelAudioCodec'), value: session.TranscodingInfo.IsAudioDirect ? (audioCodec.toUpperCase() + ' (direct)') : audioCodec.toUpperCase() }); } if (displayPlayMethod === 'Transcode') { if (audioChannels) { sessionStats.push({ label: globalize.translate('LabelAudioChannels'), value: audioChannels }); } if (totalBitrate) { sessionStats.push({ label: globalize.translate('LabelBitrate'), value: getDisplayBitrate(totalBitrate) }); } if (session.TranscodingInfo.CompletionPercentage) { sessionStats.push({ label: globalize.translate('LabelTranscodingProgress'), value: session.TranscodingInfo.CompletionPercentage.toFixed(1) + '%' }); } if (session.TranscodingInfo.Framerate) { sessionStats.push({ label: globalize.translate('LabelTranscodingFramerate'), value: session.TranscodingInfo.Framerate + ' fps' }); } if (session.TranscodingInfo.TranscodeReasons && session.TranscodingInfo.TranscodeReasons.length) { sessionStats.push({ label: globalize.translate('LabelReasonForTranscoding'), value: session.TranscodingInfo.TranscodeReasons.map(translateReason).join('<br/>') }); } } return sessionStats; } function getDisplayBitrate(bitrate) { if (bitrate > 1000000) { return (bitrate / 1000000).toFixed(1) + ' Mbps'; } else { return Math.floor(bitrate / 1000) + ' kbps'; } } function getReadableSize(size) { if (size >= 1073741824) { return parseFloat((size / 1073741824).toFixed(1)) + ' GiB'; } else if (size >= 1048576) { return parseFloat((size / 1048576).toFixed(1)) + ' MiB'; } else { return Math.floor(size / 1024) + ' KiB'; } } function getMediaSourceStats(session, player) { const sessionStats = []; const mediaSource = playbackManager.currentMediaSource(player) || {}; const totalBitrate = mediaSource.Bitrate; const mediaFileSize = mediaSource.Size; if (mediaSource.Container) { sessionStats.push({ label: globalize.translate('LabelProfileContainer'), value: mediaSource.Container }); } if (mediaFileSize) { sessionStats.push({ label: globalize.translate('LabelSize'), value: getReadableSize(mediaFileSize) }); } if (totalBitrate) { sessionStats.push({ label: globalize.translate('LabelBitrate'), value: getDisplayBitrate(totalBitrate) }); } const mediaStreams = mediaSource.MediaStreams || []; const videoStream = mediaStreams.filter(function (s) { return s.Type === 'Video'; })[0] || {}; const videoCodec = videoStream.Codec; const audioStreamIndex = playbackManager.getAudioStreamIndex(player); const audioStream = playbackManager.audioTracks(player).filter(function (s) { return s.Type === 'Audio' && s.Index === audioStreamIndex; })[0] || {}; const audioCodec = audioStream.Codec; const audioChannels = audioStream.Channels; const videoInfos = []; if (videoCodec) { videoInfos.push(videoCodec.toUpperCase()); } if (videoStream.Profile) { videoInfos.push(videoStream.Profile); } if (videoInfos.length) { sessionStats.push({ label: globalize.translate('LabelVideoCodec'), value: videoInfos.join(' ') }); } if (videoStream.BitRate) { sessionStats.push({ label: globalize.translate('LabelVideoBitrate'), value: getDisplayBitrate(videoStream.BitRate) }); } if (videoStream.VideoRange) { sessionStats.push({ label: globalize.translate('LabelVideoRange'), value: videoStream.VideoRange }); } if (videoStream.ColorSpace) { sessionStats.push({ label: globalize.translate('LabelColorSpace'), value: videoStream.ColorSpace }); } if (videoStream.ColorTransfer) { sessionStats.push({ label: 
globalize.translate('LabelColorTransfer'), value: videoStream.ColorTransfer }); } if (videoStream.ColorPrimaries) { sessionStats.push({ label: globalize.translate('LabelColorPrimaries'), value: videoStream.ColorPrimaries }); } const audioInfos = []; if (audioCodec) { audioInfos.push(audioCodec.toUpperCase()); } if (audioStream.Profile) { audioInfos.push(audioStream.Profile); } if (audioInfos.length) { sessionStats.push({ label: globalize.translate('LabelAudioCodec'), value: audioInfos.join(' ') }); } if (audioStream.BitRate) { sessionStats.push({ label: globalize.translate('LabelAudioBitrate'), value: getDisplayBitrate(audioStream.BitRate) }); } if (audioChannels) { sessionStats.push({ label: globalize.translate('LabelAudioChannels'), value: audioChannels }); } if (audioStream.SampleRate) { sessionStats.push({ label: globalize.translate('LabelAudioSampleRate'), value: audioStream.SampleRate + ' Hz' }); } if (audioStream.BitDepth) { sessionStats.push({ label: globalize.translate('LabelAudioBitDepth'), value: audioStream.BitDepth }); } return sessionStats; } function getSyncPlayStats() { const syncStats = []; const stats = SyncPlay.Manager.getStats(); syncStats.push({ label: globalize.translate('LabelSyncPlayTimeSyncDevice'), value: stats.TimeSyncDevice }); syncStats.push({ // TODO: clean old string 'LabelSyncPlayTimeOffset' from translations. label: globalize.translate('LabelSyncPlayTimeSyncOffset'), value: stats.TimeSyncOffset + ' ' + globalize.translate('MillisecondsUnit') }); syncStats.push({ label: globalize.translate('LabelSyncPlayPlaybackDiff'), value: stats.PlaybackDiff + ' ' + globalize.translate('MillisecondsUnit') }); syncStats.push({ label: globalize.translate('LabelSyncPlaySyncMethod'), value: stats.SyncMethod }); return syncStats; } function getStats(instance, player) { const statsPromise = player.getStats ? 
player.getStats() : Promise.resolve({}); const sessionPromise = getSession(instance, player); return Promise.all([statsPromise, sessionPromise]).then(function (responses) { const playerStatsResult = responses[0]; const playerStats = playerStatsResult.categories || []; const session = responses[1]; const displayPlayMethod = playMethodHelper.getDisplayPlayMethod(session); let localizedDisplayMethod = displayPlayMethod; if (displayPlayMethod === 'DirectPlay') { localizedDisplayMethod = globalize.translate('DirectPlaying'); } else if (displayPlayMethod === 'Remux') { localizedDisplayMethod = globalize.translate('Remuxing'); } else if (displayPlayMethod === 'DirectStream') { localizedDisplayMethod = globalize.translate('DirectStreaming'); } else if (displayPlayMethod === 'Transcode') { localizedDisplayMethod = globalize.translate('Transcoding'); } const baseCategory = { stats: [], name: globalize.translate('LabelPlaybackInfo') }; baseCategory.stats.unshift({ label: globalize.translate('LabelPlayMethod'), value: localizedDisplayMethod }); baseCategory.stats.unshift({ label: globalize.translate('LabelPlayer'), value: player.name }); const categories = []; categories.push(baseCategory); for (let i = 0, length = playerStats.length; i < length; i++) { const category = playerStats[i]; if (category.type === 'audio') { category.name = globalize.translate('LabelAudioInfo'); } else if (category.type === 'video') { category.name = globalize.translate('LabelVideoInfo'); } categories.push(category); } let localizedTranscodingInfo = globalize.translate('LabelTranscodingInfo'); if (displayPlayMethod === 'Remux') { localizedTranscodingInfo = globalize.translate('LabelRemuxingInfo'); } else if (displayPlayMethod === 'DirectStream') { localizedTranscodingInfo = globalize.translate('LabelDirectStreamingInfo'); } if (session.TranscodingInfo) { categories.push({ stats: getTranscodingStats(session, player, displayPlayMethod), name: localizedTranscodingInfo }); } categories.push({ stats: getMediaSourceStats(session, player), name: globalize.translate('LabelOriginalMediaInfo') }); const apiClient = ServerConnections.getApiClient(playbackManager.currentItem(player).ServerId); if (SyncPlay.Manager.isSyncPlayEnabled() && apiClient.isMinServerVersion('10.6.0')) { categories.push({ stats: getSyncPlayStats(), name: globalize.translate('LabelSyncPlayInfo') }); } return Promise.resolve(categories); }); } function renderPlayerStats(instance, player) { const now = new Date().getTime(); if ((now - (instance.lastRender || 0)) < 700) { return; } instance.lastRender = now; getStats(instance, player).then(function (stats) { const elem = instance.element; if (!elem) { return; } renderStats(elem, stats); }); } function bindEvents(instance, player) { const localOnTimeUpdate = function () { renderPlayerStats(instance, player); }; instance.onTimeUpdate = localOnTimeUpdate; Events.on(player, 'timeupdate', localOnTimeUpdate); } function unbindEvents(instance, player) { const localOnTimeUpdate = instance.onTimeUpdate; if (localOnTimeUpdate) { Events.off(player, 'timeupdate', localOnTimeUpdate); } } class PlayerStats { constructor(options) { this.options = options; init(this); this.enabled(true); } enabled(enabled) { if (enabled == null) { return this._enabled; } const options = this.options; if (!options) { return; } this._enabled = enabled; if (enabled) { this.element.classList.remove('hide'); bindEvents(this, options.player); } else { this.element.classList.add('hide'); unbindEvents(this, options.player); } } toggle() { 
this.enabled(!this.enabled()); } destroy() { const options = this.options; if (options) { this.options = null; unbindEvents(this, options.player); } const elem = this.element; if (elem) { elem.parentNode.removeChild(elem); this.element = null; } } } /* eslint-enable indent */ export default PlayerStats;
1
19,345
I think it would be a good idea to wrap this in an `if` like the stats above, in case `HardwareAccelerationType` is missing (see the sketch after this record).
jellyfin-jellyfin-web
js
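A sketch of the guarded form the comment suggests, matching the pattern of the other stats in the file; `session`, `sessionStats`, and `globalize` are the surrounding names from `getTranscodingStats`:

```js
// Only emit the row when the session actually reports a hardware
// acceleration type, like the other TranscodingInfo stats above.
if (session.TranscodingInfo.HardwareAccelerationType) {
    sessionStats.push({
        label: globalize.translate('LabelHardwareEncoding'),
        value: session.TranscodingInfo.HardwareAccelerationType
    });
}
```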
@@ -289,6 +289,9 @@ public class LFMainActivity extends SharedMediaActivity { mediaAdapter.notifyItemChanged(toggleSelectPhoto(m)); editMode = true; } + else { + selectAllPhotosUpToFav(getImagePosition(m.getPath())); + } } else selectAllPhotosUpTo(getImagePosition(m.getPath()), mediaAdapter); return true; }
1
package org.fossasia.phimpme.gallery.activities; import android.animation.Animator; import android.annotation.TargetApi; import android.content.ContentResolver; import android.content.ContentUris; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.res.ColorStateList; import android.content.res.Configuration; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Color; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.drawable.Drawable; import android.media.MediaScannerConnection; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.provider.MediaStore; import android.support.annotation.NonNull; import android.support.design.widget.AppBarLayout; import android.support.design.widget.BottomNavigationView; import android.support.design.widget.FloatingActionButton; import android.support.design.widget.Snackbar; import android.support.v4.app.ActivityOptionsCompat; import android.support.v4.content.ContextCompat; import android.support.v4.view.GravityCompat; import android.support.v4.view.MenuItemCompat; import android.support.v4.widget.DrawerLayout; import android.support.v4.widget.SwipeRefreshLayout; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AlertDialog; import android.support.v7.widget.CardView; import android.support.v7.widget.DefaultItemAnimator; import android.support.v7.widget.GridLayoutManager; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.SearchView; import android.support.v7.widget.SwitchCompat; import android.support.v7.widget.Toolbar; import android.text.Editable; import android.text.Html; import android.text.TextUtils; import android.text.TextWatcher; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import android.view.MotionEvent; import android.view.ScaleGestureDetector; import android.view.View; import android.view.ViewAnimationUtils; import android.view.WindowManager; import android.view.inputmethod.InputMethodManager; import android.webkit.MimeTypeMap; import android.widget.CompoundButton; import android.widget.EditText; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.RadioButton; import android.widget.RadioGroup; import android.widget.ScrollView; import android.widget.SeekBar; import android.widget.Spinner; import android.widget.TextView; import com.bumptech.glide.gifencoder.AnimatedGifEncoder; import com.mikepenz.google_material_typeface_library.GoogleMaterial; import com.mikepenz.iconics.view.IconicsImageView; import org.fossasia.phimpme.R; import org.fossasia.phimpme.base.SharedMediaActivity; import org.fossasia.phimpme.data.local.FavouriteImagesModel; import org.fossasia.phimpme.data.local.ImageDescModel; import org.fossasia.phimpme.data.local.TrashBinRealmModel; import org.fossasia.phimpme.data.local.UploadHistoryRealmModel; import org.fossasia.phimpme.gallery.SelectAlbumBottomSheet; import org.fossasia.phimpme.gallery.adapters.AlbumsAdapter; import org.fossasia.phimpme.gallery.adapters.MediaAdapter; import org.fossasia.phimpme.gallery.data.Album; import org.fossasia.phimpme.gallery.data.CustomAlbumsHelper; import 
org.fossasia.phimpme.gallery.data.HandlingAlbums; import org.fossasia.phimpme.gallery.data.Media; import org.fossasia.phimpme.gallery.data.base.ImageFileFilter; import org.fossasia.phimpme.gallery.data.base.MediaComparators; import org.fossasia.phimpme.gallery.data.base.SortingMode; import org.fossasia.phimpme.gallery.data.base.SortingOrder; import org.fossasia.phimpme.gallery.data.providers.MediaStoreProvider; import org.fossasia.phimpme.gallery.data.providers.StorageProvider; import org.fossasia.phimpme.gallery.util.Affix; import org.fossasia.phimpme.gallery.util.AlertDialogsHelper; import org.fossasia.phimpme.gallery.util.ContentHelper; import org.fossasia.phimpme.gallery.util.Measure; import org.fossasia.phimpme.gallery.util.PreferenceUtil; import org.fossasia.phimpme.gallery.util.SecurityHelper; import org.fossasia.phimpme.gallery.util.StringUtils; import org.fossasia.phimpme.gallery.views.CustomScrollBarRecyclerView; import org.fossasia.phimpme.gallery.views.GridSpacingItemDecoration; import org.fossasia.phimpme.trashbin.TrashBinActivity; import org.fossasia.phimpme.uploadhistory.UploadHistory; import org.fossasia.phimpme.utilities.ActivitySwitchHelper; import org.fossasia.phimpme.utilities.Constants; import org.fossasia.phimpme.utilities.NotificationHandler; import org.fossasia.phimpme.utilities.SnackBarHandler; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.lang.ref.WeakReference; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.Date; import java.util.Locale; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; import butterknife.BindView; import butterknife.ButterKnife; import io.realm.Realm; import io.realm.RealmQuery; import io.realm.RealmResults; import static org.fossasia.phimpme.gallery.data.base.SortingMode.DATE; import static org.fossasia.phimpme.gallery.data.base.SortingMode.NAME; import static org.fossasia.phimpme.gallery.data.base.SortingMode.NUMERIC; import static org.fossasia.phimpme.gallery.data.base.SortingMode.SIZE; import static org.fossasia.phimpme.gallery.util.ThemeHelper.LIGHT_THEME; import static org.fossasia.phimpme.utilities.ActivitySwitchHelper.context; public class LFMainActivity extends SharedMediaActivity { private static String TAG = "AlbumsAct"; private LFMainActivity activityContext; private int REQUEST_CODE_SD_CARD_PERMISSIONS = 42; private static final int BUFFER = 80000; private boolean about = false, settings = false, uploadHistory = false, favourites = false, trashbin = false; private CustomAlbumsHelper customAlbumsHelper = CustomAlbumsHelper.getInstance(LFMainActivity.this); private PreferenceUtil SP; private SecurityHelper securityObj; private AlbumsAdapter albumsAdapter; private GridSpacingItemDecoration rvAlbumsDecoration; private SwipeRefreshLayout.OnRefreshListener refreshListener; private MediaAdapter mediaAdapter; private GridSpacingItemDecoration rvMediaDecoration; private SelectAlbumBottomSheet bottomSheetDialogFragment; private BottomNavigationView navigationView; private boolean hidden = false, pickMode = false, editMode = false, albumsMode = true, firstLaunch = true, localFolder = true, hidenav = false; //to handle pinch gesture private ScaleGestureDetector mScaleGestureDetector; //To 
handle all photos/Album conditions public boolean all_photos = false; private boolean checkForReveal = true; final String REVIEW_ACTION = "com.android.camera.action.REVIEW"; public static ArrayList<Media> listAll; public int size; public int pos; ArrayList<String> path; private ArrayList<Media> media; private ArrayList<Media> selectedMedias = new ArrayList<>(); private ArrayList<Media> selectedAlbumMedia = new ArrayList<>(); public boolean visible; private ArrayList<Album> albList; //To handle favourite collection private Realm realm; private ArrayList<Media> favouriteslist; public boolean fav_photos = false; private IconicsImageView favicon; private CustomScrollBarRecyclerView rvAlbums; private CustomScrollBarRecyclerView rvMedia; // To handle back pressed boolean doubleBackToExitPressedOnce = false; private boolean fromOnClick = false; // Binding various views with Butterknife private SearchView searchView; @BindView(R.id.toolbar) protected Toolbar toolbar; @BindView(R.id.swipeRefreshLayout) protected SwipeRefreshLayout swipeRefreshLayout; @BindView(R.id.drawer_layout) protected DrawerLayout mDrawerLayout; @BindView(R.id.fab_scroll_up) protected FloatingActionButton fabScrollUp; @BindView(R.id.Drawer_Setting_Item) protected TextView drawerSettingText; @BindView(R.id.Drawer_About_Item) protected TextView drawerAboutText; @BindView(R.id.Drawer_share_Item) protected TextView drawerShareText; @BindView(R.id.Drawer_rate_Item) protected TextView drawerRateText; @BindView(R.id.Drawer_Upload_Item) protected TextView drawerUploadText; @BindView(R.id.Drawer_TrashBin_Item) protected TextView drawerTrashText; @BindView(R.id.Drawer_Setting_Icon) protected IconicsImageView drawerSettingIcon; @BindView(R.id.Drawer_About_Icon) protected IconicsImageView drawerAboutIcon; @BindView(R.id.Drawer_share_Icon) protected IconicsImageView drawerShareIcon; @BindView(R.id.Drawer_rate_Icon) protected IconicsImageView drawerRateIcon; @BindView(R.id.Drawer_Upload_Icon) protected IconicsImageView drawerUploadIcon; @BindView(R.id.Drawer_trashbin_Icon) protected IconicsImageView drawerTrashIcon; @BindView(R.id.drawer_scrollbar) protected ScrollView scrollView; @BindView(R.id.appbar_toolbar) protected View toolbari; @BindView(R.id.nothing_to_show) protected TextView nothingToShow; @BindView(R.id.no_search_results) protected TextView textView; @BindView(R.id.Drawer_Default_Icon) protected IconicsImageView defaultIcon; @BindView(R.id.Drawer_hidden_Icon) protected IconicsImageView hiddenIcon; @BindView(R.id.Drawer_Default_Item) protected TextView defaultText; @BindView(R.id.Drawer_hidden_Item) protected TextView hiddenText; @BindView(R.id.star_image_view) protected ImageView starImageView; /* editMode- When true, user can select items by clicking on them one by one */ /** * Handles long clicks on photos. * If first long click on photo (editMode = false), go into selection mode and set editMode = true. * If not first long click, means that already in selection mode- s0 select all photos upto chosen one. 
*/ private View.OnLongClickListener photosOnLongClickListener = new View.OnLongClickListener() { @Override public boolean onLongClick(View v) { if (checkForReveal) { enterReveal(); checkForReveal = false; } Media m = (Media) v.findViewById(R.id.photo_path).getTag(); //If first long press, turn on selection mode hideNavigationBar(); hidenav = true; if (!all_photos && !fav_photos) { appBarOverlay(); if (!editMode) { mediaAdapter.notifyItemChanged(getAlbum().toggleSelectPhoto(m)); editMode = true; } else getAlbum().selectAllPhotosUpTo(getAlbum().getIndex(m), mediaAdapter); invalidateOptionsMenu(); } else if (all_photos && !fav_photos) { if (!editMode) { mediaAdapter.notifyItemChanged(toggleSelectPhoto(m)); editMode = true; } } else if (fav_photos && !all_photos) { if (!editMode) { mediaAdapter.notifyItemChanged(toggleSelectPhoto(m)); editMode = true; } } else selectAllPhotosUpTo(getImagePosition(m.getPath()), mediaAdapter); return true; } }; /** * Helper method for making reveal animation for toolbar when any item is selected by long click. */ private void enterReveal() { // get the center for the clipping circle int cx = toolbari.getMeasuredWidth() / 2; int cy = toolbari.getMeasuredHeight() / 2; // get the final radius for the clipping circle int finalRadius = Math.max(toolbari.getWidth(), toolbari.getHeight()) / 2; // create the animator for this view Animator anim = ViewAnimationUtils.createCircularReveal(toolbari, cx, cy, 5, finalRadius); anim.start(); } /** * Helper method for making reveal animation for toolbar when back is presses in edit mode. */ private void exitReveal() { // get the center for the clipping circle int cx = toolbari.getMeasuredWidth() / 2; int cy = toolbari.getMeasuredHeight() / 2; // get the final radius for the clipping circle int finalRadius = Math.max(toolbari.getWidth(), toolbari.getHeight()) / 2; // create the animator for this view Animator anim = ViewAnimationUtils.createCircularReveal(toolbari, cx, cy, finalRadius, 5); anim.start(); } private int toggleSelectPhoto(Media m) { if (m != null) { m.setSelected(!m.isSelected()); if (m.isSelected()) selectedMedias.add(m); else selectedMedias.remove(m); } if (selectedMedias.size() == 0) { getNavigationBar(); editMode = false; toolbar.setTitle(getString(R.string.all)); } else { if (!fav_photos) { toolbar.setTitle(selectedMedias.size() + "/" + size); } else if (fav_photos) { toolbar.setTitle(selectedMedias.size() + "/" + favouriteslist.size()); } } invalidateOptionsMenu(); return getImagePosition(m.getPath()); } public void clearSelectedPhotos() { for (Media m : selectedMedias) m.setSelected(false); if (selectedMedias != null) selectedMedias.clear(); if (localFolder) toolbar.setTitle(getString(R.string.local_folder)); else toolbar.setTitle(getString(R.string.hidden_folder)); } public void selectAllPhotos() { if (all_photos && !fav_photos) { for (Media m : listAll) { m.setSelected(true); selectedMedias.add(m); } toolbar.setTitle(selectedMedias.size() + "/" + size); } else if (!all_photos && fav_photos) { for (Media m : favouriteslist) { m.setSelected(true); if (m.isSelected()) selectedMedias.add(m); } toolbar.setTitle(selectedMedias.size() + "/" + favouriteslist.size()); } } public void selectAllPhotosUpTo(int targetIndex, MediaAdapter adapter) { int indexRightBeforeOrAfter = -1; int indexNow; for (Media sm : selectedMedias) { indexNow = getImagePosition(sm.getPath()); if (indexRightBeforeOrAfter == -1) indexRightBeforeOrAfter = indexNow; if (indexNow > targetIndex) break; indexRightBeforeOrAfter = indexNow; } if 
(indexRightBeforeOrAfter != -1) { for (int index = Math.min(targetIndex, indexRightBeforeOrAfter); index <= Math.max(targetIndex, indexRightBeforeOrAfter); index++) { if (listAll.get(index) != null && !listAll.get(index).isSelected()) { listAll.get(index).setSelected(true); selectedMedias.add(listAll.get(index)); adapter.notifyItemChanged(index); } } } toolbar.setTitle(selectedMedias.size() + "/" + size); } public void populateAlbum() { albList = new ArrayList<>(); for (Album album : getAlbums().dispAlbums) { albList.add(album); } } /** * Handles short clicks on photos. * If in selection mode (editMode = true) , select the photo if it is unselected and unselect it if it's selected. * This mechanism makes it possible to select photos one by one by short-clicking on them. * If not in selection mode (editMode = false) , get current photo from album and open it in singleActivity */ private View.OnClickListener photosOnClickListener = new View.OnClickListener() { @Override public void onClick(View v) { Media m = (Media) v.findViewById(R.id.photo_path).getTag(); if (all_photos) { pos = getImagePosition(m.getPath()); } if (fav_photos) { pos = getImagePosition(m.getPath()); } if (!all_photos && !fav_photos) { if (!pickMode) { //if in selection mode, toggle the selected/unselect state of photo if (editMode) { appBarOverlay(); mediaAdapter.notifyItemChanged(getAlbum().toggleSelectPhoto(m)); if (getAlbum().selectedMedias.size() == 0) getNavigationBar(); invalidateOptionsMenu(); } else { v.setTransitionName(getString(R.string.transition_photo)); getAlbum().setCurrentPhotoIndex(m); Intent intent = new Intent(LFMainActivity.this, SingleMediaActivity.class); intent.putExtra("path", Uri.fromFile(new File(m.getPath())).toString()); ActivityOptionsCompat options = ActivityOptionsCompat. makeSceneTransitionAnimation(LFMainActivity.this, v, v.getTransitionName()); intent.setAction(SingleMediaActivity.ACTION_OPEN_ALBUM); startActivity(intent, options.toBundle()); } } else { setResult(RESULT_OK, new Intent().setData(m.getUri())); finish(); } } else if (all_photos && !fav_photos) { if (!editMode) { Intent intent = new Intent(REVIEW_ACTION, Uri.fromFile(new File(m.getPath()))); intent.putExtra(getString(R.string.all_photo_mode), true); intent.putExtra(getString(R.string.position), pos); intent.putExtra(getString(R.string.allMediaSize), size); v.setTransitionName(getString(R.string.transition_photo)); ActivityOptionsCompat options = ActivityOptionsCompat. makeSceneTransitionAnimation(LFMainActivity.this, v, v.getTransitionName()); intent.setClass(getApplicationContext(), SingleMediaActivity.class); startActivity(intent, options.toBundle()); } else { mediaAdapter.notifyItemChanged(toggleSelectPhoto(m)); } } else if (!all_photos && fav_photos) { if (!editMode) { Intent intent = new Intent(REVIEW_ACTION, Uri.fromFile(new File(m.getPath()))); intent.putExtra("fav_photos", true); intent.putExtra(getString(R.string.position), pos); intent.putParcelableArrayListExtra("favouriteslist", favouriteslist); intent.putExtra(getString(R.string.allMediaSize), favouriteslist.size()); v.setTransitionName(getString(R.string.transition_photo)); ActivityOptionsCompat options = ActivityOptionsCompat. 
makeSceneTransitionAnimation(LFMainActivity.this, v, v.getTransitionName()); intent.setClass(getApplicationContext(), SingleMediaActivity.class); startActivity(intent, options.toBundle()); } else { mediaAdapter.notifyItemChanged(toggleSelectPhoto(m)); } } } }; private View.OnLongClickListener albumOnLongCLickListener = new View.OnLongClickListener() { @Override public boolean onLongClick(View v) { final Album album = (Album) v.findViewById(R.id.album_name).getTag(); if(securityObj.isActiveSecurity() && securityObj.isPasswordOnfolder()) { final boolean passco[] = {false}; if (check(album.getPath())) { AlertDialog.Builder passwordDialogBuilder = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); final EditText editTextPassword = securityObj.getInsertPasswordDialog(LFMainActivity.this, passwordDialogBuilder); editTextPassword.setHintTextColor(getResources().getColor(R.color.grey, null)); passwordDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null); passwordDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { //This should br empty it will be overwrite later //to avoid dismiss of the dialog on wrong password } }); editTextPassword.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void afterTextChanged(Editable editable) { if(securityObj.getTextInputLayout().getVisibility() == View.VISIBLE && !passco[0]){ securityObj.getTextInputLayout().setVisibility(View.INVISIBLE); } else{ passco[0]=false; } } }); final AlertDialog passwordDialog = passwordDialogBuilder.create(); passwordDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE); passwordDialog.show(); AlertDialogsHelper.setButtonTextColor( new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), passwordDialog); passwordDialog.getButton(AlertDialog.BUTTON_POSITIVE) .setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { if (securityObj.checkPassword(editTextPassword.getText().toString())) { passwordDialog.dismiss(); if (checkForReveal) { enterReveal(); checkForReveal = false; } albumsAdapter.notifyItemChanged(getAlbums().toggleSelectAlbum(album)); editMode = true; invalidateOptionsMenu(); if (getAlbums().getSelectedCount() == 0) getNavigationBar(); else { hideNavigationBar(); hidenav = true; } } // if password is incorrect, notify user of incorrect password else { passco[0] = true; securityObj.getTextInputLayout().setVisibility(View.VISIBLE); SnackBarHandler .showWithBottomMargin(mDrawerLayout, getString(R.string.wrong_password), navigationView.getHeight()); editTextPassword.getText().clear(); editTextPassword.requestFocus(); } } }); } else { if (checkForReveal) { enterReveal(); checkForReveal = false; } albumsAdapter.notifyItemChanged(getAlbums().toggleSelectAlbum(album)); editMode = true; invalidateOptionsMenu(); if (getAlbums().getSelectedCount() == 0) getNavigationBar(); else { hideNavigationBar(); hidenav = true; } } } else { if (checkForReveal) { enterReveal(); checkForReveal = false; } //for selecting albums upto a particular range if(editMode) { int currentAlbum = getAlbums().getCurrentAlbumIndex(album); 
getAlbums().selectAllPhotosUpToAlbums(currentAlbum, albumsAdapter); } albumsAdapter.notifyItemChanged(getAlbums().toggleSelectAlbum(album)); editMode = true; invalidateOptionsMenu(); if (getAlbums().getSelectedCount() == 0) getNavigationBar(); else { hideNavigationBar(); hidenav = true; } } return true; } }; private boolean check(String path) { boolean dr = false; for (String s : securityObj.getSecuredfolders()) { if (s.equals(path)) { dr = true; break; } } return dr; } private View.OnClickListener albumOnClickListener = new View.OnClickListener() { @Override public void onClick(View v) { fromOnClick = true; final Album album = (Album) v.findViewById(R.id.album_name).getTag(); showAppBar(); //int index = Integer.parseInt(v.findViewById(R.id.album_name).getTag().toString()); if (editMode) { albumsAdapter.notifyItemChanged(getAlbums().toggleSelectAlbum(album)); if (getAlbums().getSelectedCount() == 0) getNavigationBar(); invalidateOptionsMenu(); } else if(securityObj.isActiveSecurity() && securityObj.isPasswordOnfolder()){ final boolean[] passco = {false}; if (check(album.getPath())) { AlertDialog.Builder passwordDialogBuilder = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); final EditText editTextPassword = securityObj.getInsertPasswordDialog(LFMainActivity.this, passwordDialogBuilder); editTextPassword.setHintTextColor(getResources().getColor(R.color.grey, null)); editTextPassword.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void afterTextChanged(Editable editable) { if(securityObj.getTextInputLayout().getVisibility() == View.VISIBLE && !passco[0]){ securityObj.getTextInputLayout().setVisibility(View.INVISIBLE); } else{ passco[0]=false; } } }); passwordDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null); passwordDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { //This should br empty it will be overwrite later //to avoid dismiss of the dialog on wrong password } }); final AlertDialog passwordDialog = passwordDialogBuilder.create(); passwordDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE); passwordDialog.show(); AlertDialogsHelper.setButtonTextColor( new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), passwordDialog); passwordDialog.getButton(AlertDialog.BUTTON_POSITIVE) .setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { if (securityObj.checkPassword(editTextPassword.getText().toString())) { passwordDialog.dismiss(); getAlbums().setCurrentAlbum(album); displayCurrentAlbumMedia(true); } // if password is incorrect, notify user of incorrect password else { passco[0] =true; securityObj.getTextInputLayout().setVisibility(View.VISIBLE); SnackBarHandler .showWithBottomMargin(mDrawerLayout, getString(R.string.wrong_password), navigationView.getHeight()); editTextPassword.getText().clear(); editTextPassword.requestFocus(); } } }); } else { getAlbums().setCurrentAlbum(album); displayCurrentAlbumMedia(true); } } else { getAlbums().setCurrentAlbum(album); displayCurrentAlbumMedia(true); } } }; /** * Method for clearing the scroll flags. 
*/ private void appBarOverlay() { AppBarLayout.LayoutParams params = (AppBarLayout.LayoutParams) toolbar.getLayoutParams(); params.setScrollFlags(AppBarLayout.LayoutParams.SCROLL_FLAG_EXIT_UNTIL_COLLAPSED); // clear all scroll flags } /** * Method for adding the scroll flags. */ private void clearOverlay() { AppBarLayout.LayoutParams params = (AppBarLayout.LayoutParams) toolbar.getLayoutParams(); params.setScrollFlags(AppBarLayout.LayoutParams.SCROLL_FLAG_SCROLL | AppBarLayout.LayoutParams.SCROLL_FLAG_ENTER_ALWAYS); } private void showAppBar() { if (toolbar.getParent() instanceof AppBarLayout) { ((AppBarLayout)toolbar.getParent()).setExpanded(true, true); } } public int getImagePosition(String path) { int pos = 0; if (all_photos) { for (int i = 0; i < listAll.size(); i++) { if (listAll.get(i).getPath().equals(path)) { pos = i; break; } } } else if (fav_photos) { Collections.sort(favouriteslist, MediaComparators.getComparator(getAlbum().settings.getSortingMode(), getAlbum().settings .getSortingOrder())); for (int i = 0; i < favouriteslist.size(); i++) { if (favouriteslist.get(i).getPath().equals(path)) { pos = i; break; } } } return pos; } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Log.e("TAG", "lfmain"); ButterKnife.bind(this); navigationView = (BottomNavigationView) findViewById(R.id.bottombar); favicon = (IconicsImageView) findViewById(R.id.Drawer_favourite_Icon); rvAlbums = (CustomScrollBarRecyclerView) findViewById(R.id.grid_albums); rvMedia = (CustomScrollBarRecyclerView) findViewById(R.id.grid_photos); overridePendingTransition(R.anim.right_to_left, R.anim.left_to_right); SP = PreferenceUtil.getInstance(getApplicationContext()); albumsMode = true; editMode = false; securityObj = new SecurityHelper(LFMainActivity.this); if (getIntent().getExtras() != null) pickMode = getIntent().getExtras().getBoolean(SplashScreen.PICK_MODE); SP.putBoolean(getString(R.string.preference_use_alternative_provider), false); initUI(); activityContext = this; new initAllPhotos().execute(); new SortModeSet(activityContext).execute(DATE); displayData(getIntent().getExtras()); checkNothing(); populateAlbum(); navigationView.setOnNavigationItemSelectedListener(new BottomNavigationView.OnNavigationItemSelectedListener() { @Override public boolean onNavigationItemSelected(@NonNull MenuItem item) { int itemID = item.getItemId(); if (itemID == R.id.navigation_home) { if(textView.getVisibility() == View.VISIBLE){ textView.setVisibility(View.GONE); } if (!localFolder) { hidden = false; localFolder = true; findViewById(R.id.ll_drawer_hidden).setBackgroundColor(Color.TRANSPARENT); findViewById(R.id.ll_drawer_Default).setBackgroundColor(getHighlightedItemColor()); tint(); } displayAlbums(); return true; } return LFMainActivity.super.onNavigationItemSelected(item); } }); } @Override public void onResume() { super.onResume(); ActivitySwitchHelper.setContext(this); securityObj.updateSecuritySetting(); setupUI(); if (all_photos && !fav_photos) { new PrepareAllPhotos(activityContext).execute(); } if (!all_photos && fav_photos) { new FavouritePhotos(activityContext).execute(); } if (!all_photos && !fav_photos) { if (SP.getBoolean("auto_update_media", false)) { if (albumsMode) { if (!firstLaunch) new PrepareAlbumTask(activityContext).execute(); } else new PreparePhotosTask(activityContext).execute(); } else { albumsAdapter.notifyDataSetChanged(); mediaAdapter.notifyDataSetChanged(); } } invalidateOptionsMenu(); firstLaunch = false; } private void 
        toolbar.setTitle(getAlbum().getName());
        toolbar.setNavigationIcon(getToolbarIcon(GoogleMaterial.Icon.gmd_arrow_back));
        mDrawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_LOCKED_CLOSED);
        mediaAdapter.swapDataSet(getAlbum().getMedia(), false);
        if (reload) new PreparePhotosTask(activityContext).execute();
        toolbar.setNavigationOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                displayAlbums();
            }
        });
        albumsMode = editMode = false;
        invalidateOptionsMenu();
    }

    private void displayAllMedia(boolean reload) {
        clearSelectedPhotos();
        toolbar.setTitle(getString(R.string.all_media));
        toolbar.setNavigationIcon(getToolbarIcon(GoogleMaterial.Icon.gmd_arrow_back));
        mDrawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_LOCKED_CLOSED);
        mediaAdapter.swapDataSet(listAll, false);
        if (reload) new PrepareAllPhotos(activityContext).execute();
        toolbar.setNavigationOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                displayAlbums();
            }
        });
        albumsMode = editMode = false;
        invalidateOptionsMenu();
    }

    /**
     * Rebuilds the favourites list from Realm, pruning entries whose files no longer exist.
     */
    private void getfavouriteslist() {
        favouriteslist = new ArrayList<Media>();
        ArrayList<String> todelete = new ArrayList<>();
        realm = Realm.getDefaultInstance();
        RealmQuery<FavouriteImagesModel> favouriteImagesModelRealmQuery = realm.where(FavouriteImagesModel.class);
        int count = (int) favouriteImagesModelRealmQuery.count();
        for (int i = 0; i < count; i++) {
            final String path = favouriteImagesModelRealmQuery.findAll().get(i).getPath();
            if (new File(path).exists()) {
                favouriteslist.add(new Media(new File(path)));
            } else {
                todelete.add(path);
            }
        }
        for (int i = 0; i < todelete.size(); i++) {
            final String path = todelete.get(i);
            realm.executeTransaction(new Realm.Transaction() {
                @Override
                public void execute(Realm realm) {
                    RealmResults<FavouriteImagesModel> result = realm.where(FavouriteImagesModel.class).equalTo
                            ("path", path).findAll();
                    result.deleteAllFromRealm();
                }
            });
        }
    }

    private void displayfavourites() {
        toolbar.setTitle(getResources().getString(R.string.favourite_title));
        getfavouriteslist();
        toolbar.setNavigationIcon(getToolbarIcon(GoogleMaterial.Icon.gmd_arrow_back));
        mDrawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_LOCKED_CLOSED);
        fav_photos = true;
        mediaAdapter.swapDataSet(favouriteslist, true);
        new FavouritePhotos(activityContext).execute();
        toolbar.setNavigationOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                displayAlbums();
            }
        });
        albumsMode = editMode = all_photos = false;
        invalidateOptionsMenu();
    }

    private void displayAlbums() {
        all_photos = false;
        fav_photos = false;
        displayAlbums(true);
    }

    private void displayAlbums(boolean reload) {
        if (localFolder) {
            toolbar.setTitle(getString(R.string.local_folder));
        } else {
            toolbar.setTitle(getString(R.string.hidden_folder));
        }
        toolbar.setNavigationIcon(getToolbarIcon(GoogleMaterial.Icon.gmd_menu));
        mDrawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_UNLOCKED);
        albumsAdapter.swapDataSet(getAlbums().dispAlbums);
        if (reload) new PrepareAlbumTask(activityContext).execute();
        toolbar.setNavigationOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                mDrawerLayout.openDrawer(GravityCompat.START);
            }
        });
        albumsMode = true;
        editMode = false;
        invalidateOptionsMenu();
        mediaAdapter.swapDataSet(new ArrayList<Media>(), false);
        rvMedia.scrollToPosition(0);
    }

    /**
     * Returns a copy of the media that are currently selected in the album.
     */
    private ArrayList<Media> getselecteditems() {
        return new ArrayList<>(getAlbum().getSelectedMedia());
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // remember the first visible item so the scroll position survives the rotation
        if (albumsMode) {
            int albumpos = ((GridLayoutManager) rvAlbums.getLayoutManager()).findFirstVisibleItemPosition();
            updateColumnsRvs();
            (rvAlbums.getLayoutManager()).scrollToPosition(albumpos);
        } else {
            int photopos = ((GridLayoutManager) rvMedia.getLayoutManager()).findFirstVisibleItemPosition();
            updateColumnsRvs();
            (rvMedia.getLayoutManager()).scrollToPosition(photopos);
        }
    }

    private boolean displayData(Bundle data) {
        if (data != null) {
            switch (data.getInt(SplashScreen.CONTENT)) {
                case SplashScreen.ALBUMS_PREFETCHED:
                    displayAlbums(false);
                    // we pass the albumsMode here. If true, show the rvAlbums recycler view; if false, show rvMedia.
                    toggleRecyclersVisibility(true);
                    return true;
                case SplashScreen.ALBUMS_BACKUP:
                    displayAlbums(true);
                    // we pass the albumsMode here. If true, show the rvAlbums recycler view; if false, show rvMedia.
                    toggleRecyclersVisibility(true);
                    return true;
                case SplashScreen.PHOTOS_PREFETCHED:
                    //TODO ask password if hidden
                    new Thread(new Runnable() {
                        @Override
                        public void run() {
                            getAlbums().loadAlbums(getApplicationContext(), getAlbum().isHidden());
                        }
                    }).start();
                    displayCurrentAlbumMedia(false);
                    // we pass the albumsMode here. If true, show the rvAlbums recycler view; if false, show rvMedia.
                    toggleRecyclersVisibility(false);
                    return true;
            }
        }
        displayAlbums(true);
        return false;
    }

    private class initAllPhotos extends AsyncTask<Void, Void, Void> {

        @Override
        protected Void doInBackground(Void...
arg0) { listAll = StorageProvider.getAllShownImages(LFMainActivity.this); size = listAll.size(); media = listAll; Collections.sort(listAll, MediaComparators.getComparator(getAlbum().settings.getSortingMode(), getAlbum().settings.getSortingOrder())); return null; } } private void initUI() { clearOverlay(); setSupportActionBar(toolbar); rvAlbums.setHasFixedSize(true); rvAlbums.setItemAnimator(new DefaultItemAnimator()); rvMedia.setHasFixedSize(true); rvMedia.setItemAnimator(new DefaultItemAnimator()); albumsAdapter = new AlbumsAdapter(getAlbums().dispAlbums, LFMainActivity.this); albumsAdapter.setOnClickListener(albumOnClickListener); albumsAdapter.setOnLongClickListener(albumOnLongCLickListener); rvAlbums.setAdapter(albumsAdapter); //set scale gesture detector for resizing the gridItem mScaleGestureDetector = new ScaleGestureDetector(this, new ScaleGestureDetector.SimpleOnScaleGestureListener() { @Override public boolean onScale(ScaleGestureDetector detector) { if (detector.getCurrentSpan() > 200 && detector.getTimeDelta() > 200) { int spanCount; if (albumsMode) spanCount = columnsCount(); else spanCount = mediaCount(); //zooming out if ((detector.getCurrentSpan() - detector.getPreviousSpan() < -300) && spanCount < 6) { if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) { if (albumsMode) SP.putInt("n_columns_folders", spanCount + 1); else SP.putInt("n_columns_media", spanCount + 1); } else { if (albumsMode) SP.putInt("n_columns_folders_landscape", spanCount + 1); else SP.putInt("n_columns_media_landscape", spanCount + 1); } if (albumsMode) updateColumnsRvAlbums(); else updateColumnsRvMedia(); } //zooming in else if ((detector.getCurrentSpan() - detector.getPreviousSpan() > 300) && spanCount > 1) { if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) { if (albumsMode) SP.putInt("n_columns_folders", spanCount - 1); else SP.putInt("n_columns_media", spanCount - 1); } else { if (albumsMode) SP.putInt("n_columns_folders_landscape", spanCount - 1); else SP.putInt("n_columns_media_landscape", spanCount - 1); } if (albumsMode) updateColumnsRvAlbums(); else updateColumnsRvMedia(); } } return false; } }); //set touch listener on recycler view rvAlbums.setOnTouchListener(new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { mScaleGestureDetector.onTouchEvent(event); return false; } }); rvMedia.setOnTouchListener(new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { mScaleGestureDetector.onTouchEvent(event); return false; } }); mediaAdapter = new MediaAdapter(getAlbum().getMedia(), LFMainActivity.this); mediaAdapter.setOnClickListener(photosOnClickListener); mediaAdapter.setOnLongClickListener(photosOnLongClickListener); rvMedia.setAdapter(mediaAdapter); int spanCount = columnsCount(); rvAlbumsDecoration = new GridSpacingItemDecoration(spanCount, Measure.pxToDp(3, getApplicationContext()), true); rvAlbums.addItemDecoration(rvAlbumsDecoration); rvAlbums.setLayoutManager(new GridLayoutManager(this, spanCount)); spanCount = mediaCount(); rvMediaDecoration = new GridSpacingItemDecoration(spanCount, Measure.pxToDp(3, getApplicationContext()), true); rvMedia.setLayoutManager(new GridLayoutManager(getApplicationContext(), spanCount)); rvMedia.addItemDecoration(rvMediaDecoration); /**** SWIPE TO REFRESH ****/ swipeRefreshLayout.setColorSchemeColors(getAccentColor()); swipeRefreshLayout.setProgressBackgroundColorSchemeColor(getBackgroundColor()); refreshListener = 
new SwipeRefreshLayout.OnRefreshListener() { @Override public void onRefresh() { getNavigationBar(); if (albumsMode) { getAlbums().clearSelectedAlbums(); new PrepareAlbumTask(activityContext).execute(); } else { if (!all_photos && !fav_photos) { getAlbum().clearSelectedPhotos(); new PreparePhotosTask(activityContext).execute(); } else { if (all_photos && !fav_photos) { new PrepareAllPhotos(activityContext).execute(); } else if (!all_photos && fav_photos) { new FavouritePhotos(activityContext).execute(); } } } } }; swipeRefreshLayout.setOnRefreshListener(refreshListener); /**** DRAWER ****/ mDrawerLayout.addDrawerListener(new ActionBarDrawerToggle(this, mDrawerLayout, toolbar, R.string.drawer_open, R.string.drawer_close) { public void onDrawerClosed(View view) { //Put your code here // materialMenu.animateIconState(MaterialMenuDrawable.IconState.BURGER); Intent intent = null; if (settings) { intent = new Intent(LFMainActivity.this, SettingsActivity.class); startActivity(intent); settings = false; } else if (about) { intent = new Intent(LFMainActivity.this, AboutActivity.class); startActivity(intent); about = false; } else if (uploadHistory) { intent = new Intent(LFMainActivity.this, UploadHistory.class); startActivity(intent); uploadHistory = false; } else if (favourites) { displayfavourites(); favourites = false; } else if (trashbin) { Intent intent1 = new Intent(LFMainActivity.this, TrashBinActivity.class); startActivity(intent1); trashbin = false; } } public void onDrawerOpened(View drawerView) { //Put your code here //materialMenu.animateIconState(MaterialMenuDrawable.IconState.ARROW); } }); /** * Floating Action Button to Scroll Up */ setUpFab(); setRecentApp(getString(R.string.app_name)); setupUI(); if (pickMode) { hideNavigationBar(); swipeRefreshLayout.setPadding(0, 0, 0, 0); } } /** * Method to set scroll listeners for recycler view */ private void setUpFab() { fabScrollUp.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { rvMedia.smoothScrollToPosition(0); fabScrollUp.hide(); } }); fabScrollUp.hide(); rvMedia.addOnScrollListener(new RecyclerView.OnScrollListener() { @Override public void onScrolled(RecyclerView recyclerView, int dx, int dy) { LinearLayoutManager linearLayoutManager = (LinearLayoutManager) recyclerView.getLayoutManager(); if (linearLayoutManager.findFirstVisibleItemPosition() > 30 && !fabScrollUp.isShown()) fabScrollUp.show(); else if (linearLayoutManager.findFirstVisibleItemPosition() < 30 && fabScrollUp.isShown()) fabScrollUp.hide(); fabScrollUp.setAlpha(0.7f); } }); } public int columnsCount() { return getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT ? SP.getInt("n_columns_folders", 2) : SP.getInt("n_columns_folders_landscape", 3); } public int mediaCount() { return getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT ? 
SP.getInt("n_columns_media", 3) : SP.getInt("n_columns_media_landscape", 4); } private void updateColumnsRvs() { updateColumnsRvAlbums(); updateColumnsRvMedia(); } private void updateColumnsRvAlbums() { int spanCount = columnsCount(); if (spanCount != ((GridLayoutManager) rvAlbums.getLayoutManager()).getSpanCount()) { rvAlbums.removeItemDecoration(rvAlbumsDecoration); rvAlbumsDecoration = new GridSpacingItemDecoration(spanCount, Measure.pxToDp(3, getApplicationContext()), true); rvAlbums.addItemDecoration(rvAlbumsDecoration); rvAlbums.setLayoutManager(new GridLayoutManager(this, spanCount)); } } private void updateColumnsRvMedia() { int spanCount = mediaCount(); if (spanCount != ((GridLayoutManager) rvMedia.getLayoutManager()).getSpanCount()) { ((GridLayoutManager) rvMedia.getLayoutManager()).getSpanCount(); rvMedia.removeItemDecoration(rvMediaDecoration); rvMediaDecoration = new GridSpacingItemDecoration(spanCount, Measure.pxToDp(3, getApplicationContext()), true); rvMedia.setLayoutManager(new GridLayoutManager(getApplicationContext(), spanCount)); rvMedia.addItemDecoration(rvMediaDecoration); } } //region TESTING @TargetApi(Build.VERSION_CODES.LOLLIPOP) @Override public final void onActivityResult(final int requestCode, final int resultCode, final Intent resultData) { if (resultCode == RESULT_OK) { if (requestCode == REQUEST_CODE_SD_CARD_PERMISSIONS) { Uri treeUri = resultData.getData(); // Persist URI in shared preference so that you can use it later. ContentHelper.saveSdCardInfo(getApplicationContext(), treeUri); getContentResolver().takePersistableUriPermission(treeUri, Intent.FLAG_GRANT_WRITE_URI_PERMISSION); SnackBarHandler.showWithBottomMargin(mDrawerLayout, getString(R.string.got_permission_wr_sdcard), 0); } } } //endregion private void requestSdCardPermissions() { final AlertDialog.Builder dialogBuilder = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); AlertDialogsHelper.getTextDialog(LFMainActivity.this, dialogBuilder, R.string.sd_card_write_permission_title, R.string.sd_card_permissions_message, null); dialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) startActivityForResult(new Intent(Intent.ACTION_OPEN_DOCUMENT_TREE), REQUEST_CODE_SD_CARD_PERMISSIONS); } }); AlertDialog alertDialog = dialogBuilder.create(); alertDialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), alertDialog); } //region UI/GRAPHIC private void setupUI() { updateColumnsRvs(); //TODO: MUST BE FIXED toolbar.setPopupTheme(getPopupToolbarStyle()); toolbar.setBackgroundColor(getPrimaryColor()); if (localFolder) { toolbar.setTitle(getString(R.string.local_folder)); } else { toolbar.setTitle(getString(R.string.hidden_folder)); } //navigationView.setVisibility(View.VISIBLE); /**** SWIPE TO REFRESH ****/ swipeRefreshLayout.setColorSchemeColors(getAccentColor()); swipeRefreshLayout.setProgressBackgroundColorSchemeColor(getBackgroundColor()); setStatusBarColor(); setNavBarColor(); setDrawerTheme(); rvAlbums.setBackgroundColor(getBackgroundColor()); rvMedia.setBackgroundColor(getBackgroundColor()); rvAlbums.setScrollBarColor(getPrimaryColor()); rvMedia.setScrollBarColor(getPrimaryColor()); mediaAdapter.updatePlaceholder(getApplicationContext()); albumsAdapter.updateTheme(); /**** DRAWER ****/ 
setScrollViewColor(scrollView); /**** recyclers drawable *****/ Drawable drawableScrollBar = ContextCompat.getDrawable(getApplicationContext(), R.drawable.ic_scrollbar); drawableScrollBar.setColorFilter(new PorterDuffColorFilter(getPrimaryColor(), PorterDuff.Mode.SRC_ATOP)); /**** FAB ****/ fabScrollUp.setBackgroundTintList(ColorStateList.valueOf(getAccentColor())); fabScrollUp.setAlpha(0.7f); } private void setDrawerTheme() { findViewById(R.id.Drawer_Header).setBackgroundColor(getPrimaryColor()); findViewById(R.id.Drawer_Body).setBackgroundColor(getDrawerBackground()); findViewById(R.id.drawer_scrollbar).setBackgroundColor(getDrawerBackground()); findViewById(R.id.Drawer_Body_Divider).setBackgroundColor(getIconColor()); /** TEXT VIEWS **/ int color = getTextColor(); defaultText.setTextColor(color); drawerSettingText.setTextColor(color); drawerAboutText.setTextColor(color); hiddenText.setTextColor(color); drawerShareText.setTextColor(color); drawerRateText.setTextColor(color); drawerUploadText.setTextColor(color); drawerTrashText.setTextColor(color); ((TextView) findViewById(R.id.Drawer_Default_Item)).setTextColor(color); ((TextView) findViewById(R.id.Drawer_Setting_Item)).setTextColor(color); ((TextView) findViewById(R.id.Drawer_About_Item)).setTextColor(color); ((TextView) findViewById(R.id.Drawer_hidden_Item)).setTextColor(color); ((TextView) findViewById(R.id.Drawer_share_Item)).setTextColor(color); ((TextView) findViewById(R.id.Drawer_rate_Item)).setTextColor(color); ((TextView) findViewById(R.id.Drawer_Upload_Item)).setTextColor(color); ((TextView) findViewById(R.id.Drawer_TrashBin_Item)).setTextColor(color); ((TextView) findViewById(R.id.Drawer_favourite_Item)).setTextColor(color); /** ICONS **/ color = getIconColor(); defaultIcon.setColor(color); drawerSettingIcon.setColor(color); drawerAboutIcon.setColor(color); hiddenIcon.setColor(color); drawerShareIcon.setColor(color); drawerRateIcon.setColor(color); drawerUploadIcon.setColor(color); drawerTrashIcon.setColor(color); favicon.setColor(color); // Default setting if (localFolder) findViewById(R.id.ll_drawer_Default).setBackgroundColor(getHighlightedItemColor()); else findViewById(R.id.ll_drawer_hidden).setBackgroundColor(getHighlightedItemColor()); tint(); findViewById(R.id.ll_drawer_Setting).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { settings = true; mDrawerLayout.closeDrawer(GravityCompat.START); } }); findViewById(R.id.ll_drawer_About).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { about = true; mDrawerLayout.closeDrawer(GravityCompat.START); } }); findViewById(R.id.ll_drawer_favourites).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { favourites = true; mDrawerLayout.closeDrawer(GravityCompat.START); } }); findViewById(R.id.ll_drawer_uploadhistory).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { uploadHistory = true; mDrawerLayout.closeDrawer(GravityCompat.START); } }); findViewById(R.id.ll_drawer_trashbin).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { trashbin = true; mDrawerLayout.closeDrawer(GravityCompat.START); //toolbar.setTitle("Trash Bin"); } }); findViewById(R.id.ll_drawer_Default).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { localFolder = true; findViewById(R.id.ll_drawer_hidden).setBackgroundColor(Color.TRANSPARENT); 
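                // move the drawer highlight to the local-folder row, retint the icons and reload the albums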
findViewById(R.id.ll_drawer_Default).setBackgroundColor(getHighlightedItemColor()); tint(); toolbar.setTitle(getString(R.string.local_folder)); hidden = false; mDrawerLayout.closeDrawer(GravityCompat.START); new PrepareAlbumTask(activityContext).execute(); } }); findViewById(R.id.ll_drawer_hidden).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { localFolder = false; findViewById(R.id.ll_drawer_Default).setBackgroundColor(Color.TRANSPARENT); findViewById(R.id.ll_drawer_hidden).setBackgroundColor(getHighlightedItemColor()); tint(); toolbar.setTitle(getString(R.string.hidden_folder)); if (securityObj.isActiveSecurity() && securityObj.isPasswordOnHidden()) { final boolean[] passco = {false}; AlertDialog.Builder passwordDialogBuilder = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); final EditText editTextPassword = securityObj.getInsertPasswordDialog(LFMainActivity.this, passwordDialogBuilder); editTextPassword.setHintTextColor(getResources().getColor(R.color.grey, null)); passwordDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { } }); passwordDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null); editTextPassword.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void afterTextChanged(Editable editable) { if(securityObj.getTextInputLayout().getVisibility() == View.VISIBLE && !passco[0]){ securityObj.getTextInputLayout().setVisibility(View.INVISIBLE); } else{ passco[0]=false; } } }); final AlertDialog passwordDialog = passwordDialogBuilder.create(); passwordDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE); passwordDialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), passwordDialog); passwordDialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View .OnClickListener() { @Override public void onClick(View v) { if (securityObj.checkPassword(editTextPassword.getText().toString())) { hidden = true; mDrawerLayout.closeDrawer(GravityCompat.START); new PrepareAlbumTask(activityContext).execute(); passwordDialog.dismiss(); } else { passco[0] = true; securityObj.getTextInputLayout().setVisibility(View.VISIBLE); SnackBarHandler.showWithBottomMargin(mDrawerLayout, getString(R.string.wrong_password), 0); editTextPassword.getText().clear(); editTextPassword.requestFocus(); } } }); } else { hidden = true; mDrawerLayout.closeDrawer(GravityCompat.START); new PrepareAlbumTask(activityContext).execute(); } } }); findViewById(R.id.ll_share_phimpme).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { onInviteClicked(); mDrawerLayout.closeDrawer(GravityCompat.START); } }); findViewById(R.id.ll_rate_phimpme).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { final String appPackageName = getPackageName(); try { startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("market://details?id=" + appPackageName))); } catch (android.content.ActivityNotFoundException anfe) { startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://play.google.com/store/apps/details?id=" 
+ appPackageName))); } mDrawerLayout.closeDrawer(GravityCompat.START); } }); } private void onInviteClicked() { Intent sendIntent = new Intent(); sendIntent.setAction(Intent.ACTION_SEND); sendIntent.putExtra(Intent.EXTRA_TEXT, getString(R.string.install_phimpme) + "\n " + getString(R.string.invitation_deep_link)); sendIntent.setType("text/plain"); startActivity(sendIntent); } //endregion private void updateSelectedStuff() { if (albumsMode) { if (getAlbums().getSelectedCount() == 0) { clearOverlay(); checkForReveal = true; swipeRefreshLayout.setEnabled(true); } else { appBarOverlay(); swipeRefreshLayout.setEnabled(false); } if (editMode) toolbar.setTitle(getAlbums().getSelectedCount() + "/" + getAlbums().dispAlbums.size()); else { if (hidden) toolbar.setTitle(getString(R.string.hidden_folder)); else toolbar.setTitle(getString(R.string.local_folder)); toolbar.setNavigationIcon(getToolbarIcon(GoogleMaterial.Icon.gmd_menu)); toolbar.setNavigationOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { mDrawerLayout.openDrawer(GravityCompat.START); } }); } } else { if (!all_photos) { if (getAlbum().getSelectedCount() == 0) { clearOverlay(); checkForReveal = true; swipeRefreshLayout.setEnabled(true); } else { appBarOverlay(); swipeRefreshLayout.setEnabled(false); } } else { if (selectedMedias.size() == 0) { clearOverlay(); swipeRefreshLayout.setEnabled(true); } else { appBarOverlay(); swipeRefreshLayout.setEnabled(false); } } if (editMode) { if (!all_photos && !fav_photos) toolbar.setTitle(getAlbum().getSelectedCount() + "/" + getAlbum().getMedia().size()); else if (!fav_photos && all_photos) { toolbar.setTitle(selectedMedias.size() + "/" + size); } else if (fav_photos && !all_photos) { toolbar.setTitle(selectedMedias.size() + "/" + favouriteslist.size()); } } else { if (!all_photos && !fav_photos) toolbar.setTitle(getAlbum().getName()); else if (all_photos && !fav_photos) { toolbar.setTitle(getString(R.string.all_media)); } else if (fav_photos && !all_photos) { toolbar.setTitle(getResources().getString(R.string.favourite_title)); } toolbar.setNavigationIcon(getToolbarIcon(GoogleMaterial.Icon.gmd_arrow_back)); toolbar.setNavigationOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { displayAlbums(); } }); } } if (editMode) { toolbar.setNavigationIcon(getToolbarIcon(GoogleMaterial.Icon.gmd_clear)); toolbar.setNavigationOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { getNavigationBar(); finishEditMode(); clearSelectedPhotos(); } }); } } //called from onBackPressed() private void finishEditMode() { if (editMode) enterReveal(); editMode = false; if (albumsMode) { getAlbums().clearSelectedAlbums(); albumsAdapter.notifyDataSetChanged(); } else { if (!all_photos) { getAlbum().clearSelectedPhotos(); mediaAdapter.notifyDataSetChanged(); } else { clearSelectedPhotos(); mediaAdapter.notifyDataSetChanged(); } } invalidateOptionsMenu(); } private void checkNothing() { nothingToShow.setTextColor(getTextColor()); nothingToShow.setText(getString(R.string.there_is_nothing_to_show)); nothingToShow.setVisibility((albumsMode && getAlbums().dispAlbums.size() == 0) || (!albumsMode && getAlbum().getMedia().size() == 0) ? 
View.VISIBLE : View.GONE);
        // refine visibility so the favourites view is also taken into account
        nothingToShow.setVisibility((albumsMode && getAlbums().dispAlbums.size() == 0 && !fav_photos)
                || (!albumsMode && getAlbum().getMedia().size() == 0 && !fav_photos)
                || (fav_photos && favouriteslist.size() == 0) ? View.VISIBLE : View.GONE);
        starImageView.setVisibility(View.GONE);
    }

    private void checkNothingFavourites() {
        nothingToShow.setTextColor(getTextColor());
        nothingToShow.setText(R.string.no_favourites_text);
        // the same condition drives both the message and the star placeholder
        int visibility = (albumsMode && getAlbums().dispAlbums.size() == 0 && !fav_photos)
                || (!albumsMode && getAlbum().getMedia().size() == 0 && !fav_photos)
                || (fav_photos && favouriteslist.size() == 0) ? View.VISIBLE : View.GONE;
        nothingToShow.setVisibility(visibility);
        starImageView.setVisibility(visibility);
        if (getBaseTheme() != LIGHT_THEME)
            starImageView.setColorFilter(ContextCompat.getColor(this, R.color.white), PorterDuff.Mode.SRC_ATOP);
        else
            starImageView.setColorFilter(ContextCompat.getColor(this, R.color.accent_grey), PorterDuff.Mode.SRC_ATOP);
    }

    private void showsnackbar(Boolean result) {
        if (result) {
            SnackBarHandler.show(mDrawerLayout, getApplicationContext().getString(R.string.photo_deleted_msg), navigationView.getHeight());
        } else {
            SnackBarHandler.show(mDrawerLayout, getApplicationContext().getString(R.string.photo_deletion_failed), navigationView.getHeight());
        }
    }

    private void checkNoSearchResults(String result) {
        textView.setText(getString(R.string.null_search_result) + " " + '"' + result + '"');
        textView.setTextColor(getTextColor());
        textView.setVisibility(View.VISIBLE);
    }

    //region MENU
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_albums, menu);
        MenuItem menuitem = menu.findItem(R.id.search_action);
        searchView = (SearchView) MenuItemCompat.getActionView(menuitem);
        searchView.setOnQueryTextFocusChangeListener(new View.OnFocusChangeListener() {
            @Override
            public void onFocusChange(final View view, boolean b) {
                if (b) {
                    view.postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            InputMethodManager imm = (InputMethodManager) getSystemService(Context
                                    .INPUT_METHOD_SERVICE);
                            imm.showSoftInput(view.findFocus(), 0);
                        }
                    }, 200);
                } else {
                    InputMethodManager imm = (InputMethodManager) getSystemService(Context
                            .INPUT_METHOD_SERVICE);
                    imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
                }
            }
        });
        if (albumsMode) {
            searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
                @Override
                public boolean onQueryTextSubmit(String query) {
                    return false;
                }

                @Override
                public boolean onQueryTextChange(String newText) {
                    return searchTitle(newText);
                }
            });
            menu.findItem(R.id.select_all).setVisible(getAlbums().getSelectedCount() != albumsAdapter.getItemCount());
            menu.findItem(R.id.ascending_sort_action).setChecked(getAlbums().getSortingOrder() == SortingOrder.ASCENDING);
            switch (getAlbums().getSortingMode()) {
                case NAME:
                    menu.findItem(R.id.name_sort_action).setChecked(true);
                    break;
                case SIZE:
                    menu.findItem(R.id.size_sort_action).setChecked(true);
                    break;
                case DATE:
                default:
                    menu.findItem(R.id.date_taken_sort_action).setChecked(true);
                    break;
                case NUMERIC:
                    menu.findItem(R.id.numeric_sort_action).setChecked(true);
                    break;
            }
        } else {
            getfavouriteslist();
            menu.findItem(R.id.select_all).setVisible(!(getAlbum().getSelectedCount() == mediaAdapter
                    .getItemCount() || selectedMedias.size() == size
                    || (selectedMedias.size() == favouriteslist.size() && fav_photos)));
            menu.findItem(R.id.ascending_sort_action).setChecked(getAlbum().settings.getSortingOrder() == SortingOrder.ASCENDING);
            switch (getAlbum().settings.getSortingMode()) {
                case NAME:
                    menu.findItem(R.id.name_sort_action).setChecked(true);
                    break;
                case SIZE:
                    menu.findItem(R.id.size_sort_action).setChecked(true);
                    break;
                case DATE:
                default:
                    menu.findItem(R.id.date_taken_sort_action).setChecked(true);
                    break;
                case NUMERIC:
                    menu.findItem(R.id.numeric_sort_action).setChecked(true);
                    break;
            }
        }
        menu.findItem(R.id.hideAlbumButton).setTitle(hidden ? getString(R.string.unhide) : getString(R.string.hide));
        menu.findItem(R.id.delete_action).setIcon(getToolbarIcon(GoogleMaterial.Icon.gmd_delete));
        menu.findItem(R.id.sort_action).setIcon(getToolbarIcon(GoogleMaterial.Icon.gmd_sort));
        menu.findItem(R.id.sharePhotos).setIcon(getToolbarIcon(GoogleMaterial.Icon.gmd_share));
        return true;
    }

    /**
     * Filters the album list by the query typed into the toolbar search view.
     */
    public boolean searchTitle(String newText) {
        if (!fromOnClick) {
            String queryText = newText.toLowerCase();
            final ArrayList<Album> newList = new ArrayList<>();
            for (Album album : albList) {
                String name = album.getName().toLowerCase();
                if (name.contains(queryText)) {
                    newList.add(album);
                }
            }
            if (newList.isEmpty()) {
                checkNoSearchResults(newText);
            } else {
                if (textView.getVisibility() == View.VISIBLE) {
                    textView.setVisibility(View.INVISIBLE);
                }
            }
            albumsAdapter.swapDataSet(newList);
        } else {
            fromOnClick = false;
        }
        return true;
    }

    @Override
    public boolean onPrepareOptionsMenu(final Menu menu) {
        if (albumsMode) {
            editMode = getAlbums().getSelectedCount() != 0;
            menu.setGroupVisible(R.id.album_options_menu, editMode);
            menu.setGroupVisible(R.id.photos_option_men, false);
            menu.findItem(R.id.all_photos).setVisible(!editMode && !hidden);
            menu.findItem(R.id.search_action).setVisible(!editMode);
            menu.findItem(R.id.create_gif).setVisible(false);
            menu.findItem(R.id.create_zip).setVisible(false);
            menu.findItem(R.id.select_all).setVisible(getAlbums().getSelectedCount() != albumsAdapter.getItemCount());
            menu.findItem(R.id.settings).setVisible(false);
            if (getAlbums().getSelectedCount() >= 1) {
                if (getAlbums().getSelectedCount() > 1) {
                    menu.findItem(R.id.album_details).setVisible(false);
                }
                if (getAlbums().getSelectedCount() == 1) {
                    menu.findItem(R.id.search_action).setVisible(false);
                }
            }
        } else {
            menu.findItem(R.id.search_action).setVisible(false);
            if (!all_photos && !fav_photos) {
                editMode = getAlbum().areMediaSelected();
                menu.setGroupVisible(R.id.photos_option_men, editMode);
                menu.setGroupVisible(R.id.album_options_menu, !editMode);
                menu.findItem(R.id.settings).setVisible(!editMode);
                menu.findItem(R.id.all_photos).setVisible(false);
                menu.findItem(R.id.album_details).setVisible(false);
            } else if (all_photos && !fav_photos) {
                editMode = selectedMedias.size() != 0;
                menu.setGroupVisible(R.id.photos_option_men, editMode);
                menu.setGroupVisible(R.id.album_options_menu, !editMode);
                menu.findItem(R.id.all_photos).setVisible(false);
                menu.findItem(R.id.action_move).setVisible(false);
                menu.findItem(R.id.settings).setVisible(!editMode);
                menu.findItem(R.id.album_details).setVisible(false);
            } else if (!all_photos && fav_photos) {
                editMode = selectedMedias.size() != 0;
                menu.setGroupVisible(R.id.photos_option_men, editMode);
                menu.setGroupVisible(R.id.album_options_menu, !editMode);
                menu.findItem(R.id.settings).setVisible(!editMode);
                menu.findItem(R.id.create_gif).setVisible(false);
                menu.findItem(R.id.create_zip).setVisible(false);
                menu.findItem(R.id.album_details).setVisible(false);
                menu.findItem(R.id.all_photos).setVisible(false);
            }
            menu.findItem(R.id.select_all).setVisible(!(getAlbum().getSelectedCount() == mediaAdapter
                    .getItemCount() || selectedMedias.size() == size
                    || (selectedMedias.size() == favouriteslist.size() && fav_photos)));
        }
        togglePrimaryToolbarOptions(menu);
        updateSelectedStuff();
        if (!albumsMode)
            visible = getAlbum().getSelectedCount() > 0;
        else
            visible = false;
        menu.findItem(R.id.action_copy).setVisible(visible);
        menu.findItem(R.id.action_move).setVisible((visible || editMode) && !fav_photos);
        menu.findItem(R.id.action_add_favourites).setVisible((visible || editMode) && (!albumsMode && !fav_photos));
        menu.findItem(R.id.excludeAlbumButton).setVisible(editMode && !all_photos && albumsMode && !fav_photos);
        menu.findItem(R.id.zipAlbumButton).setVisible(editMode && !all_photos && albumsMode && !fav_photos
                && !hidden && getAlbums().getSelectedCount() == 1);
        menu.findItem(R.id.delete_action).setVisible((!albumsMode || editMode) && (!all_photos || editMode));
        if (fav_photos && favouriteslist.size() == 0) {
            menu.findItem(R.id.delete_action).setVisible(false);
            menu.findItem(R.id.sort_action).setVisible(false);
        }
        menu.findItem(R.id.hideAlbumButton).setVisible(!all_photos && !fav_photos && getAlbums().getSelectedCount() > 0);
        menu.findItem(R.id.clear_album_preview).setVisible(!albumsMode && getAlbum().hasCustomCover() && !fav_photos && !all_photos);
        menu.findItem(R.id.renameAlbum).setVisible(((albumsMode && getAlbums().getSelectedCount() == 1)
                || (!albumsMode && !editMode)) && (!all_photos && !fav_photos));
        if (getAlbums().getSelectedCount() == 1)
            menu.findItem(R.id.set_pin_album).setTitle(getAlbums().getSelectedAlbum(0).isPinned() ?
getString(R.string.un_pin) : getString(R.string.pin)); menu.findItem(R.id.set_pin_album).setVisible(albumsMode && getAlbums().getSelectedCount() == 1); menu.findItem(R.id.setAsAlbumPreview).setVisible(!albumsMode && !all_photos && getAlbum() .getSelectedCount() == 1); menu.findItem(R.id.affixPhoto).setVisible((!albumsMode && (getAlbum().getSelectedCount() > 1) || selectedMedias.size() > 1) && !fav_photos); if (albumsMode) menu.findItem(R.id.action_move).setVisible(getAlbums().getSelectedCount() == 1); return super.onPrepareOptionsMenu(menu); } private void togglePrimaryToolbarOptions(final Menu menu) { menu.setGroupVisible(R.id.general_action, !editMode); } //endregion @Override public boolean onOptionsItemSelected(MenuItem item) { getNavigationBar(); switch (item.getItemId()) { case R.id.all_photos: if (!all_photos) { boolean check_security_on_local = true; check_security_on_local = SP.getBoolean(getString(R.string.preference_use_password_on_folder), check_security_on_local); if(securityObj.isActiveSecurity() && check_security_on_local){ final boolean[] passco = {false}; AlertDialog.Builder passwordDialogBuilder = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); final EditText editTextPassword = securityObj.getInsertPasswordDialog(LFMainActivity.this, passwordDialogBuilder); editTextPassword.setHintTextColor(getResources().getColor(R.color.grey, null)); passwordDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { } }); passwordDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null); editTextPassword.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void afterTextChanged(Editable editable) { if(securityObj.getTextInputLayout().getVisibility() == View.VISIBLE && !passco[0]){ securityObj.getTextInputLayout().setVisibility(View.INVISIBLE); } else{ passco[0]=false; } } }); final AlertDialog passwordDialog = passwordDialogBuilder.create(); passwordDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE); passwordDialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), passwordDialog); passwordDialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View .OnClickListener() { @Override public void onClick(View v) { if (securityObj.checkPassword(editTextPassword.getText().toString())) { all_photos = true; displayAllMedia(true); passwordDialog.dismiss(); } else { passco[0] = true; securityObj.getTextInputLayout().setVisibility(View.VISIBLE); editTextPassword.getText().clear(); editTextPassword.requestFocus(); } } }); } else{ all_photos = true; displayAllMedia(true); } } else { displayAlbums(); } return true; case R.id.album_details: AlertDialog.Builder detailsDialogBuilder = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); AlertDialog detailsDialog; detailsDialog = AlertDialogsHelper.getAlbumDetailsDialog(this, detailsDialogBuilder, getAlbums().getSelectedAlbum(0)); detailsDialog.setButton(DialogInterface.BUTTON_POSITIVE, getString(R.string .ok_action).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int 
which) { finishEditMode(); } }); detailsDialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, getAccentColor(), detailsDialog); return true; case R.id.select_all: if (albumsMode) { getAlbums().selectAllAlbums(); albumsAdapter.notifyDataSetChanged(); } else { if (!all_photos && !fav_photos) { getAlbum().selectAllPhotos(); mediaAdapter.notifyDataSetChanged(); } else if(all_photos && !fav_photos){ clearSelectedPhotos(); selectAllPhotos(); mediaAdapter.notifyDataSetChanged(); } else if(fav_photos && !all_photos){ clearSelectedPhotos(); selectAllPhotos(); Collections.sort(favouriteslist, MediaComparators.getComparator(getAlbum().settings.getSortingMode(), getAlbum().settings.getSortingOrder())); mediaAdapter.swapDataSet(favouriteslist, true); } } invalidateOptionsMenu(); return true; case R.id.create_gif: new CreateGIFTask().execute(); return true; case R.id.create_zip: path = new ArrayList<>(); if(!albumsMode && !all_photos && !fav_photos){ for(Media m: getAlbum().getSelectedMedia()){ path.add(m.getPath()); } }else if(!albumsMode && all_photos && !fav_photos){ for(Media m: selectedMedias){ path.add(m.getPath()); } } new CreateZipTask().execute(); return true; case R.id.set_pin_album: getAlbums().getSelectedAlbum(0).settings.togglePin(getApplicationContext()); getAlbums().sortAlbums(); getAlbums().clearSelectedAlbums(); invalidateOptionsMenu(); albumsAdapter.notifyDataSetChanged(); return true; case R.id.settings: startActivity(new Intent(LFMainActivity.this, SettingsActivity.class)); return true; case R.id.hideAlbumButton: final AlertDialog.Builder hideDialogBuilder = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); AlertDialogsHelper.getTextDialog(LFMainActivity.this, hideDialogBuilder, hidden ? R.string.unhide : R.string.hide, hidden ? R.string.unhide_album_message : R.string.hide_album_message, null); hideDialogBuilder.setPositiveButton(getString(hidden ? 
R.string.unhide : R.string.hide).toUpperCase(), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { if (albumsMode) { if (hidden) getAlbums().unHideSelectedAlbums(getApplicationContext()); else getAlbums().hideSelectedAlbums(getApplicationContext()); albumsAdapter.notifyDataSetChanged(); invalidateOptionsMenu(); } else { if (hidden) getAlbums().unHideAlbum(getAlbum().getPath(), getApplicationContext()); else getAlbums().hideAlbum(getAlbum().getPath(), getApplicationContext()); displayAlbums(true); } } }); if (!hidden) { hideDialogBuilder.setNeutralButton(this.getString(R.string.exclude).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { if (albumsMode) { getAlbums().excludeSelectedAlbums(getApplicationContext()); albumsAdapter.notifyDataSetChanged(); invalidateOptionsMenu(); } else { customAlbumsHelper.excludeAlbum(getAlbum().getPath()); displayAlbums(true); } } }); } hideDialogBuilder.setNegativeButton(this.getString(R.string.cancel).toUpperCase(), null); AlertDialog alertDialog = hideDialogBuilder.create(); alertDialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE, DialogInterface.BUTTON_NEUTRAL}, getAccentColor(), alertDialog); return true; case R.id.delete_action: getNavigationBar(); class DeletePhotos extends AsyncTask<String, Integer, Boolean> { private boolean succ = false; private int imagesUnfav = 0; @Override protected void onPreExecute() { swipeRefreshLayout.setRefreshing(true); super.onPreExecute(); } @Override protected Boolean doInBackground(String... arg0) { //if in album mode, delete selected albums if (albumsMode) { if (AlertDialogsHelper.check) { succ = addToTrash(); if (succ) { addTrashObjectsToRealm(selectedAlbumMedia); succ = getAlbums().deleteSelectedAlbums(LFMainActivity.this); } } else { succ = getAlbums().deleteSelectedAlbums(LFMainActivity.this); } } else { // if in selection mode, delete selected media if (editMode) { if (!all_photos && !fav_photos) { checkForShare(getAlbum().getSelectedMedia()); //clearSelectedPhotos(); if (AlertDialogsHelper.check) { succ = addToTrash(); if (succ) { addTrashObjectsToRealm(getAlbum().getSelectedMedia()); } getAlbum().clearSelectedPhotos(); } else { succ = getAlbum().deleteSelectedMedia(getApplicationContext()); } } else if (all_photos && !fav_photos) { checkForShare(selectedMedias); // addToTrash(); if (AlertDialogsHelper.check) { succ = addToTrash(); if (succ) { addTrashObjectsToRealm(selectedMedias); } } else { for (Media media : selectedMedias) { String[] projection = {MediaStore.Images.Media._ID}; // Match on the file path String selection = MediaStore.Images.Media.DATA + " = ?"; String[] selectionArgs = new String[]{media.getPath()}; // Query for the ID of the media matching the file path Uri queryUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI; ContentResolver contentResolver = getContentResolver(); Cursor c = contentResolver .query(queryUri, projection, selection, selectionArgs, null); if (c.moveToFirst()) { // We found the ID. 
Deleting the item via the content provider will also remove the file long id = c.getLong(c.getColumnIndexOrThrow(MediaStore.Images.Media._ID)); Uri deleteUri = ContentUris .withAppendedId(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, id); contentResolver.delete(deleteUri, null, null); succ = true; } else { succ = false; // File not found in media store DB } c.close(); } } } else if (!all_photos && fav_photos) { checkForShare(selectedMedias); realm = Realm.getDefaultInstance(); realm.executeTransaction(new Realm.Transaction() { @Override public void execute(Realm realm) { for (int i = 0; i < selectedMedias.size(); i++) { RealmResults<FavouriteImagesModel> favouriteImagesModels = realm.where (FavouriteImagesModel.class) .equalTo("path", selectedMedias.get(i).getPath()).findAll(); imagesUnfav++; favouriteImagesModels.deleteAllFromRealm(); } } }); succ = true; } } // if not in selection mode, delete current album entirely else if (!editMode) { if (!fav_photos) { checkForShare(getAlbum().getMedia()); if (AlertDialogsHelper.check) { succ = addToTrash(); if (succ) { addTrashObjectsToRealm(getAlbum().getMedia()); } //succ = getAlbums().deleteAlbum(getAlbum(), getApplicationContext()); getAlbum().getMedia().clear(); } else { succ = getAlbums().deleteAlbum(getAlbum(), getApplicationContext()); getAlbum().getMedia().clear(); } } else { checkForShare(favouriteslist); Realm realm = Realm.getDefaultInstance(); realm.executeTransaction(new Realm.Transaction() { @Override public void execute(Realm realm) { RealmQuery<FavouriteImagesModel> favouriteImagesModelRealmQuery = realm .where(FavouriteImagesModel.class); succ = favouriteImagesModelRealmQuery.findAll().deleteAllFromRealm(); favouriteslist.clear(); } }); } } } return succ; } @Override protected void onPostExecute(Boolean result) { if (result) { // in albumsMode, the selected albums have been deleted. if (albumsMode) { getAlbums().clearSelectedAlbums(); albumsAdapter.notifyDataSetChanged(); } else { if (!all_photos && !fav_photos) { //if all media in current album have been deleted, delete current album too. 
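                                        // and return to the albums overview, reporting the result in a snackbar.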
if (getAlbum().getMedia().size() == 0) { getAlbums().removeCurrentAlbum(); albumsAdapter.notifyDataSetChanged(); displayAlbums(); showsnackbar(succ); swipeRefreshLayout.setRefreshing(true); } else mediaAdapter.swapDataSet(getAlbum().getMedia(), false); } else if(all_photos && !fav_photos){ clearSelectedPhotos(); listAll = StorageProvider.getAllShownImages(LFMainActivity.this); media = listAll; size = listAll.size(); showsnackbar(succ); Collections.sort(listAll, MediaComparators.getComparator(getAlbum().settings .getSortingMode(), getAlbum().settings.getSortingOrder())); mediaAdapter.swapDataSet(listAll, false); } else if(fav_photos && !all_photos){ if (imagesUnfav >= 2) SnackBarHandler.show(mDrawerLayout, imagesUnfav + " " + getResources().getString(R.string.remove_from_favourite)); else SnackBarHandler.show(mDrawerLayout, getResources().getString(R.string.single_image_removed)); clearSelectedPhotos(); getfavouriteslist(); new FavouritePhotos(activityContext).execute(); } } } else requestSdCardPermissions(); invalidateOptionsMenu(); checkNothing(); swipeRefreshLayout.setRefreshing(false); } } AlertDialog.Builder deleteDialog = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); if(fav_photos && !all_photos) AlertDialogsHelper.getTextDialog(this, deleteDialog, R.string.remove_from_favourites, R.string.remove_favourites_body, null); else AlertDialogsHelper.getTextCheckboxDialog(this, deleteDialog, R.string.delete, albumsMode || !editMode ? R.string.delete_album_message : R.string.delete_photos_message, null, getResources().getString(R.string.move_to_trashbin), getAccentColor()); deleteDialog.setNegativeButton(getString(R.string.cancel).toUpperCase(), null); deleteDialog.setPositiveButton(fav_photos && !all_photos ? getString(R.string.remove).toUpperCase() : getString(R.string.delete).toUpperCase(), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { if (securityObj.isActiveSecurity() && securityObj.isPasswordOnDelete()) { final boolean passco[] = {false}; AlertDialog.Builder passwordDialogBuilder = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); final EditText editTextPassword = securityObj.getInsertPasswordDialog(LFMainActivity.this, passwordDialogBuilder); editTextPassword.setHintTextColor(getResources().getColor(R.color.grey, null)); passwordDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null); passwordDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { //This should be empty. 
It will be overwritten later //to avoid dismiss of the dialog on wrong password } }); editTextPassword.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void afterTextChanged(Editable editable) { if(securityObj.getTextInputLayout().getVisibility() == View.VISIBLE && !passco[0]){ securityObj.getTextInputLayout().setVisibility(View.INVISIBLE); } else{ passco[0]=false; } } }); final AlertDialog passwordDialog = passwordDialogBuilder.create(); passwordDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE); passwordDialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), passwordDialog); passwordDialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { // if password is correct, call DeletePhotos and perform deletion if (securityObj.checkPassword(editTextPassword.getText().toString())) { passwordDialog.dismiss(); new DeletePhotos().execute(); } // if password is incorrect, don't delete and notify user of incorrect password else { passco[0] = true; securityObj.getTextInputLayout().setVisibility(View.VISIBLE); SnackBarHandler.showWithBottomMargin(mDrawerLayout, getString(R.string.wrong_password), navigationView.getHeight()); editTextPassword.getText().clear(); editTextPassword.requestFocus(); } } }); } else { new DeletePhotos().execute(); } } }); AlertDialog alertDialogDelete = deleteDialog.create(); alertDialogDelete.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), alertDialogDelete); return true; case R.id.excludeAlbumButton: final AlertDialog.Builder excludeDialogBuilder = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); final View excludeDialogLayout = getLayoutInflater().inflate(R.layout.dialog_exclude, null); TextView textViewExcludeTitle = (TextView) excludeDialogLayout.findViewById(R.id.text_dialog_title); TextView textViewExcludeMessage = (TextView) excludeDialogLayout.findViewById(R.id.text_dialog_message); final Spinner spinnerParents = (Spinner) excludeDialogLayout.findViewById(R.id.parents_folder); spinnerParents.getBackground().setColorFilter(getIconColor(), PorterDuff.Mode.SRC_ATOP); ((CardView) excludeDialogLayout.findViewById(R.id.message_card)).setCardBackgroundColor(getCardBackgroundColor()); textViewExcludeTitle.setBackgroundColor(getPrimaryColor()); textViewExcludeTitle.setText(getString(R.string.exclude)); if ((albumsMode && getAlbums().getSelectedCount() > 1)) { textViewExcludeMessage.setText(R.string.exclude_albums_message); spinnerParents.setVisibility(View.GONE); } else { textViewExcludeMessage.setText(R.string.exclude_album_message); spinnerParents.setAdapter(getSpinnerAdapter(albumsMode ? 
getAlbums().getSelectedAlbum(0).getParentsFolders() : getAlbum().getParentsFolders())); } textViewExcludeMessage.setTextColor(getTextColor()); excludeDialogBuilder.setView(excludeDialogLayout); excludeDialogBuilder.setPositiveButton(this.getString(R.string.exclude).toUpperCase(), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { if ((albumsMode && getAlbums().getSelectedCount() > 1)) { getAlbums().excludeSelectedAlbums(getApplicationContext()); albumsAdapter.notifyDataSetChanged(); invalidateOptionsMenu(); } else { customAlbumsHelper.excludeAlbum(spinnerParents.getSelectedItem().toString()); finishEditMode(); displayAlbums(true); } } }); excludeDialogBuilder.setNegativeButton(this.getString(R.string.cancel).toUpperCase(), null); AlertDialog alertDialogExclude = excludeDialogBuilder.create(); alertDialogExclude.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), alertDialogExclude); return true; case R.id.zipAlbumButton: path = new ArrayList<>(); File folder = new File(getAlbums().getSelectedAlbum(0).getPath() + "/"); File[] fpath = folder.listFiles(); for (int i = 0; i < fpath.length; i++) { if (fpath[i].getPath().endsWith(".jpg")||fpath[i].getPath().endsWith(".jpeg")||fpath[i].getPath().endsWith(".png")) { path.add(fpath[i].getPath()); } } new ZipAlbumTask().execute(); return true; case R.id.sharePhotos: Intent intent = new Intent(); intent.setAction(Intent.ACTION_SEND_MULTIPLE); intent.putExtra(Intent.EXTRA_SUBJECT, getString(R.string.sent_to_action)); // list of all selected media in current album ArrayList<Uri> files = new ArrayList<Uri>(); if (!all_photos && !fav_photos) { for (Media f : getAlbum().getSelectedMedia()) files.add(f.getUri()); } else if (all_photos && !fav_photos) { for (Media f : selectedMedias) files.add(f.getUri()); } else if (fav_photos && !all_photos) { for (Media m : selectedMedias) { files.add(m.getUri()); } } if (!all_photos && !fav_photos) { for (Media f : getAlbum().getSelectedMedia()) { Realm realm = Realm.getDefaultInstance(); realm.beginTransaction(); UploadHistoryRealmModel uploadHistory; uploadHistory = realm.createObject(UploadHistoryRealmModel.class); uploadHistory.setName("OTHERS"); uploadHistory.setPathname(f.getPath()); uploadHistory.setDatetime(new SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(new Date())); uploadHistory.setStatus(getString(R.string.upload_done)); realm.commitTransaction(); Intent result = new Intent(); result.putExtra(Constants.SHARE_RESULT, 0); setResult(RESULT_OK, result); } } else if (all_photos || fav_photos) { for (Media m : selectedMedias) { Realm realm = Realm.getDefaultInstance(); realm.beginTransaction(); UploadHistoryRealmModel uploadHistory; uploadHistory = realm.createObject(UploadHistoryRealmModel.class); uploadHistory.setName("OTHERS"); uploadHistory.setPathname(m.getPath()); uploadHistory.setDatetime(new SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(new Date())); uploadHistory.setStatus(getString(R.string.upload_done)); realm.commitTransaction(); Intent result = new Intent(); result.putExtra(Constants.SHARE_RESULT, 0); setResult(RESULT_OK, result); } } String extension = files.get(0).getPath().substring(files.get(0).getPath().lastIndexOf('.') + 1); String mimeType = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension); intent.putParcelableArrayListExtra(Intent.EXTRA_STREAM, files); if (!all_photos && !fav_photos) 
intent.setType(StringUtils.getGenericMIME(getAlbum().getSelectedMedia(0).getMimeType())); else if (all_photos && !fav_photos) intent.setType(mimeType); else if (fav_photos && !all_photos) intent.setType(mimeType); finishEditMode(); startActivity(Intent.createChooser(intent, getResources().getText(R.string.send_to))); return true; case R.id.name_sort_action: if (albumsMode) { getAlbums().setDefaultSortingMode(NAME); new SortingUtilsAlbums(activityContext).execute(); } else { new SortModeSet(activityContext).execute(NAME); if (!all_photos && !fav_photos) { new SortingUtilsPhtots(activityContext).execute(); } else if (all_photos && !fav_photos) { new SortingUtilsListAll(activityContext).execute(); } else if (fav_photos && !all_photos) { new SortingUtilsFavouritelist(activityContext).execute(); } } item.setChecked(true); return true; case R.id.date_taken_sort_action: if (albumsMode) { getAlbums().setDefaultSortingMode(DATE); new SortingUtilsAlbums(activityContext).execute(); } else { new SortModeSet(activityContext).execute(DATE); if (!all_photos && !fav_photos) { new SortingUtilsPhtots(activityContext).execute(); } else if (all_photos && !fav_photos) { new SortingUtilsListAll(activityContext).execute(); } else if (fav_photos && !all_photos) { new SortingUtilsFavouritelist(activityContext).execute(); } } item.setChecked(true); return true; case R.id.size_sort_action: if (albumsMode) { getAlbums().setDefaultSortingMode(SIZE); new SortingUtilsAlbums(activityContext).execute(); } else { new SortModeSet(activityContext).execute(SIZE); if (!all_photos && !fav_photos) { new SortingUtilsPhtots(activityContext).execute(); } else if (all_photos && !fav_photos) { new SortingUtilsListAll(activityContext).execute(); } else if (fav_photos && !all_photos) { new SortingUtilsFavouritelist(activityContext).execute(); } } item.setChecked(true); return true; case R.id.numeric_sort_action: if (albumsMode) { getAlbums().setDefaultSortingMode(NUMERIC); new SortingUtilsAlbums(activityContext).execute(); } else { new SortModeSet(activityContext).execute(NUMERIC); if (!all_photos && !fav_photos) { new SortingUtilsPhtots(activityContext).execute(); } else if (all_photos && !fav_photos) { new SortingUtilsListAll(activityContext).execute(); } else if (fav_photos && !all_photos) { new SortingUtilsFavouritelist(activityContext).execute(); } } item.setChecked(true); return true; case R.id.ascending_sort_action: if (albumsMode) { getAlbums().setDefaultSortingAscending(item.isChecked() ? SortingOrder.DESCENDING : SortingOrder.ASCENDING); new SortingUtilsAlbums(activityContext).execute(); } else { getAlbum().setDefaultSortingAscending(getApplicationContext(), item.isChecked() ? 
SortingOrder.DESCENDING : SortingOrder.ASCENDING); if (!all_photos && !fav_photos) { new SortingUtilsPhtots(activityContext).execute(); } else if (all_photos && !fav_photos) { new SortingUtilsListAll(activityContext).execute(); } else if (fav_photos && !all_photos) { new SortingUtilsFavouritelist(activityContext).execute(); } } item.setChecked(!item.isChecked()); return true; //region Affix case R.id.affixPhoto: //region Async MediaAffix class affixMedia extends AsyncTask<Affix.Options, Integer, Void> { private AlertDialog dialog; @Override protected void onPreExecute() { AlertDialog.Builder progressDialog = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); dialog = AlertDialogsHelper.getProgressDialog(LFMainActivity.this, progressDialog, getString(R.string.affix), getString(R.string.affix_text)); dialog.show(); super.onPreExecute(); } @Override protected Void doInBackground(Affix.Options... arg0) { ArrayList<Bitmap> bitmapArray = new ArrayList<Bitmap>(); if (!all_photos) { for (int i = 0; i < getAlbum().getSelectedCount(); i++) { bitmapArray.add(getBitmap(getAlbum().getSelectedMedia(i).getPath())); } } else { for (int i = 0; i < selectedMedias.size(); i++) { bitmapArray.add(getBitmap(selectedMedias.get(i).getPath())); } } if (bitmapArray.size() > 1) Affix.AffixBitmapList(getApplicationContext(), bitmapArray, arg0[0]); else runOnUiThread(new Runnable() { @Override public void run() { SnackBarHandler.showWithBottomMargin(mDrawerLayout, getString(R.string.affix_error), navigationView.getHeight()); } }); return null; } @Override protected void onPostExecute(Void result) { editMode = false; if (!all_photos) getAlbum().clearSelectedPhotos(); else clearSelectedPhotos(); dialog.dismiss(); invalidateOptionsMenu(); mediaAdapter.notifyDataSetChanged(); if (!all_photos) new PreparePhotosTask(activityContext).execute(); else clearSelectedPhotos(); } } //endregion final AlertDialog.Builder builder = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); final View dialogLayout = getLayoutInflater().inflate(R.layout.dialog_affix, null); dialogLayout.findViewById(R.id.affix_title).setBackgroundColor(getPrimaryColor()); ((CardView) dialogLayout.findViewById(R.id.affix_card)).setCardBackgroundColor(getCardBackgroundColor()); //ITEMS final SwitchCompat swVertical = (SwitchCompat) dialogLayout.findViewById(R.id.affix_vertical_switch); final SwitchCompat swSaveHere = (SwitchCompat) dialogLayout.findViewById(R.id.save_here_switch); final RadioGroup radioFormatGroup = (RadioGroup) dialogLayout.findViewById(R.id.radio_format); final TextView txtQuality = (TextView) dialogLayout.findViewById(R.id.affix_quality_title); final SeekBar seekQuality = (SeekBar) dialogLayout.findViewById(R.id.seek_bar_quality); //region THEME STUFF setScrollViewColor((ScrollView) dialogLayout.findViewById(R.id.affix_scrollView)); /** TextViews **/ int color = getTextColor(); ((TextView) dialogLayout.findViewById(R.id.affix_vertical_title)).setTextColor(color); ((TextView) dialogLayout.findViewById(R.id.compression_settings_title)).setTextColor(color); ((TextView) dialogLayout.findViewById(R.id.save_here_title)).setTextColor(color); /** Sub TextViews **/ color = getTextColor(); ((TextView) dialogLayout.findViewById(R.id.save_here_sub)).setTextColor(color); ((TextView) dialogLayout.findViewById(R.id.affix_vertical_sub)).setTextColor(color); ((TextView) dialogLayout.findViewById(R.id.affix_format_sub)).setTextColor(color); txtQuality.setTextColor(color); /** Icons **/ color = getIconColor(); ((IconicsImageView) 
dialogLayout.findViewById(R.id.affix_quality_icon)).setColor(color); ((IconicsImageView) dialogLayout.findViewById(R.id.affix_format_icon)).setColor(color); ((IconicsImageView) dialogLayout.findViewById(R.id.affix_vertical_icon)).setColor(color); ((IconicsImageView) dialogLayout.findViewById(R.id.save_here_icon)).setColor(color); seekQuality.getProgressDrawable().setColorFilter(new PorterDuffColorFilter(getAccentColor(), PorterDuff.Mode.SRC_IN)); seekQuality.getThumb().setColorFilter(new PorterDuffColorFilter(getAccentColor(), PorterDuff.Mode.SRC_IN)); updateRadioButtonColor((RadioButton) dialogLayout.findViewById(R.id.radio_jpeg)); updateRadioButtonColor((RadioButton) dialogLayout.findViewById(R.id.radio_png)); updateRadioButtonColor((RadioButton) dialogLayout.findViewById(R.id.radio_webp)); updateSwitchColor(swVertical, getAccentColor()); updateSwitchColor(swSaveHere, getAccentColor()); //endregion seekQuality.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { txtQuality.setText(Html.fromHtml( String.format(Locale.getDefault(), "%s <b>%d</b>", getString(R.string.quality), progress))); } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onStopTrackingTouch(SeekBar seekBar) { } }); seekQuality.setProgress(90); //DEFAULT swVertical.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { updateSwitchColor(swVertical, getAccentColor()); } }); swSaveHere.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { updateSwitchColor(swSaveHere, getAccentColor()); } }); builder.setView(dialogLayout); builder.setPositiveButton(this.getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { Bitmap.CompressFormat compressFormat; switch (radioFormatGroup.getCheckedRadioButtonId()) { case R.id.radio_jpeg: default: compressFormat = Bitmap.CompressFormat.JPEG; break; case R.id.radio_png: compressFormat = Bitmap.CompressFormat.PNG; break; case R.id.radio_webp: compressFormat = Bitmap.CompressFormat.WEBP; break; } Affix.Options options = new Affix.Options( swSaveHere.isChecked() ? 
getAlbum().getPath() : Affix.getDefaultDirectoryPath(), compressFormat, seekQuality.getProgress(), swVertical.isChecked()); new affixMedia().execute(options); } }); builder.setNegativeButton(this.getString(R.string.cancel).toUpperCase(), null); AlertDialog affixDialog = builder.create(); affixDialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), affixDialog); return true; //endregion case R.id.action_move: final Snackbar[] snackbar = {null}; final ArrayList<Media> dr = getselecteditems(); final String[] pathofalbum = {null}; bottomSheetDialogFragment = new SelectAlbumBottomSheet(); bottomSheetDialogFragment.setTitle(getString(R.string.move_to)); if (!albumsMode) { bottomSheetDialogFragment.setSelectAlbumInterface(new SelectAlbumBottomSheet.SelectAlbumInterface() { @Override public void folderSelected(final String path) { final ArrayList<Media> stringio = storeTemporaryphotos(path); pathofalbum[0] = path; swipeRefreshLayout.setRefreshing(true); int numberOfImagesMoved; if ((numberOfImagesMoved = getAlbum().moveSelectedMedia(getApplicationContext(), path)) > 0) { if (getAlbum().getMedia().size() == 0) { getAlbums().removeCurrentAlbum(); albumsAdapter.notifyDataSetChanged(); displayAlbums(); } mediaAdapter.swapDataSet(getAlbum().getMedia(), false); finishEditMode(); invalidateOptionsMenu(); checkForFavourites(path, dr); checkDescription(path, dr); if (numberOfImagesMoved > 1){ snackbar[0] = SnackBarHandler.showWithBottomMargin2(mDrawerLayout, getString(R.string.photos_moved_successfully), navigationView.getHeight(), Snackbar.LENGTH_SHORT); snackbar[0].setAction("UNDO", new View.OnClickListener() { @Override public void onClick(View view) { getAlbum().moveAllMedia(getApplicationContext(), getAlbum().getPath(), stringio); } }); snackbar[0].show(); } else{ Snackbar snackbar1 = SnackBarHandler.showWithBottomMargin2(mDrawerLayout, getString(R.string.photo_moved_successfully), navigationView.getHeight(), Snackbar.LENGTH_SHORT); snackbar1.setAction("UNDO", new View.OnClickListener() { @Override public void onClick(View view) { getAlbum().moveAllMedia(getApplicationContext(), getAlbum().getPath(), stringio); } }); snackbar1.show(); } } else if (numberOfImagesMoved == -1 && getAlbum().getPath().equals(path)) { //moving to the same folder AlertDialog.Builder alertDialog = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); alertDialog.setCancelable(false); AlertDialogsHelper.getTextDialog(LFMainActivity.this, alertDialog, R.string.move_to, R.string.move, null); alertDialog.setNeutralButton(getString(R.string.make_copies).toUpperCase(), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { new CopyPhotos(path, true, false, activityContext).execute(); } }); alertDialog.setPositiveButton(getString(R.string.cancel).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int id) { dialog.cancel(); } }); alertDialog.setNegativeButton(getString(R.string.replace).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int id) { finishEditMode(); invalidateOptionsMenu(); SnackBarHandler.showWithBottomMargin(mDrawerLayout, getString(R.string.photo_moved_successfully), navigationView.getHeight()); } }); AlertDialog alert = alertDialog.create(); alert.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, 
DialogInterface.BUTTON_NEGATIVE, DialogInterface.BUTTON_NEUTRAL}, getAccentColor(), alert); } else requestSdCardPermissions(); swipeRefreshLayout.setRefreshing(false); bottomSheetDialogFragment.dismiss(); } }); bottomSheetDialogFragment.show(getSupportFragmentManager(), bottomSheetDialogFragment.getTag()); } else { AlertDialog.Builder alertDialogMoveAll = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); alertDialogMoveAll.setCancelable(false); AlertDialogsHelper.getTextDialog(LFMainActivity.this, alertDialogMoveAll, R.string.move_to, R.string.move_all_photos, null); alertDialogMoveAll.setPositiveButton(R.string.ok_action, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { bottomSheetDialogFragment.show(getSupportFragmentManager(), bottomSheetDialogFragment.getTag()); } }); alertDialogMoveAll.setNegativeButton(getString(R.string.cancel).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.cancel(); } }); bottomSheetDialogFragment.setSelectAlbumInterface(new SelectAlbumBottomSheet.SelectAlbumInterface() { @Override public void folderSelected(String path) { swipeRefreshLayout.setRefreshing(true); if (getAlbums().moveSelectedAlbum(LFMainActivity.this, path)) { SnackBarHandler.showWithBottomMargin(mDrawerLayout, getString(R.string.moved_target_folder_success), SnackBarHandler.LONG); getAlbums().deleteSelectedAlbums(LFMainActivity.this); getAlbums().clearSelectedAlbums(); new PrepareAlbumTask(activityContext).execute(); } else { requestSdCardPermissions(); swipeRefreshLayout.setRefreshing(false); invalidateOptionsMenu(); } bottomSheetDialogFragment.dismiss(); } }); AlertDialog dialog = alertDialogMoveAll.create(); dialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface .BUTTON_NEGATIVE}, getAccentColor(), dialog); } return true; case R.id.action_add_favourites: new AddToFavourites().execute(); return true; case R.id.action_copy: bottomSheetDialogFragment = new SelectAlbumBottomSheet(); bottomSheetDialogFragment.setTitle(getString(R.string.copy_to)); bottomSheetDialogFragment.setSelectAlbumInterface(new SelectAlbumBottomSheet.SelectAlbumInterface() { @Override public void folderSelected(String path) { new CopyPhotos(path, false, true, activityContext).execute(); bottomSheetDialogFragment.dismiss(); } }); bottomSheetDialogFragment.show(getSupportFragmentManager(), bottomSheetDialogFragment.getTag()); return true; case R.id.renameAlbum: AlertDialog.Builder renameDialogBuilder = new AlertDialog.Builder(LFMainActivity.this, getDialogStyle()); final EditText editTextNewName = new EditText(getApplicationContext()); editTextNewName.setText(albumsMode ? getAlbums().getSelectedAlbum(0).getName() : getAlbum().getName()); editTextNewName.setSelectAllOnFocus(true); editTextNewName.setHint(R.string.description_hint); editTextNewName.setHintTextColor(ContextCompat.getColor(getApplicationContext(), R.color.grey)); editTextNewName.setHighlightColor(ContextCompat.getColor(getApplicationContext(), R.color.cardview_shadow_start_color)); editTextNewName.selectAll(); editTextNewName.setSingleLine(false); final String albumName = albumsMode ? 
getAlbums().getSelectedAlbum(0).getName() : getAlbum().getName(); AlertDialogsHelper.getInsertTextDialog(LFMainActivity.this, renameDialogBuilder, editTextNewName, R.string.rename_album, null); renameDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null); renameDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { //This should br empty it will be overwrite later //to avoid dismiss of the dialog } }); final AlertDialog renameDialog = renameDialogBuilder.create(); renameDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_IS_FORWARD_NAVIGATION); editTextNewName.setSelection(editTextNewName.getText().toString().length()); renameDialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface .BUTTON_NEGATIVE}, getAccentColor(), renameDialog); renameDialog.getButton(AlertDialog.BUTTON_POSITIVE).setEnabled(false); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, ContextCompat.getColor(LFMainActivity.this, R.color.grey), renameDialog); editTextNewName.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void afterTextChanged(Editable editable) { if (TextUtils.isEmpty(editable)) { // Disable ok button renameDialog.getButton( AlertDialog.BUTTON_POSITIVE).setEnabled(false); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, ContextCompat.getColor(LFMainActivity.this, R.color.grey), renameDialog); } else { // Something into edit text. Enable the button. 
renameDialog.getButton( AlertDialog.BUTTON_POSITIVE).setEnabled(true); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, getAccentColor(), renameDialog); } } }); renameDialog.getButton(DialogInterface.BUTTON_POSITIVE).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View dialog) { boolean rename = false; if (editTextNewName.length() != 0) { swipeRefreshLayout.setRefreshing(true); boolean success = false; if (albumsMode) { if (!editTextNewName.getText().toString().equals(albumName)) { int index = getAlbums().dispAlbums.indexOf(getAlbums().getSelectedAlbum(0)); getAlbums().getAlbum(index).updatePhotos(getApplicationContext()); success = getAlbums().getAlbum(index).renameAlbum(getApplicationContext(), editTextNewName.getText().toString()); albumsAdapter.notifyItemChanged(index); } else { SnackBarHandler.showWithBottomMargin(mDrawerLayout, getString(R.string.rename_no_change), navigationView.getHeight()); rename = true; } } else { if (!editTextNewName.getText().toString().equals(albumName)) { success = getAlbum().renameAlbum(getApplicationContext(), editTextNewName.getText().toString()); toolbar.setTitle(getAlbum().getName()); mediaAdapter.notifyDataSetChanged(); } else { SnackBarHandler.showWithBottomMargin(mDrawerLayout, getString(R.string.rename_no_change), navigationView.getHeight()); rename = true; } } renameDialog.dismiss(); if (success) { SnackBarHandler.showWithBottomMargin(mDrawerLayout, getString(R.string.rename_succes), navigationView.getHeight()); getAlbums().clearSelectedAlbums(); invalidateOptionsMenu(); } else if (!rename) { SnackBarHandler.showWithBottomMargin(mDrawerLayout, getString(R.string.rename_error), navigationView.getHeight()); requestSdCardPermissions(); } swipeRefreshLayout.setRefreshing(false); } else { SnackBarHandler.showWithBottomMargin(mDrawerLayout, getString(R.string.insert_something), navigationView.getHeight()); editTextNewName.requestFocus(); } } }); return true; case R.id.clear_album_preview: if (!albumsMode) { getAlbum().removeCoverAlbum(getApplicationContext()); } return true; case R.id.setAsAlbumPreview: if (!albumsMode) { getAlbum().setSelectedPhotoAsPreview(getApplicationContext()); finishEditMode(); } return true; default: // If we got here, the user's action was not recognized. // Invoke the superclass to handle it. 
return super.onOptionsItemSelected(item); } } private void checkForShare(ArrayList<Media> media){ realm = Realm.getDefaultInstance(); RealmQuery<UploadHistoryRealmModel> uploadHistoryRealmModelRealmQuery = realm.where(UploadHistoryRealmModel.class); for(Media m: media){ checkForUploadHistory(m.getPath(), uploadHistoryRealmModelRealmQuery); } } private void checkForUploadHistory(String path, RealmQuery<UploadHistoryRealmModel> query){ for(int i = 0; i < query.count(); i++){ if(query.findAll().get(i).getPathname().equals(path) && backupHistory(path)){ uploadToRealm(path); } } } private boolean backupHistory(String path){ boolean succ = false; File file = new File(Environment.getExternalStorageDirectory() + "/" +".nomedia/" + "uploadHistory"); if(file.exists() && file.isDirectory()){ succ = ContentHelper.copyFile(getApplicationContext(), new File(path), file); //succ = getAlbum().moveAnyMedia(getApplicationContext(), file.getAbsolutePath(), path); } else { if(file.mkdir()){ succ = ContentHelper.copyFile(getApplicationContext(), new File(path), file); } } return succ; } private void uploadToRealm(String path){ RealmResults<UploadHistoryRealmModel> realmModels = realm.where(UploadHistoryRealmModel.class).equalTo("pathname", path).findAll(); //RealmResults<UploadHistoryRealmModel> realmModels = realm.where(UploadHistoryRealmModel.class).findAll(); String newpath = Environment.getExternalStorageDirectory() + "/" + ".nomedia/" + "uploadHistory/" + path.substring(path.lastIndexOf("/") + 1); realm.beginTransaction(); UploadHistoryRealmModel uploadHistoryRealmModel = realm.createObject(UploadHistoryRealmModel.class); uploadHistoryRealmModel.setDatetime(realmModels.get(0).getDatetime()); uploadHistoryRealmModel.setName(realmModels.get(0).getName()); uploadHistoryRealmModel.setPathname(newpath); uploadHistoryRealmModel.setStatus(realmModels.get(0).getStatus()); realm.commitTransaction(); realm.executeTransaction(new Realm.Transaction() { @Override public void execute(Realm realm) { RealmResults<UploadHistoryRealmModel> realmModels = realm.where(UploadHistoryRealmModel.class).findAll(); realmModels.deleteAllFromRealm(); } }); } private ArrayList<Media> storeTemporaryphotos(String path){ ArrayList<Media> temp = new ArrayList<>(); if(!all_photos && !fav_photos && editMode){ for(Media m: getAlbum().getSelectedMedia()){ String name = m.getPath().substring(m.getPath().lastIndexOf("/") + 1); temp.add(new Media(path + "/" + name)); } } return temp; } private void checkDescription(String newpath, ArrayList<Media> selecteditems){ for(int i = 0; i < selecteditems.size(); i++){ getDescriptionPaths(selecteditems.get(i).getPath(), newpath); } } private void performRealmAction(final ImageDescModel descModel, String newpath){ realm = Realm.getDefaultInstance(); int index = descModel.getId().lastIndexOf("/"); String name = descModel.getId().substring(index + 1); String newpathy = newpath + "/" + name; realm.beginTransaction(); ImageDescModel imageDescModel = realm.createObject(ImageDescModel.class, newpathy); imageDescModel.setTitle(descModel.getTitle()); realm.commitTransaction(); realm.executeTransaction(new Realm.Transaction() { @Override public void execute(Realm realm) { RealmResults<ImageDescModel> result = realm.where(ImageDescModel.class).equalTo ("path", descModel.getId()).findAll(); result.deleteAllFromRealm(); } }); } private void getDescriptionPaths(String patjs, String newpth){ realm = Realm.getDefaultInstance(); RealmQuery<ImageDescModel> realmQuery = realm.where(ImageDescModel.class); for(int i = 0; i < 
realmQuery.count(); i++) { if (realmQuery.findAll().get(i).getId().equals(patjs)) { performRealmAction(realmQuery.findAll().get(i), newpth); break; } } } private void checkForFavourites(String path, ArrayList<Media> selectedphotos){ for(Media m: selectedphotos){ checkIfFav(m.getPath(), path); } } private void checkIfFav(String currentpath, String newpath){ realm = Realm.getDefaultInstance(); RealmQuery<FavouriteImagesModel> favouriteImagesModelRealmQuery = realm.where(FavouriteImagesModel.class); for(int i = 0; i < favouriteImagesModelRealmQuery.count(); i++){ if(favouriteImagesModelRealmQuery.findAll().get(i).getPath().equals(currentpath)){ performAddToFavOp(favouriteImagesModelRealmQuery.findAll().get(i), newpath); break; } } } private void performAddToFavOp(final FavouriteImagesModel favouriteImagesModel, String newpath) { realm = Realm.getDefaultInstance(); int index = favouriteImagesModel.getPath().lastIndexOf("/"); String name = favouriteImagesModel.getPath().substring(index + 1); String newpathy = newpath + "/" + name; realm.beginTransaction(); FavouriteImagesModel favouriteImagesModel1 = realm.createObject(FavouriteImagesModel.class, newpathy); ImageDescModel q = realm.where(ImageDescModel.class).equalTo("path", favouriteImagesModel.getPath()).findFirst(); if (q != null) { favouriteImagesModel1.setDescription(q.getTitle()); } else { favouriteImagesModel1.setDescription(" "); } realm.commitTransaction(); realm.executeTransaction(new Realm.Transaction() { @Override public void execute(Realm realm) { RealmResults<FavouriteImagesModel> result = realm.where(FavouriteImagesModel.class).equalTo ("path", favouriteImagesModel.getPath()).findAll(); result.deleteAllFromRealm(); } }); } private boolean addToTrash(){ int no = 0; boolean succ = false; final ArrayList<Media> media1 = storeDeletedFilesTemporarily(); File file = new File(Environment.getExternalStorageDirectory() + "/" + ".nomedia"); if(file.exists() && file.isDirectory()){ if (albumsMode) { no = getAlbum().moveAllMedia(getApplicationContext(), file.getAbsolutePath(), selectedAlbumMedia); } else if(!all_photos && !fav_photos && editMode){ no = getAlbum().moveSelectedMedia(getApplicationContext(), file.getAbsolutePath()); }else if(all_photos && !fav_photos && editMode){ no = getAlbum().moveAllMedia(getApplicationContext(), file.getAbsolutePath(), selectedMedias); }else if(!editMode && !all_photos && !fav_photos){ no = getAlbum().moveAllMedia(getApplicationContext(), file.getAbsolutePath(), getAlbum().getMedia()); } if(no > 0){ succ = true; if(no == 1){ Snackbar snackbar = SnackBarHandler.showWithBottomMargin2(mDrawerLayout, String.valueOf(no) + " " + getString(R.string .trashbin_move_onefile), navigationView.getHeight (), Snackbar.LENGTH_SHORT); snackbar.setAction("UNDO", new View.OnClickListener() { @Override public void onClick(View view) { if (albumsMode) { undoAlbumDeletion(media1); }else getAlbum().moveAllMedia(getApplicationContext(), getAlbum().getPath(), media1); refreshListener.onRefresh(); } }); snackbar.show(); }else{ Snackbar snackbar = SnackBarHandler.showWithBottomMargin2(mDrawerLayout, String.valueOf(no) + " " + getString(R.string .trashbin_move_onefile), navigationView.getHeight (), Snackbar.LENGTH_SHORT); snackbar.setAction("UNDO", new View.OnClickListener() { @Override public void onClick(View view) { if (albumsMode) { undoAlbumDeletion(media1); }else getAlbum().moveAllMedia(getApplicationContext(), getAlbum().getPath(), media1); refreshListener.onRefresh(); } }); snackbar.show(); } }else{ 
SnackBarHandler.showWithBottomMargin(mDrawerLayout, String.valueOf(no) + " " + getString(R.string .trashbin_move_error), navigationView.getHeight ()); } }else{ if(file.mkdir()){ if (albumsMode) { no = getAlbum().moveAllMedia(getApplicationContext(), file.getAbsolutePath(), selectedAlbumMedia); }else if(!all_photos && !fav_photos && editMode){ no = getAlbum().moveSelectedMedia(getApplicationContext(), file.getAbsolutePath()); }else if(all_photos && !fav_photos && editMode){ no = getAlbum().moveAllMedia(getApplicationContext(), file.getAbsolutePath(), selectedMedias); }else if(!editMode && !all_photos && !fav_photos){ no = getAlbum().moveAllMedia(getApplicationContext(), file.getAbsolutePath(), getAlbum().getMedia()); } // no = getAlbum().moveSelectedMedia(getApplicationContext(), file.getAbsolutePath()); if(no > 0){ succ = true; if(no == 1){ Snackbar snackbar = SnackBarHandler.showWithBottomMargin(mDrawerLayout, String.valueOf(no) + " " + getString(R.string .trashbin_move_onefile), navigationView.getHeight ()); snackbar.setAction(R.string.ok_action, new View.OnClickListener() { @Override public void onClick(View view) { if (albumsMode) { undoAlbumDeletion(media1); } refreshListener.onRefresh(); } }); }else{ Snackbar snackbar = SnackBarHandler.showWithBottomMargin(mDrawerLayout, String.valueOf(no) + " " + getString(R.string .trashbin_move), navigationView.getHeight ()); snackbar.setAction(R.string.ok_action, new View.OnClickListener() { @Override public void onClick(View view) { if (albumsMode) { undoAlbumDeletion(media1); } refreshListener.onRefresh(); } }); } }else{ SnackBarHandler.showWithBottomMargin(mDrawerLayout, String.valueOf(no) + " " + getString(R.string .trashbin_move_error), navigationView.getHeight ()); } } } // clearSelectedPhotos(); return succ; } private ArrayList<Media> storeDeletedFilesTemporarily(){ ArrayList<Media> deletedImages = new ArrayList<>(); if(albumsMode) { selectedAlbumMedia.clear(); for (Album selectedAlbum : getAlbums().getSelectedAlbums()) { checkAndAddFolder(new File(selectedAlbum.getPath()), deletedImages); } }else if(!all_photos && !fav_photos && editMode){ for(Media m: getAlbum().getSelectedMedia()){ String name = m.getPath().substring(m.getPath().lastIndexOf("/") + 1); deletedImages.add(new Media(Environment.getExternalStorageDirectory() + "/" + ".nomedia" + "/" + name)); } } else if(all_photos && !fav_photos && editMode){ for(Media m: selectedMedias){ String name = m.getPath().substring(m.getPath().lastIndexOf("/") + 1); deletedImages.add(new Media(Environment.getExternalStorageDirectory() + "/" + ".nomedia" + "/" + name)); } } return deletedImages; } private void addTrashObjectsToRealm(ArrayList<Media> media){ String trashbinpath = Environment.getExternalStorageDirectory() + "/" + ".nomedia"; realm = Realm.getDefaultInstance(); for(int i = 0; i < media.size(); i++){ int index = media.get(i).getPath().lastIndexOf("/"); String name = media.get(i).getPath().substring(index + 1); realm.beginTransaction(); String trashpath = trashbinpath + "/" + name; TrashBinRealmModel trashBinRealmModel = realm.createObject(TrashBinRealmModel.class, trashpath); trashBinRealmModel.setOldpath(media.get(i).getPath()); trashBinRealmModel.setDatetime(new SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(new Date())); trashBinRealmModel.setTimeperiod("null"); realm.commitTransaction(); } } private void checkAndAddFolder(File dir, ArrayList<Media> deletedImages) { File[] files = dir.listFiles(new ImageFileFilter(false)); if (files != null && files.length > 0) { for (File file : 
files) { selectedAlbumMedia.add(new Media(file.getAbsolutePath())); String name = file.getAbsolutePath().substring(file.getAbsolutePath().lastIndexOf("/") + 1); Media media = new Media(Environment.getExternalStorageDirectory() + "/" + ".nomedia" + "/" +name ); deletedImages.add(media); } } } private void undoAlbumDeletion(ArrayList<Media> deleteImages) { for (int i = 0; i < deleteImages.size(); i++) { String oldPath = selectedAlbumMedia.get(i).getPath(); String oldFolder = oldPath.substring(0, oldPath.lastIndexOf("/")); if (restoreMove(LFMainActivity.this, deleteImages.get(i).getPath(), oldFolder)) { String datafrom = deleteImages.get(i).getPath(); scanFile(context, new String[]{ datafrom, StringUtils.getPhotoPathMoved (datafrom,oldFolder) }); } } for (int i = 0; i < deleteImages.size(); i++) { removeFromRealm(deleteImages.get(i).getPath()); } refreshListener.onRefresh(); } private boolean restoreMove(Context context, String source, String targetDir){ File from = new File(source); File to = new File(targetDir); return ContentHelper.moveFile(context, from, to); } private void removeFromRealm(String path){ Realm realm = Realm.getDefaultInstance(); RealmResults<TrashBinRealmModel> result = realm.where(TrashBinRealmModel.class).equalTo ("trashbinpath", path).findAll(); realm.beginTransaction(); result.deleteAllFromRealm(); realm.commitTransaction(); } private static class SortModeSet extends AsyncTask<SortingMode, Void, Void> { private WeakReference<LFMainActivity> reference; public SortModeSet(LFMainActivity reference) { this.reference = new WeakReference<>(reference); } @Override protected Void doInBackground(SortingMode... sortingModes) { for (Album a : getAlbums().dispAlbums) { if (a.settings.getSortingMode().getValue() != sortingModes[0].getValue()) { a.setDefaultSortingMode(reference.get(), sortingModes[0]); } } return null; } } public Bitmap getBitmap(String path) { Uri uri = Uri.fromFile(new File(path)); InputStream in = null; try { final int IMAGE_MAX_SIZE = 1200000; // 1.2MP in = getContentResolver().openInputStream(uri); // Decode image size BitmapFactory.Options o = new BitmapFactory.Options(); o.inJustDecodeBounds = true; BitmapFactory.decodeStream(in, null, o); in.close(); int scale = 1; while ((o.outWidth * o.outHeight) * (1 / Math.pow(scale, 2)) > IMAGE_MAX_SIZE) { scale++; } Bitmap bitmap = null; in = getContentResolver().openInputStream(uri); if (scale > 1) { scale--; // scale to max possible inSampleSize that still yields an image // larger than target o = new BitmapFactory.Options(); o.inSampleSize = scale; bitmap = BitmapFactory.decodeStream(in, null, o); // resize to desired dimensions int height = bitmap.getHeight(); int width = bitmap.getWidth(); double y = Math.sqrt(IMAGE_MAX_SIZE / (((double) width) / height)); double x = (y / height) * width; Bitmap scaledBitmap = Bitmap.createScaledBitmap(bitmap, (int) x, (int) y, true); bitmap.recycle(); bitmap = scaledBitmap; System.gc(); } else { bitmap = BitmapFactory.decodeStream(in); } in.close(); Log.d(TAG, "bitmap size - width: " + bitmap.getWidth() + ", height: " + bitmap.getHeight()); return bitmap; } catch (IOException e) { Log.e(TAG, e.getMessage(), e); return null; } } public void getNavigationBar() { if (editMode && hidenav) { showNavigationBar(); hidenav = false; } } //to copy from all photos. 
private boolean copyfromallphotos(Context context, String folderPath) { boolean success = false; for (Media m : selectedMedias) { try { File from = new File(m.getPath()); File to = new File(folderPath); if (success = ContentHelper.copyFile(context, from, to)) scanFile(context, new String[]{StringUtils.getPhotoPathMoved(m.getPath(), folderPath)}); } catch (Exception e) { e.printStackTrace(); } } return success; } public void scanFile(Context context, String[] path) { MediaScannerConnection.scanFile(context, path, null, null); } /** * If we are in albumsMode, make the albums recyclerView visible. If we are not, make media recyclerView visible. * * @param albumsMode it indicates whether we are in album selection mode or not */ private void toggleRecyclersVisibility(boolean albumsMode) { rvAlbums.setVisibility(albumsMode ? View.VISIBLE : View.GONE); rvMedia.setVisibility(albumsMode ? View.GONE : View.VISIBLE); nothingToShow.setVisibility(View.GONE); starImageView.setVisibility(View.GONE); if (albumsMode) fabScrollUp.hide(); //touchScrollBar.setScrollBarHidden(albumsMode); } private void tint() { if (localFolder) { defaultIcon.setColor(getPrimaryColor()); defaultText.setTextColor(getPrimaryColor()); hiddenIcon.setColor(getIconColor()); hiddenText.setTextColor(getTextColor()); } else { hiddenIcon.setColor(getPrimaryColor()); hiddenText.setTextColor(getPrimaryColor()); defaultIcon.setColor(getIconColor()); defaultText.setTextColor(getTextColor()); } } /** * handles back presses. * If search view is open, back press will close it. * If we are currently in selection mode, back press will take us out of selection mode. * If we are not in selection mode but in albumsMode and the drawer is open, back press will close it. * If we are not in selection mode but in albumsMode and the drawer is closed, finish the activity. * If we are neither in selection mode nor in albumsMode, display the albums again. 
*/ @Override public void onBackPressed() { checkForReveal = true; if (!searchView.isIconified()) searchView.setIconified(true); if ((editMode && all_photos) || (editMode && fav_photos)) clearSelectedPhotos(); getNavigationBar(); if (editMode) finishEditMode(); else { if (albumsMode) { if (mDrawerLayout.isDrawerOpen(GravityCompat.START)) mDrawerLayout.closeDrawer(GravityCompat.START); else { if (doubleBackToExitPressedOnce && isTaskRoot()) finish(); else if (isTaskRoot()) { doubleBackToExitPressedOnce = true; View rootView = LFMainActivity.this.getWindow().getDecorView().findViewById(android.R.id.content); Snackbar snackbar = Snackbar .make(rootView, R.string.press_back_again_to_exit, Snackbar.LENGTH_LONG) .setAction(R.string.exit, new View.OnClickListener() { @Override public void onClick(View view) { finishAffinity(); } }) .setActionTextColor(getAccentColor()); View sbView = snackbar.getView(); final FrameLayout.LayoutParams params = (FrameLayout.LayoutParams) sbView.getLayoutParams(); params.setMargins(params.leftMargin, params.topMargin, params.rightMargin, params.bottomMargin + navigationView.getHeight()); sbView.setLayoutParams(params); snackbar.show(); new Handler().postDelayed(new Runnable() { @Override public void run() { doubleBackToExitPressedOnce = false; } }, 2000); } else super.onBackPressed(); } } else { displayAlbums(); } } } private class CreateGIFTask extends AsyncTask<Void, Void, Void>{ private ArrayList<Bitmap> bitmaps = new ArrayList<>(); @Override protected void onPreExecute() { super.onPreExecute(); swipeRefreshLayout.setRefreshing(true); } @Override protected Void doInBackground(Void... voids) { if(!albumsMode && !all_photos && !fav_photos){ for(Media m: getAlbum().getSelectedMedia()){ bitmaps.add(getBitmap(m.getPath())); } }else if(!albumsMode && all_photos && !fav_photos){ for(Media m: selectedMedias){ bitmaps.add(getBitmap(m.getPath())); } } byte[] bytes = createGIFFromImages(bitmaps); File file = new File(Environment.getExternalStorageDirectory() + "/" + "Phimpme_gifs"); DateFormat dateFormat = new SimpleDateFormat("ddMMyy_HHmm"); String date = dateFormat.format(Calendar.getInstance().getTime()); if(file.exists() && file.isDirectory()){ FileOutputStream outStream = null; try{ outStream = new FileOutputStream(file.getPath() + "/" + "GIF_"+date+".gif"); outStream.write(bytes); outStream.close(); }catch(Exception e){ e.printStackTrace(); } }else { if (file.mkdir()) { FileOutputStream outStream = null; try { outStream = new FileOutputStream(file.getPath() + "/" + "GIF_"+date+".gif"); outStream.write(bytes); outStream.close(); } catch (Exception e) { e.printStackTrace(); } } } return null; } @Override protected void onPostExecute(Void aVoid) { super.onPostExecute(aVoid); if(!albumsMode && !all_photos && !fav_photos){ getAlbum().clearSelectedPhotos(); }else if(!albumsMode && all_photos && !fav_photos){ clearSelectedPhotos(); } swipeRefreshLayout.setRefreshing(false); } } private byte[] createGIFFromImages(ArrayList<Bitmap> bitmaps){ ByteArrayOutputStream bos = new ByteArrayOutputStream(); AnimatedGifEncoder encoder = new AnimatedGifEncoder(); encoder.start(bos); for (Bitmap bitmap : bitmaps) { encoder.addFrame(bitmap); } encoder.finish(); return bos.toByteArray(); } private class CreateZipTask extends AsyncTask<Void, Integer, String> { @Override protected void onPreExecute() { super.onPreExecute(); swipeRefreshLayout.setRefreshing(true); NotificationHandler.make(R.string.Images, R.string.zip_fol, R.drawable.ic_archive_black_24dp); } @Override protected String 
doInBackground(Void... voids) { DateFormat dateFormat = new SimpleDateFormat("ddMMyy_HHmm"); String dateAndTime = dateFormat.format(Calendar.getInstance().getTime()); try { double c = 0.0; File file = new File(Environment.getExternalStorageDirectory() + "/" + "Phimpme_ImageZip"); FileOutputStream dest = null; if(file.exists() && file.isDirectory()){ try{ dest = new FileOutputStream(file.getPath() + "/" + "ZIP_"+dateAndTime+".zip"); }catch(Exception e){ e.printStackTrace(); } }else { if (file.mkdir()) { dest = null; try { dest = new FileOutputStream(file.getPath() + "/" + "ZIP_"+dateAndTime+".zip"); } catch (Exception e) { e.printStackTrace(); } } } BufferedInputStream origin = null; ZipOutputStream out = new ZipOutputStream(new BufferedOutputStream( dest)); byte data[] = new byte[BUFFER]; for (int i = 0; i < path.size(); i++) { FileInputStream fi = new FileInputStream(path.get(i)); origin = new BufferedInputStream(fi, BUFFER); ZipEntry entry = new ZipEntry(path.get(i).substring(path.get(i).lastIndexOf("/") + 1)); out.putNextEntry(entry); c++; if ((int) ((c / size) * 100) > 100) { NotificationHandler.actionProgress((int) c, path.size(), 100, R.string.zip_operation); } else { NotificationHandler.actionProgress((int) c, path.size(), (int) ((c / path.size()) * 100), R.string .zip_operation); } int count; while ((count = origin.read(data, 0, BUFFER)) != -1) { out.write(data, 0, count); } origin.close(); } out.close(); if (isCancelled()) { return null; } } catch (Exception e) { e.printStackTrace(); } return dateAndTime; } @Override protected void onPostExecute(String dateAndTime) { super.onPostExecute(dateAndTime); NotificationHandler.actionPassed(R.string.zip_completion); String path = "ZIP: "+dateAndTime+".zip"; SnackBarHandler.show(mDrawerLayout, getResources().getString(R.string.zip_location) + path); if(!albumsMode && !all_photos && !fav_photos){ getAlbum().clearSelectedPhotos(); } else if(!albumsMode && all_photos && !fav_photos){ clearSelectedPhotos(); } swipeRefreshLayout.setRefreshing(false); } } private class ZipAlbumTask extends AsyncTask<Void, Integer, Void> { @Override protected void onPreExecute() { super.onPreExecute(); NotificationHandler.make(R.string.folder, R.string.zip_fol, R.drawable.ic_archive_black_24dp); } @Override protected Void doInBackground(Void... 
voids) { try { double c = 0.0; BufferedInputStream origin = null; FileOutputStream dest = new FileOutputStream(getAlbums().getSelectedAlbum(0).getParentsFolders().get (1) + "/" + getAlbums().getSelectedAlbum(0).getName() + ".zip"); ZipOutputStream out = new ZipOutputStream(new BufferedOutputStream( dest)); byte data[] = new byte[BUFFER]; for (int i = 0; i < path.size(); i++) { FileInputStream fi = new FileInputStream(path.get(i)); origin = new BufferedInputStream(fi, BUFFER); ZipEntry entry = new ZipEntry(path.get(i).substring(path.get(i).lastIndexOf("/") + 1)); out.putNextEntry(entry); c++; if ((int) ((c / size) * 100) > 100) { NotificationHandler.actionProgress((int) c, path.size(), 100, R.string.zip_operation); } else { NotificationHandler.actionProgress((int) c, path.size(), (int) ((c / path.size()) * 100), R.string .zip_operation); } int count; while ((count = origin.read(data, 0, BUFFER)) != -1) { out.write(data, 0, count); } origin.close(); } out.close(); if (isCancelled()) { return null; } } catch (Exception e) { e.printStackTrace(); } return null; } @Override protected void onPostExecute(Void aVoid) { super.onPostExecute(aVoid); NotificationHandler.actionPassed(R.string.zip_completion); String path = getAlbums().getSelectedAlbum(0).getParentsFolders().get(1) + getAlbums().getSelectedAlbum (0).getName() + ".zip"; SnackBarHandler.show(mDrawerLayout, getResources().getString(R.string.zip_location) + path); getAlbums().clearSelectedAlbums(); albumsAdapter.notifyDataSetChanged(); invalidateOptionsMenu(); } } private static class PrepareAlbumTask extends AsyncTask<Void, Integer, Void> { private WeakReference<LFMainActivity> reference; PrepareAlbumTask(LFMainActivity reference) { this.reference = new WeakReference<>(reference); } @Override protected void onPreExecute() { LFMainActivity asyncActivityRef = reference.get(); asyncActivityRef.swipeRefreshLayout.setRefreshing(true); asyncActivityRef.toggleRecyclersVisibility(true); if(!asyncActivityRef.navigationView.isShown()){ asyncActivityRef.navigationView.setVisibility(View.VISIBLE); } super.onPreExecute(); } @Override protected Void doInBackground(Void... arg0) { LFMainActivity asynActivityRef = reference.get(); getAlbums().loadAlbums(asynActivityRef.getApplicationContext(), asynActivityRef.hidden); return null; } @Override protected void onPostExecute(Void result) { LFMainActivity asyncActivityRef = reference.get(); asyncActivityRef.albumsAdapter.swapDataSet(getAlbums().dispAlbums); asyncActivityRef.albList = new ArrayList<>(); asyncActivityRef.populateAlbum(); asyncActivityRef.checkNothing(); asyncActivityRef.swipeRefreshLayout.setRefreshing(false); getAlbums().saveBackup(asyncActivityRef); asyncActivityRef.invalidateOptionsMenu(); asyncActivityRef.finishEditMode(); } } private static class PreparePhotosTask extends AsyncTask<Void, Void, Void> { private WeakReference<LFMainActivity> reference; PreparePhotosTask(LFMainActivity reference) { this.reference = new WeakReference<>(reference); } @Override protected void onPreExecute() { // Declaring globally in Async might lead to leakage of the context LFMainActivity asyncActivityRef = reference.get(); asyncActivityRef.swipeRefreshLayout.setRefreshing(true); asyncActivityRef.toggleRecyclersVisibility(false); super.onPreExecute(); } @Override protected Void doInBackground(Void... 
arg0) { reference.get().getAlbum().updatePhotos(reference.get()); return null; } @Override protected void onPostExecute(Void result) { LFMainActivity asyncActivityRef = reference.get(); asyncActivityRef.mediaAdapter.swapDataSet(asyncActivityRef.getAlbum().getMedia(), false); if (!asyncActivityRef.hidden) HandlingAlbums.addAlbumToBackup(asyncActivityRef, reference.get().getAlbum()); asyncActivityRef.checkNothing(); asyncActivityRef.swipeRefreshLayout.setRefreshing(false); asyncActivityRef.invalidateOptionsMenu(); asyncActivityRef.finishEditMode(); } } private static class PrepareAllPhotos extends AsyncTask<Void, Void, Void> { private WeakReference<LFMainActivity> reference; PrepareAllPhotos(LFMainActivity reference) { this.reference = new WeakReference<>(reference); } @Override protected void onPreExecute() { LFMainActivity asyncActivityRef = reference.get(); asyncActivityRef.swipeRefreshLayout.setRefreshing(true); asyncActivityRef.toggleRecyclersVisibility(false); super.onPreExecute(); } @Override protected Void doInBackground(Void... arg0) { LFMainActivity asyncActivityRef = reference.get(); asyncActivityRef.getAlbum().updatePhotos(asyncActivityRef); return null; } @Override protected void onPostExecute(Void result) { LFMainActivity asyncActivityRef = reference.get(); listAll = StorageProvider.getAllShownImages(asyncActivityRef); asyncActivityRef.size = listAll.size(); Collections.sort(listAll, MediaComparators.getComparator(asyncActivityRef.getAlbum().settings.getSortingMode(), asyncActivityRef.getAlbum().settings.getSortingOrder())); asyncActivityRef.mediaAdapter.swapDataSet(listAll, false); if (!asyncActivityRef.hidden) HandlingAlbums.addAlbumToBackup(asyncActivityRef, asyncActivityRef.getAlbum()); asyncActivityRef.checkNothing(); asyncActivityRef.swipeRefreshLayout.setRefreshing(false); asyncActivityRef.invalidateOptionsMenu(); asyncActivityRef.finishEditMode(); asyncActivityRef.toolbar.setTitle(asyncActivityRef.getString(R.string.all_media)); asyncActivityRef.clearSelectedPhotos(); } } private static class FavouritePhotos extends AsyncTask<Void, Void, Void> { private WeakReference<LFMainActivity> reference; FavouritePhotos(LFMainActivity reference) { this.reference = new WeakReference<>(reference); } @Override protected void onPreExecute() { LFMainActivity asyncActivityRef = reference.get(); asyncActivityRef.swipeRefreshLayout.setRefreshing(true); asyncActivityRef.toggleRecyclersVisibility(false); asyncActivityRef.navigationView.setVisibility(View.INVISIBLE); super.onPreExecute(); } @Override protected Void doInBackground(Void... 
arg0) { LFMainActivity asyncActivityRef = reference.get(); asyncActivityRef.getAlbum().updatePhotos(asyncActivityRef); return null; } @Override protected void onPostExecute(Void result) { LFMainActivity asyncActivityRef = reference.get(); Collections.sort(asyncActivityRef.favouriteslist, MediaComparators.getComparator(asyncActivityRef.getAlbum().settings.getSortingMode(), asyncActivityRef.getAlbum().settings.getSortingOrder())); asyncActivityRef.mediaAdapter.swapDataSet(asyncActivityRef.favouriteslist, true); asyncActivityRef.checkNothingFavourites(); asyncActivityRef.swipeRefreshLayout.setRefreshing(false); asyncActivityRef.invalidateOptionsMenu(); asyncActivityRef.finishEditMode(); asyncActivityRef.toolbar.setTitle(asyncActivityRef.getResources().getString(R.string.favourite_title)); asyncActivityRef.clearSelectedPhotos(); } } /* AsyncTask for Add to favourites operation */ private class AddToFavourites extends AsyncTask<Void, Integer, Integer>{ @Override protected void onPreExecute() { getNavigationBar(); swipeRefreshLayout.setRefreshing(true); super.onPreExecute(); } @Override protected Integer doInBackground(Void... voids) { int count = 0; realm = Realm.getDefaultInstance(); ArrayList<Media> favadd; if (!all_photos) { favadd = getAlbum().getSelectedMedia(); } else { favadd = selectedMedias; } for (int i = 0; i < favadd.size(); i++) { String realpath = favadd.get(i).getPath(); RealmQuery<FavouriteImagesModel> query = realm.where(FavouriteImagesModel.class).equalTo("path", realpath); if (query.count() == 0) { count++; realm.beginTransaction(); FavouriteImagesModel fav = realm.createObject(FavouriteImagesModel.class, realpath); ImageDescModel q = realm.where(ImageDescModel.class).equalTo("path", realpath).findFirst(); if (q != null) { fav.setDescription(q.getTitle()); } else { fav.setDescription(" "); } realm.commitTransaction(); } } return count; } @Override protected void onPostExecute(Integer count) { super.onPostExecute(count); swipeRefreshLayout.setRefreshing(false); finishEditMode(); if (count == 0) { SnackBarHandler.show(mDrawerLayout, getResources().getString(R.string.check_favourite_multipleitems)); } else if (count == 1) { final Snackbar snackbar = SnackBarHandler.show(mDrawerLayout, getResources().getString(R.string.add_favourite) ); snackbar.setAction(R.string.openfav, new View.OnClickListener() { @Override public void onClick(View view) { displayfavourites(); favourites = false; } }); snackbar.show(); } else { SnackBarHandler.show(mDrawerLayout, count + " " + getResources().getString(R.string .add_favourite_multiple)); final Snackbar snackbar = SnackBarHandler.show(mDrawerLayout, getResources().getString(R.string.add_favourite) ); snackbar.setAction(R.string.openfav, new View.OnClickListener() { @Override public void onClick(View view) { displayfavourites(); favourites = false; } }); snackbar.show(); } mediaAdapter.notifyDataSetChanged(); } } /* Async Class for Sorting Photos - NOT listAll */ private static class SortingUtilsPhtots extends AsyncTask<Void, Void, Void> { private WeakReference<LFMainActivity> reference; SortingUtilsPhtots(LFMainActivity reference) { this.reference = new WeakReference<>(reference); } @Override protected void onPreExecute() { LFMainActivity asyncActivityRef = reference.get(); super.onPreExecute(); asyncActivityRef.swipeRefreshLayout.setRefreshing(true); } @Override protected Void doInBackground(Void... 
aVoid) { LFMainActivity asyncActivityRef = reference.get(); asyncActivityRef.getAlbum().sortPhotos(); return null; } protected void onPostExecute(Void aVoid) { LFMainActivity asyncActivityRef = reference.get(); super.onPostExecute(aVoid); asyncActivityRef.swipeRefreshLayout.setRefreshing(false); asyncActivityRef.mediaAdapter.swapDataSet(asyncActivityRef.getAlbum().getMedia(), false); } } /* Async Class for Sorting Photos - listAll */ private static class SortingUtilsListAll extends AsyncTask<Void, Void, Void> { private WeakReference<LFMainActivity> reference; SortingUtilsListAll(LFMainActivity reference) { this.reference = new WeakReference<>(reference); } @Override protected void onPreExecute() { LFMainActivity asyncActivityRef = reference.get(); super.onPreExecute(); asyncActivityRef.swipeRefreshLayout.setRefreshing(true); } @Override protected Void doInBackground(Void... aVoid) { LFMainActivity asyncActivityRef = reference.get(); Collections.sort(listAll, MediaComparators.getComparator(asyncActivityRef.getAlbum().settings.getSortingMode(), asyncActivityRef.getAlbum().settings.getSortingOrder())); return null; } @Override protected void onPostExecute(Void aVoid) { LFMainActivity asyncActivityRef = reference.get(); super.onPostExecute(aVoid); asyncActivityRef.swipeRefreshLayout.setRefreshing(false); asyncActivityRef.mediaAdapter.swapDataSet(listAll, false); } } /* Async Class for Sorting Favourites */ private static class SortingUtilsFavouritelist extends AsyncTask<Void, Void, Void> { private WeakReference<LFMainActivity> reference; SortingUtilsFavouritelist(LFMainActivity reference) { this.reference = new WeakReference<>(reference); } @Override protected void onPreExecute() { LFMainActivity asyncActivityRef = reference.get(); super.onPreExecute(); asyncActivityRef.swipeRefreshLayout.setRefreshing(true); } @Override protected Void doInBackground(Void... aVoid) { LFMainActivity asyncActivityRef = reference.get(); Collections.sort(asyncActivityRef.favouriteslist, MediaComparators.getComparator(asyncActivityRef.getAlbum().settings.getSortingMode(), asyncActivityRef.getAlbum().settings.getSortingOrder())); return null; } @Override protected void onPostExecute(Void aVoid) { LFMainActivity asyncActivityRef = reference.get(); super.onPostExecute(aVoid); asyncActivityRef.swipeRefreshLayout.setRefreshing(false); asyncActivityRef.mediaAdapter.swapDataSet(asyncActivityRef.favouriteslist, true); } } /* Async Class for Sorting Albums */ private static class SortingUtilsAlbums extends AsyncTask<Void, Void, Void> { private WeakReference<LFMainActivity> reference; SortingUtilsAlbums(LFMainActivity reference) { this.reference = new WeakReference<>(reference); } @Override protected void onPreExecute() { LFMainActivity asyncActivityRef = reference.get(); super.onPreExecute(); asyncActivityRef.swipeRefreshLayout.setRefreshing(true); } @Override protected Void doInBackground(Void... 
aVoid) { getAlbums().sortAlbums(); return null; } @Override protected void onPostExecute(Void aVoid) { LFMainActivity asyncActivityRef = reference.get(); super.onPostExecute(aVoid); asyncActivityRef.swipeRefreshLayout.setRefreshing(false); asyncActivityRef.albumsAdapter.swapDataSet(getAlbums().dispAlbums); new PrepareAlbumTask(asyncActivityRef.activityContext).execute(); } } /* Async Class for coping images */ private class CopyPhotos extends AsyncTask<String, Integer, Boolean> { private WeakReference<LFMainActivity> reference; private String path; private Snackbar snackbar; private ArrayList<Media> temp; private Boolean moveAction, copyAction, success; CopyPhotos(String path, Boolean moveAction, Boolean copyAction, LFMainActivity reference) { this.path = path; this.moveAction = moveAction; this.copyAction = copyAction; this.reference = new WeakReference<>(reference); } @Override protected void onPreExecute() { LFMainActivity asyncActivityRef = reference.get(); asyncActivityRef.swipeRefreshLayout.setRefreshing(true); super.onPreExecute(); } @Override protected Boolean doInBackground(String... arg0) { temp = storeTemporaryphotos(path); LFMainActivity asyncActivityRef = reference.get(); if (!asyncActivityRef.all_photos) { success = asyncActivityRef.getAlbum().copySelectedPhotos(asyncActivityRef, path); MediaStoreProvider.getAlbums(asyncActivityRef); asyncActivityRef.getAlbum().updatePhotos(asyncActivityRef); } else { success = asyncActivityRef.copyfromallphotos(asyncActivityRef.getApplicationContext(), path); } return success; } @Override protected void onPostExecute(Boolean result) { LFMainActivity asyncActivityRef = reference.get(); if(result) { if(!asyncActivityRef.all_photos){ asyncActivityRef.mediaAdapter.swapDataSet(asyncActivityRef.getAlbum().getMedia(), false); }else { asyncActivityRef.mediaAdapter.swapDataSet(listAll, false); } asyncActivityRef.mediaAdapter.notifyDataSetChanged(); asyncActivityRef.invalidateOptionsMenu(); asyncActivityRef.swipeRefreshLayout.setRefreshing(false); asyncActivityRef.finishEditMode(); if (moveAction) SnackBarHandler.showWithBottomMargin(asyncActivityRef.mDrawerLayout, asyncActivityRef.getString(R.string.photos_moved_successfully), asyncActivityRef.navigationView.getHeight()); else if (copyAction){ snackbar = SnackBarHandler.showWithBottomMargin2(asyncActivityRef.mDrawerLayout, asyncActivityRef.getString(R.string.copied_successfully), asyncActivityRef.navigationView.getHeight(), Snackbar.LENGTH_SHORT); snackbar.setAction("UNDO", new View.OnClickListener() { @Override public void onClick(View view) { for (Media media : temp) { String[] projection = {MediaStore.Images.Media._ID}; // Match on the file path String selection = MediaStore.Images.Media.DATA + " = ?"; String[] selectionArgs = new String[]{media.getPath()}; // Query for the ID of the media matching the file path Uri queryUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI; ContentResolver contentResolver = getContentResolver(); Cursor c = contentResolver .query(queryUri, projection, selection, selectionArgs, null); if (c.moveToFirst()) { // We found the ID. Deleting the item via the content provider will also remove the file long id = c.getLong(c.getColumnIndexOrThrow(MediaStore.Images.Media._ID)); Uri deleteUri = ContentUris .withAppendedId(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, id); contentResolver.delete(deleteUri, null, null); } c.close(); } } }); } } else asyncActivityRef.requestSdCardPermissions(); } } }
1
12,801
@angmas1 just make one more change here too. Move the else line up to the line above it, where the if block ends, and remove the braces, like in line 280.
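A minimal sketch of the style the reviewer is asking for, using names borrowed from the file above purely for illustration (the actual if/else block under review is not shown in this record): the else is moved up to where the if block ends, and the single-statement branches drop their braces.

class BraceStyleExample {
    private boolean doubleBackToExitPressedOnce;

    void before() {
        // else on its own line, single statements braced
        if (isTaskRoot()) {
            doubleBackToExitPressedOnce = true;
        }
        else {
            goBack();
        }
    }

    void after() {
        // else moved up to the end of the if block, braces removed
        if (isTaskRoot())
            doubleBackToExitPressedOnce = true;
        else
            goBack();
    }

    boolean isTaskRoot() { return true; } // stand-in for the Activity method
    void goBack() { }                     // stand-in for super.onBackPressed()
}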
fossasia-phimpme-android
java
@@ -21,7 +21,7 @@ func (s *BlkioGroup) Name() string { } func (s *BlkioGroup) Apply(path string, d *cgroupData) error { - return join(path, d.pid) + return apply(path, d.pid) } func (s *BlkioGroup) Set(path string, r *configs.Resources) error {
1
package fs import ( "bufio" "os" "path/filepath" "strconv" "strings" "github.com/opencontainers/runc/libcontainer/cgroups" "github.com/opencontainers/runc/libcontainer/configs" ) type BlkioGroup struct { weightFilename string weightDeviceFilename string } func (s *BlkioGroup) Name() string { return "blkio" } func (s *BlkioGroup) Apply(path string, d *cgroupData) error { return join(path, d.pid) } func (s *BlkioGroup) Set(path string, r *configs.Resources) error { s.detectWeightFilenames(path) if r.BlkioWeight != 0 { if err := cgroups.WriteFile(path, s.weightFilename, strconv.FormatUint(uint64(r.BlkioWeight), 10)); err != nil { return err } } if r.BlkioLeafWeight != 0 { if err := cgroups.WriteFile(path, "blkio.leaf_weight", strconv.FormatUint(uint64(r.BlkioLeafWeight), 10)); err != nil { return err } } for _, wd := range r.BlkioWeightDevice { if wd.Weight != 0 { if err := cgroups.WriteFile(path, s.weightDeviceFilename, wd.WeightString()); err != nil { return err } } if wd.LeafWeight != 0 { if err := cgroups.WriteFile(path, "blkio.leaf_weight_device", wd.LeafWeightString()); err != nil { return err } } } for _, td := range r.BlkioThrottleReadBpsDevice { if err := cgroups.WriteFile(path, "blkio.throttle.read_bps_device", td.String()); err != nil { return err } } for _, td := range r.BlkioThrottleWriteBpsDevice { if err := cgroups.WriteFile(path, "blkio.throttle.write_bps_device", td.String()); err != nil { return err } } for _, td := range r.BlkioThrottleReadIOPSDevice { if err := cgroups.WriteFile(path, "blkio.throttle.read_iops_device", td.String()); err != nil { return err } } for _, td := range r.BlkioThrottleWriteIOPSDevice { if err := cgroups.WriteFile(path, "blkio.throttle.write_iops_device", td.String()); err != nil { return err } } return nil } /* examples: blkio.sectors 8:0 6792 blkio.io_service_bytes 8:0 Read 1282048 8:0 Write 2195456 8:0 Sync 2195456 8:0 Async 1282048 8:0 Total 3477504 Total 3477504 blkio.io_serviced 8:0 Read 124 8:0 Write 104 8:0 Sync 104 8:0 Async 124 8:0 Total 228 Total 228 blkio.io_queued 8:0 Read 0 8:0 Write 0 8:0 Sync 0 8:0 Async 0 8:0 Total 0 Total 0 */ func splitBlkioStatLine(r rune) bool { return r == ' ' || r == ':' } func getBlkioStat(dir, file string) ([]cgroups.BlkioStatEntry, error) { var blkioStats []cgroups.BlkioStatEntry f, err := cgroups.OpenFile(dir, file, os.O_RDONLY) if err != nil { if os.IsNotExist(err) { return blkioStats, nil } return nil, err } defer f.Close() sc := bufio.NewScanner(f) for sc.Scan() { // format: dev type amount fields := strings.FieldsFunc(sc.Text(), splitBlkioStatLine) if len(fields) < 3 { if len(fields) == 2 && fields[0] == "Total" { // skip total line continue } else { return nil, malformedLine(dir, file, sc.Text()) } } v, err := strconv.ParseUint(fields[0], 10, 64) if err != nil { return nil, &parseError{Path: dir, File: file, Err: err} } major := v v, err = strconv.ParseUint(fields[1], 10, 64) if err != nil { return nil, &parseError{Path: dir, File: file, Err: err} } minor := v op := "" valueField := 2 if len(fields) == 4 { op = fields[2] valueField = 3 } v, err = strconv.ParseUint(fields[valueField], 10, 64) if err != nil { return nil, &parseError{Path: dir, File: file, Err: err} } blkioStats = append(blkioStats, cgroups.BlkioStatEntry{Major: major, Minor: minor, Op: op, Value: v}) } if err := sc.Err(); err != nil { return nil, &parseError{Path: dir, File: file, Err: err} } return blkioStats, nil } func (s *BlkioGroup) GetStats(path string, stats *cgroups.Stats) error { type blkioStatInfo struct { filename string 
blkioStatEntriesPtr *[]cgroups.BlkioStatEntry } bfqDebugStats := []blkioStatInfo{ { filename: "blkio.bfq.sectors_recursive", blkioStatEntriesPtr: &stats.BlkioStats.SectorsRecursive, }, { filename: "blkio.bfq.io_service_time_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoServiceTimeRecursive, }, { filename: "blkio.bfq.io_wait_time_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoWaitTimeRecursive, }, { filename: "blkio.bfq.io_merged_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoMergedRecursive, }, { filename: "blkio.bfq.io_queued_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoQueuedRecursive, }, { filename: "blkio.bfq.time_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoTimeRecursive, }, { filename: "blkio.bfq.io_serviced_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoServicedRecursive, }, { filename: "blkio.bfq.io_service_bytes_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoServiceBytesRecursive, }, } bfqStats := []blkioStatInfo{ { filename: "blkio.bfq.io_serviced_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoServicedRecursive, }, { filename: "blkio.bfq.io_service_bytes_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoServiceBytesRecursive, }, } cfqStats := []blkioStatInfo{ { filename: "blkio.sectors_recursive", blkioStatEntriesPtr: &stats.BlkioStats.SectorsRecursive, }, { filename: "blkio.io_service_time_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoServiceTimeRecursive, }, { filename: "blkio.io_wait_time_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoWaitTimeRecursive, }, { filename: "blkio.io_merged_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoMergedRecursive, }, { filename: "blkio.io_queued_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoQueuedRecursive, }, { filename: "blkio.time_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoTimeRecursive, }, { filename: "blkio.io_serviced_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoServicedRecursive, }, { filename: "blkio.io_service_bytes_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoServiceBytesRecursive, }, } throttleRecursiveStats := []blkioStatInfo{ { filename: "blkio.throttle.io_serviced_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoServicedRecursive, }, { filename: "blkio.throttle.io_service_bytes_recursive", blkioStatEntriesPtr: &stats.BlkioStats.IoServiceBytesRecursive, }, } baseStats := []blkioStatInfo{ { filename: "blkio.throttle.io_serviced", blkioStatEntriesPtr: &stats.BlkioStats.IoServicedRecursive, }, { filename: "blkio.throttle.io_service_bytes", blkioStatEntriesPtr: &stats.BlkioStats.IoServiceBytesRecursive, }, } orderedStats := [][]blkioStatInfo{ bfqDebugStats, bfqStats, cfqStats, throttleRecursiveStats, baseStats, } var blkioStats []cgroups.BlkioStatEntry var err error for _, statGroup := range orderedStats { for i, statInfo := range statGroup { if blkioStats, err = getBlkioStat(path, statInfo.filename); err != nil || blkioStats == nil { // if error occurs on first file, move to next group if i == 0 { break } return err } *statInfo.blkioStatEntriesPtr = blkioStats // finish if all stats are gathered if i == len(statGroup)-1 { return nil } } } return nil } func (s *BlkioGroup) detectWeightFilenames(path string) { if s.weightFilename != "" { // Already detected. return } if cgroups.PathExists(filepath.Join(path, "blkio.weight")) { s.weightFilename = "blkio.weight" s.weightDeviceFilename = "blkio.weight_device" } else { s.weightFilename = "blkio.bfq.weight" s.weightDeviceFilename = "blkio.bfq.weight_device" } }
1
24,849
Looks like the actual rename is missing in this commit. Edit: ah, never mind, I was looking at the wrong commit.
opencontainers-runc
go
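As a hedged illustration of the rename discussed in the review above: a minimal Go sketch of what a cgroup v1 "apply" helper typically does, namely writing the pid into `cgroup.procs` under the subsystem path. The function body and file layout here are assumptions for illustration, not runc's actual `apply` implementation.

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strconv"
)

// apply is a minimal sketch of joining a process to a cgroup v1
// hierarchy: ensure the directory exists, then write the pid into
// cgroup.procs. Illustrative only; runc's real helper also handles
// special pid values and goes through its cgroups.WriteFile wrapper.
func apply(path string, pid int) error {
	if pid < 0 {
		return nil // assumption: mirrors the old join's no-op case
	}
	if err := os.MkdirAll(path, 0o755); err != nil {
		return err
	}
	procs := filepath.Join(path, "cgroup.procs")
	return os.WriteFile(procs, []byte(strconv.Itoa(pid)), 0o644)
}

func main() {
	// Example usage against a scratch directory instead of a real cgroupfs.
	if err := apply(filepath.Join(os.TempDir(), "demo-cgroup"), os.Getpid()); err != nil {
		fmt.Println("apply failed:", err)
	}
}
```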
@@ -67,7 +67,9 @@ module.exports = class Transloadit extends Plugin { this._onRestored = this._onRestored.bind(this) this._getPersistentData = this._getPersistentData.bind(this) - if (this.opts.params) { + if (this.opts.params || + // No params _and_ no custom getAssemblyOptions is an early error. + this.opts.getAssemblyOptions === defaultOptions.getAssemblyOptions) { AssemblyOptions.validateParams(this.opts.params) }
1
const Translator = require('@uppy/utils/lib/Translator') const { Plugin } = require('@uppy/core') const Tus = require('@uppy/tus') const Assembly = require('./Assembly') const Client = require('./Client') const AssemblyOptions = require('./AssemblyOptions') const AssemblyWatcher = require('./AssemblyWatcher') function defaultGetAssemblyOptions (file, options) { return { params: options.params, signature: options.signature, fields: options.fields } } const COMPANION = 'https://api2.transloadit.com/companion' // Regex matching acceptable postMessage() origins for authentication feedback from companion. const ALLOWED_COMPANION_PATTERN = /\.transloadit\.com$/ // Regex used to check if a Companion address is run by Transloadit. const TL_COMPANION = /https?:\/\/api2(?:-\w+)?\.transloadit\.com\/companion/ const TL_UPPY_SERVER = /https?:\/\/api2(?:-\w+)?\.transloadit\.com\/uppy-server/ /** * Upload files to Transloadit using Tus. */ module.exports = class Transloadit extends Plugin { constructor (uppy, opts) { super(uppy, opts) this.type = 'uploader' this.id = 'Transloadit' this.title = 'Transloadit' const defaultLocale = { strings: { creatingAssembly: 'Preparing upload...', creatingAssemblyFailed: 'Transloadit: Could not create Assembly', encoding: 'Encoding...' } } const defaultOptions = { service: 'https://api2.transloadit.com', waitForEncoding: false, waitForMetadata: false, alwaysRunAssembly: false, importFromUploadURLs: false, signature: null, params: null, fields: {}, getAssemblyOptions: defaultGetAssemblyOptions, locale: defaultLocale } this.opts = Object.assign({}, defaultOptions, opts) this.locale = Object.assign({}, defaultLocale, this.opts.locale) this.locale.strings = Object.assign({}, defaultLocale.strings, this.opts.locale.strings) this.translator = new Translator({ locale: this.locale }) this.i18n = this.translator.translate.bind(this.translator) this._prepareUpload = this._prepareUpload.bind(this) this._afterUpload = this._afterUpload.bind(this) this._handleError = this._handleError.bind(this) this._onFileUploadURLAvailable = this._onFileUploadURLAvailable.bind(this) this._onRestored = this._onRestored.bind(this) this._getPersistentData = this._getPersistentData.bind(this) if (this.opts.params) { AssemblyOptions.validateParams(this.opts.params) } this.client = new Client({ service: this.opts.service }) // Contains Assembly instances for in-progress Assemblies. this.activeAssemblies = {} } /** * Attach metadata to files to configure the Tus plugin to upload to Transloadit. * Also use Transloadit's Companion * * See: https://github.com/tus/tusd/wiki/Uploading-to-Transloadit-using-tus#uploading-using-tus * * @param {Object} file * @param {Object} status */ _attachAssemblyMetadata (file, status) { // Add the metadata parameters Transloadit needs. const meta = { ...file.meta, assembly_url: status.assembly_url, filename: file.name, fieldname: 'file' } // Add Assembly-specific Tus endpoint. const tus = { ...file.tus, endpoint: status.tus_url } // Set Companion location. We only add this, if 'file' has the attribute // remote, because this is the criteria to identify remote files. // We only replace the hostname for Transloadit's companions, so that // people can also self-host them while still using Transloadit for encoding. 
let remote = file.remote if (file.remote && TL_UPPY_SERVER.test(file.remote.serverUrl)) { const err = new Error( 'The https://api2.transloadit.com/uppy-server endpoint was renamed to ' + 'https://api2.transloadit.com/companion, please update your `serverUrl` ' + 'options accordingly.') // Explicitly log this error here because it is caught by the `createAssembly` // Promise further along. // That's fine, but createAssembly only shows the informer, we need something a // little more noisy. this.uppy.log(err) throw err } if (file.remote && TL_COMPANION.test(file.remote.serverUrl)) { const newHost = status.companion_url .replace(/\/$/, '') const path = file.remote.url .replace(file.remote.serverUrl, '') .replace(/^\//, '') remote = { ...file.remote, serverUrl: newHost, url: `${newHost}/${path}` } } // Store the Assembly ID this file is in on the file under the `transloadit` key. const newFile = { ...file, transloadit: { assembly: status.assembly_id } } // Only configure the Tus plugin if we are uploading straight to Transloadit (the default). if (!this.opts.importFromUploadURLs) { Object.assign(newFile, { meta, tus, remote }) } return newFile } _createAssembly (fileIDs, uploadID, options) { this.uppy.log('[Transloadit] create Assembly') return this.client.createAssembly({ params: options.params, fields: options.fields, expectedFiles: fileIDs.length, signature: options.signature }).then((newAssembly) => { const assembly = new Assembly(newAssembly) const status = assembly.status const { assemblies, uploadsAssemblies } = this.getPluginState() this.setPluginState({ // Store the Assembly status. assemblies: { ...assemblies, [status.assembly_id]: status }, // Store the list of Assemblies related to this upload. uploadsAssemblies: { ...uploadsAssemblies, [uploadID]: [ ...uploadsAssemblies[uploadID], status.assembly_id ] } }) const { files } = this.uppy.getState() const updatedFiles = {} fileIDs.forEach((id) => { updatedFiles[id] = this._attachAssemblyMetadata(this.uppy.getFile(id), status) }) this.uppy.setState({ files: { ...files, ...updatedFiles } }) this.uppy.emit('transloadit:assembly-created', status, fileIDs) this._connectAssembly(assembly) this.uppy.log(`[Transloadit] Created Assembly ${status.assembly_id}`) return assembly }).catch((err) => { err.message = `${this.i18n('creatingAssemblyFailed')}: ${err.message}` // Reject the promise. throw err }) } _shouldWaitAfterUpload () { return this.opts.waitForEncoding || this.opts.waitForMetadata } /** * Used when `importFromUploadURLs` is enabled: reserves all files in * the Assembly. */ _reserveFiles (assembly, fileIDs) { return Promise.all(fileIDs.map((fileID) => { const file = this.uppy.getFile(fileID) return this.client.reserveFile(assembly, file) })) } /** * Used when `importFromUploadURLs` is enabled: adds files to the Assembly * once they have been fully uploaded. */ _onFileUploadURLAvailable (file) { if (!file || !file.transloadit || !file.transloadit.assembly) { return } const { assemblies } = this.getPluginState() const assembly = assemblies[file.transloadit.assembly] this.client.addFile(assembly, file).catch((err) => { this.uppy.log(err) this.uppy.emit('transloadit:import-error', assembly, file.id, err) }) } _findFile (uploadedFile) { const files = this.uppy.getFiles() for (let i = 0; i < files.length; i++) { const file = files[i] // Completed file upload. if (file.uploadURL === uploadedFile.tus_upload_url) { return file } // In-progress file upload. 
if (file.tus && file.tus.uploadUrl === uploadedFile.tus_upload_url) { return file } if (!uploadedFile.is_tus_file) { // Fingers-crossed check for non-tus uploads, eg imported from S3. if (file.name === uploadedFile.name && file.size === uploadedFile.size) { return file } } } } _onFileUploadComplete (assemblyId, uploadedFile) { const state = this.getPluginState() const file = this._findFile(uploadedFile) if (!file) { this.uppy.log('[Transloadit] Couldn’t file the file, it was likely removed in the process') return } this.setPluginState({ files: Object.assign({}, state.files, { [uploadedFile.id]: { assembly: assemblyId, id: file.id, uploadedFile } }) }) this.uppy.emit('transloadit:upload', uploadedFile, this.getAssembly(assemblyId)) } /** * Callback when a new Assembly result comes in. * * @param {string} assemblyId * @param {string} stepName * @param {Object} result */ _onResult (assemblyId, stepName, result) { const state = this.getPluginState() const file = state.files[result.original_id] // The `file` may not exist if an import robot was used instead of a file upload. result.localId = file ? file.id : null const entry = { result, stepName, id: result.id, assembly: assemblyId } this.setPluginState({ results: [...state.results, entry] }) this.uppy.emit('transloadit:result', stepName, result, this.getAssembly(assemblyId)) } /** * When an Assembly has finished processing, get the final state * and emit it. * * @param {Object} status */ _onAssemblyFinished (status) { const url = status.assembly_ssl_url this.client.getAssemblyStatus(url).then((finalStatus) => { const state = this.getPluginState() this.setPluginState({ assemblies: Object.assign({}, state.assemblies, { [finalStatus.assembly_id]: finalStatus }) }) this.uppy.emit('transloadit:complete', finalStatus) }) } /** * Custom state serialization for the Golden Retriever plugin. * It will pass this back to the `_onRestored` function. * * @param {function} setData */ _getPersistentData (setData) { const state = this.getPluginState() const assemblies = state.assemblies const uploadsAssemblies = state.uploadsAssemblies setData({ [this.id]: { assemblies, uploadsAssemblies } }) } _onRestored (pluginData) { const savedState = pluginData && pluginData[this.id] ? pluginData[this.id] : {} const previousAssemblies = savedState.assemblies || {} const uploadsAssemblies = savedState.uploadsAssemblies || {} if (Object.keys(uploadsAssemblies).length === 0) { // Nothing to restore. return } // Convert loaded Assembly statuses to a Transloadit plugin state object. const restoreState = (assemblies) => { const files = {} const results = [] Object.keys(assemblies).forEach((id) => { const status = assemblies[id] status.uploads.forEach((uploadedFile) => { const file = this._findFile(uploadedFile) files[uploadedFile.id] = { id: file.id, assembly: id, uploadedFile } }) const state = this.getPluginState() Object.keys(status.results).forEach((stepName) => { status.results[stepName].forEach((result) => { const file = state.files[result.original_id] result.localId = file ? file.id : null results.push({ id: result.id, result, stepName, assembly: id }) }) }) }) this.setPluginState({ assemblies, files, results, uploadsAssemblies }) } // Set up the Assembly instances for existing Assemblies. const restoreAssemblies = () => { const { assemblies } = this.getPluginState() Object.keys(assemblies).forEach((id) => { const assembly = new Assembly(assemblies[id]) this._connectAssembly(assembly) }) } // Force-update all Assemblies to check for missed events. 
const updateAssemblies = () => { const { assemblies } = this.getPluginState() return Promise.all( Object.keys(assemblies).map((id) => { return this.activeAssemblies[id].update() }) ) } // Restore all Assembly state. this.restored = Promise.resolve().then(() => { restoreState(previousAssemblies) restoreAssemblies() return updateAssemblies() }) this.restored.then(() => { this.restored = null }) } _connectAssembly (assembly) { const { status } = assembly const id = status.assembly_id this.activeAssemblies[id] = assembly // Sync local `assemblies` state assembly.on('status', (newStatus) => { const { assemblies } = this.getPluginState() this.setPluginState({ assemblies: { ...assemblies, [id]: newStatus } }) }) assembly.on('upload', (file) => { this._onFileUploadComplete(id, file) }) assembly.on('error', (error) => { this.uppy.emit('transloadit:assembly-error', assembly.status, error) }) assembly.on('executing', () => { this.uppy.emit('transloadit:assembly-executing', assembly.status) }) if (this.opts.waitForEncoding) { assembly.on('result', (stepName, result) => { this._onResult(id, stepName, result) }) } if (this.opts.waitForEncoding) { assembly.on('finished', () => { this._onAssemblyFinished(assembly.status) }) } else if (this.opts.waitForMetadata) { assembly.on('metadata', () => { this._onAssemblyFinished(assembly.status) }) } // No need to connect to the socket if the Assembly has completed by now. if (assembly.ok === 'ASSEMBLY_COMPLETE') { return assembly } // TODO Do we still need this for anything…? // eslint-disable-next-line no-unused-vars const connected = new Promise((resolve, reject) => { assembly.once('connect', resolve) assembly.once('status', resolve) assembly.once('error', reject) }).then(() => { this.uppy.log('[Transloadit] Socket is ready') }) assembly.connect() return assembly } _prepareUpload (fileIDs, uploadID) { // Only use files without errors fileIDs = fileIDs.filter((file) => !file.error) fileIDs.forEach((fileID) => { const file = this.uppy.getFile(fileID) this.uppy.emit('preprocess-progress', file, { mode: 'indeterminate', message: this.i18n('creatingAssembly') }) }) const createAssembly = ({ fileIDs, options }) => { return this._createAssembly(fileIDs, uploadID, options).then((assembly) => { if (this.opts.importFromUploadURLs) { return this._reserveFiles(assembly, fileIDs) } }).then(() => { fileIDs.forEach((fileID) => { const file = this.uppy.getFile(fileID) this.uppy.emit('preprocess-complete', file) }) }).catch((err) => { fileIDs.forEach((fileID) => { const file = this.uppy.getFile(fileID) // Clear preprocessing state when the Assembly could not be created, // otherwise the UI gets confused about the lingering progress keys this.uppy.emit('preprocess-complete', file) this.uppy.emit('upload-error', file, err) }) throw err }) } const { uploadsAssemblies } = this.getPluginState() this.setPluginState({ uploadsAssemblies: { ...uploadsAssemblies, [uploadID]: [] } }) const files = fileIDs.map((id) => this.uppy.getFile(id)) const assemblyOptions = new AssemblyOptions(files, this.opts) return assemblyOptions.build().then( (assemblies) => Promise.all( assemblies.map(createAssembly) ), // If something went wrong before any Assemblies could be created, // clear all processing state. 
(err) => { fileIDs.forEach((fileID) => { const file = this.uppy.getFile(fileID) this.uppy.emit('preprocess-complete', file) this.uppy.emit('upload-error', file, err) }) throw err } ) } _afterUpload (fileIDs, uploadID) { // Only use files without errors fileIDs = fileIDs.filter((file) => !file.error) const state = this.getPluginState() // If we're still restoring state, wait for that to be done. if (this.restored) { return this.restored.then(() => { return this._afterUpload(fileIDs, uploadID) }) } const assemblyIDs = state.uploadsAssemblies[uploadID] // If we don't have to wait for encoding metadata or results, we can close // the socket immediately and finish the upload. if (!this._shouldWaitAfterUpload()) { assemblyIDs.forEach((assemblyID) => { const assembly = this.activeAssemblies[assemblyID] assembly.close() delete this.activeAssemblies[assemblyID] }) const assemblies = assemblyIDs.map((id) => this.getAssembly(id)) this.uppy.addResultData(uploadID, { transloadit: assemblies }) return Promise.resolve() } // If no Assemblies were created for this upload, we also do not have to wait. // There's also no sockets or anything to close, so just return immediately. if (assemblyIDs.length === 0) { this.uppy.addResultData(uploadID, { transloadit: [] }) return Promise.resolve() } // AssemblyWatcher tracks completion state of all Assemblies in this upload. const watcher = new AssemblyWatcher(this.uppy, assemblyIDs) fileIDs.forEach((fileID) => { const file = this.uppy.getFile(fileID) this.uppy.emit('postprocess-progress', file, { mode: 'indeterminate', message: this.i18n('encoding') }) }) watcher.on('assembly-complete', (id) => { const files = this.getAssemblyFiles(id) files.forEach((file) => { this.uppy.emit('postprocess-complete', file) }) }) watcher.on('assembly-error', (id, error) => { // Clear postprocessing state for all our files. const files = this.getAssemblyFiles(id) files.forEach((file) => { // TODO Maybe make a postprocess-error event here? this.uppy.emit('upload-error', file, error) this.uppy.emit('postprocess-complete', file) }) }) return watcher.promise.then(() => { const assemblies = assemblyIDs.map((id) => this.getAssembly(id)) // Remove the Assembly ID list for this upload, // it's no longer going to be used anywhere. const state = this.getPluginState() const uploadsAssemblies = { ...state.uploadsAssemblies } delete uploadsAssemblies[uploadID] this.setPluginState({ uploadsAssemblies }) this.uppy.addResultData(uploadID, { transloadit: assemblies }) }) } _handleError (err, uploadID) { this.uppy.log(`[Transloadit] _handleError in upload ${uploadID}`) this.uppy.log(err) const state = this.getPluginState() const assemblyIDs = state.uploadsAssemblies[uploadID] assemblyIDs.forEach((assemblyID) => { if (this.activeAssemblies[assemblyID]) { this.activeAssemblies[assemblyID].close() } }) } install () { this.uppy.addPreProcessor(this._prepareUpload) this.uppy.addPostProcessor(this._afterUpload) // We may need to close socket.io connections on error. this.uppy.on('error', this._handleError) if (this.opts.importFromUploadURLs) { // No uploader needed when importing; instead we take the upload URL from an existing uploader. this.uppy.on('upload-success', this._onFileUploadURLAvailable) } else { this.uppy.use(Tus, { // Disable tus-js-client fingerprinting, otherwise uploading the same file at different times // will upload to the same Assembly. 
resume: false, // Disable Companion's retry optimisation; we need to change the endpoint on retry // so it can't just reuse the same tus.Upload instance server-side. useFastRemoteRetry: false, // Only send Assembly metadata to the tus endpoint. metaFields: ['assembly_url', 'filename', 'fieldname'] }) } this.uppy.on('restore:get-data', this._getPersistentData) this.uppy.on('restored', this._onRestored) this.setPluginState({ // Contains Assembly status objects, indexed by their ID. assemblies: {}, // Contains arrays of Assembly IDs, indexed by the upload ID that they belong to. uploadsAssemblies: {}, // Contains file data from Transloadit, indexed by their Transloadit-assigned ID. files: {}, // Contains result data from Transloadit. results: [] }) } uninstall () { this.uppy.removePreProcessor(this._prepareUpload) this.uppy.removePostProcessor(this._afterUpload) this.uppy.off('error', this._handleError) if (this.opts.importFromUploadURLs) { this.uppy.off('upload-success', this._onFileUploadURLAvailable) } } getAssembly (id) { const state = this.getPluginState() return state.assemblies[id] } getAssemblyFiles (assemblyID) { return this.uppy.getFiles().filter((file) => { return file && file.transloadit && file.transloadit.assembly === assemblyID }) } } module.exports.COMPANION = COMPANION module.exports.UPPY_SERVER = COMPANION module.exports.COMPANION_PATTERN = ALLOWED_COMPANION_PATTERN
1
11,203
I'm a little stuck on this: how would it result in an error when `this.opts.params` is not set? It would just skip `AssemblyOptions.validateParams(this.opts.params)`.
transloadit-uppy
js
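To unpack the reviewer's question above: with the patch, `validateParams` also runs when no `params` were given but the default `getAssemblyOptions` is in use, and it is the validator rejecting empty params that produces the early error. A minimal sketch of that guard, written in Go for illustration; the names `validateParams` and `usingDefaultOptionsFn` are hypothetical stand-ins, not Uppy's API.

```go
package main

import (
	"errors"
	"fmt"
)

// validateParams is a stand-in for AssemblyOptions.validateParams:
// it rejects nil params, which is exactly how "no params and no
// custom getAssemblyOptions" becomes an early error.
func validateParams(params map[string]any) error {
	if params == nil {
		return errors.New("Transloadit: the `params` option is required")
	}
	return nil
}

func main() {
	var params map[string]any // caller passed no params
	usingDefaultOptionsFn := true

	// Mirror of the patched condition: validate when params are given,
	// or when the caller relies on the default options function.
	if params != nil || usingDefaultOptionsFn {
		if err := validateParams(params); err != nil {
			fmt.Println(err) // fails fast at construction time
		}
	}
}
```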
@@ -20,6 +20,15 @@ import ( "github.com/GoogleCloudPlatform/compute-image-tools/daisy" ) +var ( + // These patterns match a key of "Vars" in a daisy workflow. All CLI tools use these variables. + // + // For network and subnet, some workflows use the prefix `import_`. + networkPattern = regexp.MustCompile("^(import_)?network$") + subnetPattern = regexp.MustCompile("^(import_)?subnet$") + computeServiceAccountPattern = regexp.MustCompile("compute_service_account") +) + // ApplyAndValidateVars is a WorkflowHook that applies vars to a daisy workflow. // To ensure consistency across worker instances, if vars omits network, subnet, or the // compute service account, the modifier will automatically apply these values.
1
// Copyright 2021 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package daisyutils import ( "regexp" "github.com/GoogleCloudPlatform/compute-image-tools/daisy" ) // ApplyAndValidateVars is a WorkflowHook that applies vars to a daisy workflow. // To ensure consistency across worker instances, if vars omits network, subnet, or the // compute service account, the modifier will automatically apply these values. type ApplyAndValidateVars struct { env EnvironmentSettings vars map[string]string } // PreRunHook applies daisy vars to a workflow func (t *ApplyAndValidateVars) PreRunHook(wf *daisy.Workflow) error { // All CLI tools use these variables; if they're declared in the daisy workflow, but not passed by the caller in `vars`, // then apply them using the EnvironmentSettings. // // For network and subnet, some workflows use the prefix `import_`. t.backfillVar(regexp.MustCompile("^(import_)?network$"), t.env.Network, wf) t.backfillVar(regexp.MustCompile("^(import_)?subnet$"), t.env.Subnet, wf) t.backfillVar(regexp.MustCompile("compute_service_account"), t.env.ComputeServiceAccount, wf) Loop: for k, v := range t.vars { for wv := range wf.Vars { if k == wv { wf.AddVar(k, v) continue Loop } } return daisy.Errf("unknown workflow Var %q passed to Workflow %q", k, wf.Name) } return nil } func (t *ApplyAndValidateVars) backfillVar(keyPattern *regexp.Regexp, val string, wf *daisy.Workflow) { if val == "" { return } for k := range wf.Vars { if keyPattern.MatchString(k) && t.vars[k] == "" { t.vars[k] = val return } } }
1
14,008
How about `networkVarPattern` (etc.), then?
GoogleCloudPlatform-compute-image-tools
go
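Following the naming suggestion in the review above, the extracted package-level patterns might read as below; this is a sketch of the proposed rename, not the merged code.

```go
package daisyutils

import "regexp"

// These patterns match keys of "Vars" in a daisy workflow; some
// workflows prefix network/subnet with `import_`. Renamed per the
// review so the names say they match variable keys.
var (
	networkVarPattern               = regexp.MustCompile("^(import_)?network$")
	subnetVarPattern                = regexp.MustCompile("^(import_)?subnet$")
	computeServiceAccountVarPattern = regexp.MustCompile("compute_service_account")
)
```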
@@ -138,8 +138,10 @@ module RSpec # don't clone the example group because the new example # must belong to the same example group (not a clone). + # + # block is nil in new_metadata so we have to get it from metadata. Example.new(example_group, description.clone, - new_metadata, new_metadata[:block]) + new_metadata, metadata[:block]) end # @private
1
module RSpec module Core # Wrapper for an instance of a subclass of {ExampleGroup}. An instance of # `RSpec::Core::Example` is returned by example definition methods # such as {ExampleGroup.it it} and is yielded to the {ExampleGroup.it it}, # {Hooks#before before}, {Hooks#after after}, {Hooks#around around}, # {MemoizedHelpers::ClassMethods#let let} and # {MemoizedHelpers::ClassMethods#subject subject} blocks. # # This allows us to provide rich metadata about each individual # example without adding tons of methods directly to the ExampleGroup # that users may inadvertently redefine. # # Useful for configuring logging and/or taking some action based # on the state of an example's metadata. # # @example # # RSpec.configure do |config| # config.before do |example| # log example.description # end # # config.after do |example| # log example.description # end # # config.around do |example| # log example.description # example.run # end # end # # shared_examples "auditable" do # it "does something" do # log "#{example.full_description}: #{auditable.inspect}" # auditable.should do_something # end # end # # @see ExampleGroup # @note Example blocks are evaluated in the context of an instance # of an `ExampleGroup`, not in the context of an instance of `Example`. class Example # @private # # Used to define methods that delegate to this example's metadata. def self.delegate_to_metadata(key) define_method(key) { @metadata[key] } end # @return [ExecutionResult] represents the result of running this example. delegate_to_metadata :execution_result # @return [String] the relative path to the file where this example was # defined. delegate_to_metadata :file_path # @return [String] the full description (including the docstrings of # all parent example groups). delegate_to_metadata :full_description # @return [String] the exact source location of this example in a form # like `./path/to/spec.rb:17` delegate_to_metadata :location # @return [Boolean] flag that indicates that the example is not expected # to pass. It will be run and will either have a pending result (if a # failure occurs) or a failed result (if no failure occurs). delegate_to_metadata :pending # @return [Boolean] flag that will cause the example to not run. # The {ExecutionResult} status will be `:pending`. delegate_to_metadata :skip # Returns the string submitted to `example` or its aliases (e.g. # `specify`, `it`, etc). If no string is submitted (e.g. # `it { is_expected.to do_something }`) it returns the message generated # by the matcher if there is one, otherwise returns a message including # the location of the example. def description description = if metadata[:description].to_s.empty? location_description else metadata[:description] end RSpec.configuration.format_docstrings_block.call(description) end # Returns a description of the example that always includes the location. def inspect_output inspect_output = "\"#{description}\"" unless metadata[:description].to_s.empty? inspect_output << " (#{location})" end inspect_output end # Returns the location-based argument that can be passed to the `rspec` command to rerun this example. def location_rerun_argument @location_rerun_argument ||= begin loaded_spec_files = RSpec.configuration.loaded_spec_files Metadata.ascending(metadata) do |meta| return meta[:location] if loaded_spec_files.include?(meta[:absolute_file_path]) end end end # Returns the location-based argument that can be passed to the `rspec` command to rerun this example. # # @deprecated Use {#location_rerun_argument} instead. 
# @note If there are multiple examples identified by this location, they will use {#id} # to rerun instead, but this method will still return the location (that's why it is deprecated!). def rerun_argument location_rerun_argument end # @return [String] the unique id of this example. Pass # this at the command line to re-run this exact example. def id @id ||= Metadata.id_from(metadata) end # @private def self.parse_id(id) # http://rubular.com/r/OMZSAPcAfn id.match(/\A(.*?)(?:\[([\d\s:,]+)\])?\z/).captures end # Duplicates the example and overrides metadata with the provided # hash. # # @param metadata_overrides [Hash] the hash to override the example metadata # @return [Example] a duplicate of the example with modified metadata def duplicate_with(metadata_overrides={}) new_metadata = metadata.clone.merge(metadata_overrides) RSpec::Core::Metadata::RESERVED_KEYS.each do |reserved_key| new_metadata.delete reserved_key end # don't clone the example group because the new example # must belong to the same example group (not a clone). Example.new(example_group, description.clone, new_metadata, new_metadata[:block]) end # @private def update_inherited_metadata(updates) metadata.update(updates) do |_key, existing_example_value, _new_inherited_value| existing_example_value end end # @attr_reader # # Returns the first exception raised in the context of running this # example (nil if no exception is raised). attr_reader :exception # @attr_reader # # Returns the metadata object associated with this example. attr_reader :metadata # @attr_reader # @private # # Returns the example_group_instance that provides the context for # running this example. attr_reader :example_group_instance # @attr # @private attr_accessor :clock # Creates a new instance of Example. # @param example_group_class [Class] the subclass of ExampleGroup in which # this Example is declared # @param description [String] the String passed to the `it` method (or # alias) # @param user_metadata [Hash] additional args passed to `it` to be used as # metadata # @param example_block [Proc] the block of code that represents the # example # @api private def initialize(example_group_class, description, user_metadata, example_block=nil) @example_group_class = example_group_class @example_block = example_block # Register the example with the group before creating the metadata hash. # This is necessary since creating the metadata hash triggers # `when_first_matching_example_defined` callbacks, in which users can # load RSpec support code which defines hooks. For that to work, the # examples and example groups must be registered at the time the # support code is called or be defined afterwards. # Begin defined beforehand but registered afterwards causes hooks to # not be applied where they should. example_group_class.examples << self @metadata = Metadata::ExampleHash.create( @example_group_class.metadata, user_metadata, example_group_class.method(:next_runnable_index_for), description, example_block ) # This should perhaps be done in `Metadata::ExampleHash.create`, # but the logic there has no knowledge of `RSpec.world` and we # want to keep it that way. It's easier to just assign it here. 
@metadata[:last_run_status] = RSpec.configuration.last_run_statuses[id] @example_group_instance = @exception = nil @clock = RSpec::Core::Time @reporter = RSpec::Core::NullReporter end # Provide a human-readable representation of this class def inspect "#<#{self.class.name} #{description.inspect}>" end alias to_s inspect # @return [RSpec::Core::Reporter] the current reporter for the example attr_reader :reporter # Returns the example group class that provides the context for running # this example. def example_group @example_group_class end alias_method :pending?, :pending alias_method :skipped?, :skip # @api private # instance_execs the block passed to the constructor in the context of # the instance of {ExampleGroup}. # @param example_group_instance the instance of an ExampleGroup subclass def run(example_group_instance, reporter) @example_group_instance = example_group_instance @reporter = reporter RSpec.configuration.configure_example(self, hooks) RSpec.current_example = self start(reporter) Pending.mark_pending!(self, pending) if pending? begin if skipped? Pending.mark_pending! self, skip elsif !RSpec.configuration.dry_run? with_around_and_singleton_context_hooks do begin run_before_example @example_group_instance.instance_exec(self, &@example_block) if pending? Pending.mark_fixed! self raise Pending::PendingExampleFixedError, 'Expected example to fail since it is pending, but it passed.', [location] end rescue Pending::SkipDeclaredInExample # no-op, required metadata has already been set by the `skip` # method. rescue AllExceptionsExcludingDangerousOnesOnRubiesThatAllowIt => e set_exception(e) ensure run_after_example end end end rescue Support::AllExceptionsExceptOnesWeMustNotRescue => e set_exception(e) ensure @example_group_instance = nil # if you love something... let it go end finish(reporter) ensure execution_result.ensure_timing_set(clock) RSpec.current_example = nil end if RSpec::Support::Ruby.jruby? || RUBY_VERSION.to_f < 1.9 # :nocov: # For some reason, rescuing `Support::AllExceptionsExceptOnesWeMustNotRescue` # in place of `Exception` above can cause the exit status to be the wrong # thing. I have no idea why. See: # https://github.com/rspec/rspec-core/pull/2063#discussion_r38284978 # @private AllExceptionsExcludingDangerousOnesOnRubiesThatAllowIt = Exception # :nocov: else # @private AllExceptionsExcludingDangerousOnesOnRubiesThatAllowIt = Support::AllExceptionsExceptOnesWeMustNotRescue end # Wraps both a `Proc` and an {Example} for use in {Hooks#around # around} hooks. In around hooks we need to yield this special # kind of object (rather than the raw {Example}) because when # there are multiple `around` hooks we have to wrap them recursively. # # @example # # RSpec.configure do |c| # c.around do |ex| # Procsy which wraps the example # if ex.metadata[:key] == :some_value && some_global_condition # raise "some message" # end # ex.run # run delegates to ex.call. # end # end # # @note This class also exposes the instance methods of {Example}, # proxying them through to the wrapped {Example} instance. class Procsy # The {Example} instance. 
attr_reader :example Example.public_instance_methods(false).each do |name| name_sym = name.to_sym next if name_sym == :run || name_sym == :inspect || name_sym == :to_s define_method(name) { |*a, &b| @example.__send__(name, *a, &b) } end Proc.public_instance_methods(false).each do |name| name_sym = name.to_sym next if name_sym == :call || name_sym == :inspect || name_sym == :to_s || name_sym == :to_proc define_method(name) { |*a, &b| @proc.__send__(name, *a, &b) } end # Calls the proc and notes that the example has been executed. def call(*args, &block) @executed = true @proc.call(*args, &block) end alias run call # Provides a wrapped proc that will update our `executed?` state when # executed. def to_proc method(:call).to_proc end def initialize(example, &block) @example = example @proc = block @executed = false end # @private def wrap(&block) self.class.new(example, &block) end # Indicates whether or not the around hook has executed the example. def executed? @executed end # @private def inspect @example.inspect.gsub('Example', 'ExampleProcsy') end end # @private # # The exception that will be displayed to the user -- either the failure of # the example or the `pending_exception` if the example is pending. def display_exception @exception || execution_result.pending_exception end # @private # # Assigns the exception that will be displayed to the user -- either the failure of # the example or the `pending_exception` if the example is pending. def display_exception=(ex) if pending? && !(Pending::PendingExampleFixedError === ex) @exception = nil execution_result.pending_fixed = false execution_result.pending_exception = ex else @exception = ex end end # rubocop:disable Style/AccessorMethodName # @private # # Used internally to set an exception in an after hook, which # captures the exception but doesn't raise it. def set_exception(exception) return self.display_exception = exception unless display_exception unless RSpec::Core::MultipleExceptionError === display_exception self.display_exception = RSpec::Core::MultipleExceptionError.new(display_exception) end display_exception.add exception end # @private # # Used to set the exception when `aggregate_failures` fails. def set_aggregate_failures_exception(exception) return set_exception(exception) unless display_exception exception = RSpec::Core::MultipleExceptionError::InterfaceTag.for(exception) exception.add display_exception self.display_exception = exception end # rubocop:enable Style/AccessorMethodName # @private # # Used internally to set an exception and fail without actually executing # the example when an exception is raised in before(:context). def fail_with_exception(reporter, exception) start(reporter) set_exception(exception) finish(reporter) end # @private # # Used internally to skip without actually executing the example when # skip is used in before(:context). def skip_with_exception(reporter, exception) start(reporter) Pending.mark_skipped! 
self, exception.argument finish(reporter) end # @private def instance_exec(*args, &block) @example_group_instance.instance_exec(*args, &block) end private def hooks example_group_instance.singleton_class.hooks end def with_around_example_hooks hooks.run(:around, :example, self) { yield } rescue Support::AllExceptionsExceptOnesWeMustNotRescue => e set_exception(e) end def start(reporter) reporter.example_started(self) execution_result.started_at = clock.now end def finish(reporter) pending_message = execution_result.pending_message if @exception execution_result.exception = @exception record_finished :failed reporter.example_failed self false elsif pending_message execution_result.pending_message = pending_message record_finished :pending reporter.example_pending self true else record_finished :passed reporter.example_passed self true end end def record_finished(status) execution_result.record_finished(status, clock.now) reporter.example_finished(self) end def run_before_example @example_group_instance.setup_mocks_for_rspec hooks.run(:before, :example, self) end def with_around_and_singleton_context_hooks singleton_context_hooks_host = example_group_instance.singleton_class singleton_context_hooks_host.run_before_context_hooks(example_group_instance) with_around_example_hooks { yield } ensure singleton_context_hooks_host.run_after_context_hooks(example_group_instance) end def run_after_example assign_generated_description if defined?(::RSpec::Matchers) hooks.run(:after, :example, self) verify_mocks ensure @example_group_instance.teardown_mocks_for_rspec end def verify_mocks @example_group_instance.verify_mocks_for_rspec if mocks_need_verification? rescue Support::AllExceptionsExceptOnesWeMustNotRescue => e set_exception(e) end def mocks_need_verification? exception.nil? || execution_result.pending_fixed? end def assign_generated_description if metadata[:description].empty? && (description = generate_description) metadata[:description] = description metadata[:full_description] << description end ensure RSpec::Matchers.clear_generated_description end def generate_description RSpec::Matchers.generated_description rescue Support::AllExceptionsExceptOnesWeMustNotRescue => e location_description + " (Got an error when generating description " \ "from matcher: #{e.class}: #{e.message} -- #{e.backtrace.first})" end def location_description "example at #{location}" end # Represents the result of executing an example. # Behaves like a hash for backwards compatibility. class ExecutionResult include HashImitatable # @return [Symbol] `:passed`, `:failed` or `:pending`. attr_accessor :status # @return [Exception, nil] The failure, if there was one. attr_accessor :exception # @return [Time] When the example started. attr_accessor :started_at # @return [Time] When the example finished. attr_accessor :finished_at # @return [Float] How long the example took in seconds. attr_accessor :run_time # @return [String, nil] The reason the example was pending, # or nil if the example was not pending. attr_accessor :pending_message # @return [Exception, nil] The exception triggered while # executing the pending example. If no exception was triggered # it would no longer get a status of `:pending` unless it was # tagged with `:skip`. attr_accessor :pending_exception # @return [Boolean] For examples tagged with `:pending`, # this indicates whether or not it now passes. attr_accessor :pending_fixed alias pending_fixed? 
pending_fixed # @return [Boolean] Indicates if the example was completely skipped # (typically done via `:skip` metadata or the `skip` method). Skipped examples # will have a `:pending` result. A `:pending` result can also come from examples # that were marked as `:pending`, which causes them to be run, and produces a # `:failed` result if the example passes. def example_skipped? status == :pending && !pending_exception end # @api private # Records the finished status of the example. def record_finished(status, finished_at) self.status = status calculate_run_time(finished_at) end # @api private # Populates finished_at and run_time if it has not yet been set def ensure_timing_set(clock) calculate_run_time(clock.now) unless finished_at end private def calculate_run_time(finished_at) self.finished_at = finished_at self.run_time = (finished_at - started_at).to_f end # For backwards compatibility we present `status` as a string # when presenting the legacy hash interface. def hash_for_delegation super.tap do |hash| hash[:status] &&= status.to_s end end def set_value(name, value) value &&= value.to_sym if name == :status super(name, value) end def get_value(name) if name == :status status.to_s if status else super end end def issue_deprecation(_method_name, *_args) RSpec.deprecate("Treating `metadata[:execution_result]` as a hash", :replacement => "the attributes methods to access the data") end end end # @private # Provides an execution context for before/after :suite hooks. class SuiteHookContext < Example def initialize super(AnonymousExampleGroup, "", {}) @example_group_instance = AnonymousExampleGroup.new end # rubocop:disable Style/AccessorMethodName # To ensure we don't silence errors. def set_exception(exception) raise exception end # rubocop:enable Style/AccessorMethodName end end end
1
16,279
I think it makes sense to do `metadata[:block].clone`, as that's what `new_metadata` is, but somehow the block is missing from its clone.
rspec-rspec-core
rb
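The behavior puzzling the reviewer matches the patch comment: the `RESERVED_KEYS` are deleted from `new_metadata` right after cloning, so `new_metadata[:block]` is `nil` by the time it is read. A tiny Go sketch of the same shallow-copy-then-delete pitfall (map keys and names here are hypothetical):

```go
package main

import "fmt"

func main() {
	metadata := map[string]any{"description": "does a thing", "block": func() {}}
	reservedKeys := []string{"block", "execution_result"}

	// Shallow copy, then strip reserved keys: the analogue of
	// metadata.clone.merge(...) followed by deleting RESERVED_KEYS.
	newMetadata := map[string]any{}
	for k, v := range metadata {
		newMetadata[k] = v
	}
	for _, k := range reservedKeys {
		delete(newMetadata, k)
	}

	fmt.Println(newMetadata["block"]) // <nil>: the copy no longer has it
	fmt.Println(metadata["block"])    // still present on the original
}
```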
@@ -51,10 +51,12 @@ class ClosureAnalyzer extends FunctionLikeAnalyzer */ public function getClosureId(): string { - return strtolower($this->getFilePath()) - . ':' . $this->function->getLine() - . ':' . (int)$this->function->getAttribute('startFilePos') - . ':-:closure'; + return strtolower( + $this->getFilePath() + . ':' . $this->function->getLine() + . ':' . (int)$this->function->getAttribute('startFilePos') + . ':-:closure' + ); } /**
1
<?php namespace Psalm\Internal\Analyzer; use PhpParser; use Psalm\CodeLocation; use Psalm\Context; use Psalm\Issue\DuplicateParam; use Psalm\Issue\PossiblyUndefinedVariable; use Psalm\Issue\UndefinedVariable; use Psalm\IssueBuffer; use Psalm\Internal\DataFlow\DataFlowNode; use Psalm\Type; use Psalm\Type\Atomic\TNamedObject; use function strpos; use function is_string; use function in_array; use function strtolower; use function array_map; use function preg_match; /** * @internal * @extends FunctionLikeAnalyzer<PhpParser\Node\Expr\Closure|PhpParser\Node\Expr\ArrowFunction> */ class ClosureAnalyzer extends FunctionLikeAnalyzer { /** * @param PhpParser\Node\Expr\Closure|PhpParser\Node\Expr\ArrowFunction $function */ public function __construct(PhpParser\Node\FunctionLike $function, SourceAnalyzer $source) { $codebase = $source->getCodebase(); $function_id = \strtolower($source->getFilePath()) . ':' . $function->getLine() . ':' . (int)$function->getAttribute('startFilePos') . ':-:closure'; $storage = $codebase->getClosureStorage($source->getFilePath(), $function_id); parent::__construct($function, $source, $storage); } public function getTemplateTypeMap(): ?array { return $this->source->getTemplateTypeMap(); } /** * @return non-empty-lowercase-string */ public function getClosureId(): string { return strtolower($this->getFilePath()) . ':' . $this->function->getLine() . ':' . (int)$this->function->getAttribute('startFilePos') . ':-:closure'; } /** * @param PhpParser\Node\Expr\Closure|PhpParser\Node\Expr\ArrowFunction $stmt */ public static function analyzeExpression( StatementsAnalyzer $statements_analyzer, PhpParser\Node\FunctionLike $stmt, Context $context ) : bool { $closure_analyzer = new ClosureAnalyzer($stmt, $statements_analyzer); if ($stmt instanceof PhpParser\Node\Expr\Closure && self::analyzeClosureUses($statements_analyzer, $stmt, $context) === false ) { return false; } $use_context = new Context($context->self); $codebase = $statements_analyzer->getCodebase(); if (!$statements_analyzer->isStatic()) { if ($context->collect_mutations && $context->self && $codebase->classExtends( $context->self, (string)$statements_analyzer->getFQCLN() ) ) { /** @psalm-suppress PossiblyUndefinedStringArrayOffset */ $use_context->vars_in_scope['$this'] = clone $context->vars_in_scope['$this']; } elseif ($context->self) { $this_atomic = new TNamedObject($context->self); $this_atomic->was_static = true; $use_context->vars_in_scope['$this'] = new Type\Union([$this_atomic]); } } foreach ($context->vars_in_scope as $var => $type) { if (strpos($var, '$this->') === 0) { $use_context->vars_in_scope[$var] = clone $type; } } if ($context->self) { $self_class_storage = $codebase->classlike_storage_provider->get($context->self); ClassAnalyzer::addContextProperties( $statements_analyzer, $self_class_storage, $use_context, $context->self, $statements_analyzer->getParentFQCLN() ); } foreach ($context->vars_possibly_in_scope as $var => $_) { if (strpos($var, '$this->') === 0) { $use_context->vars_possibly_in_scope[$var] = true; } } if ($stmt instanceof PhpParser\Node\Expr\Closure) { foreach ($stmt->uses as $use) { if (!is_string($use->var->name)) { continue; } $use_var_id = '$' . $use->var->name; // insert the ref into the current context if passed by ref, as whatever we're passing // the closure to could execute it straight away. 
if (!$context->hasVariable($use_var_id) && $use->byRef) { $context->vars_in_scope[$use_var_id] = Type::getMixed(); } if ($statements_analyzer->data_flow_graph instanceof \Psalm\Internal\Codebase\VariableUseGraph && $context->hasVariable($use_var_id) ) { $parent_nodes = $context->vars_in_scope[$use_var_id]->parent_nodes; foreach ($parent_nodes as $parent_node) { $statements_analyzer->data_flow_graph->addPath( $parent_node, new DataFlowNode('closure-use', 'closure use', null), 'closure-use' ); } } $use_context->vars_in_scope[$use_var_id] = $context->hasVariable($use_var_id) && !$use->byRef ? clone $context->vars_in_scope[$use_var_id] : Type::getMixed(); if ($use->byRef) { $use_context->vars_in_scope[$use_var_id]->by_ref = true; } $use_context->vars_possibly_in_scope[$use_var_id] = true; foreach ($context->vars_in_scope as $var_id => $type) { if (preg_match('/^\$' . $use->var->name . '[\[\-]/', $var_id)) { $use_context->vars_in_scope[$var_id] = clone $type; $use_context->vars_possibly_in_scope[$var_id] = true; } } } } else { $traverser = new PhpParser\NodeTraverser; $short_closure_visitor = new \Psalm\Internal\PhpVisitor\ShortClosureVisitor(); $traverser->addVisitor($short_closure_visitor); $traverser->traverse($stmt->getStmts()); foreach ($short_closure_visitor->getUsedVariables() as $use_var_id => $_) { if ($context->hasVariable($use_var_id)) { $use_context->vars_in_scope[$use_var_id] = clone $context->vars_in_scope[$use_var_id]; if ($statements_analyzer->data_flow_graph instanceof \Psalm\Internal\Codebase\VariableUseGraph) { $parent_nodes = $context->vars_in_scope[$use_var_id]->parent_nodes; foreach ($parent_nodes as $parent_node) { $statements_analyzer->data_flow_graph->addPath( $parent_node, new DataFlowNode('closure-use', 'closure use', null), 'closure-use' ); } } } $use_context->vars_possibly_in_scope[$use_var_id] = true; } } $use_context->calling_method_id = $context->calling_method_id; $closure_analyzer->analyze($use_context, $statements_analyzer->node_data, $context, false); if ($closure_analyzer->inferred_impure && $statements_analyzer->getSource() instanceof \Psalm\Internal\Analyzer\FunctionLikeAnalyzer ) { $statements_analyzer->getSource()->inferred_impure = true; } if ($closure_analyzer->inferred_has_mutation && $statements_analyzer->getSource() instanceof \Psalm\Internal\Analyzer\FunctionLikeAnalyzer ) { $statements_analyzer->getSource()->inferred_has_mutation = true; } if (!$statements_analyzer->node_data->getType($stmt)) { $statements_analyzer->node_data->setType($stmt, Type::getClosure()); } return true; } /** * @return false|null */ public static function analyzeClosureUses( StatementsAnalyzer $statements_analyzer, PhpParser\Node\Expr\Closure $stmt, Context $context ): ?bool { $param_names = array_map( function (PhpParser\Node\Param $p) : string { if (!$p->var instanceof PhpParser\Node\Expr\Variable || !is_string($p->var->name) ) { return ''; } return $p->var->name; }, $stmt->params ); foreach ($stmt->uses as $use) { if (!is_string($use->var->name)) { continue; } $use_var_id = '$' . $use->var->name; if (in_array($use->var->name, $param_names)) { if (IssueBuffer::accepts( new DuplicateParam( 'Closure use duplicates param name ' . 
$use_var_id, new CodeLocation($statements_analyzer->getSource(), $use->var) ), $statements_analyzer->getSuppressedIssues() )) { return false; } } if (!$context->hasVariable($use_var_id)) { if ($use_var_id === '$argv' || $use_var_id === '$argc') { continue; } if ($use->byRef) { $context->vars_in_scope[$use_var_id] = Type::getMixed(); $context->vars_possibly_in_scope[$use_var_id] = true; if (!$statements_analyzer->hasVariable($use_var_id)) { $statements_analyzer->registerVariable( $use_var_id, new CodeLocation($statements_analyzer, $use->var), null ); } return null; } if (!isset($context->vars_possibly_in_scope[$use_var_id])) { if ($context->check_variables) { if (IssueBuffer::accepts( new UndefinedVariable( 'Cannot find referenced variable ' . $use_var_id, new CodeLocation($statements_analyzer->getSource(), $use->var) ), $statements_analyzer->getSuppressedIssues() )) { return false; } return null; } } $first_appearance = $statements_analyzer->getFirstAppearance($use_var_id); if ($first_appearance) { if (IssueBuffer::accepts( new PossiblyUndefinedVariable( 'Possibly undefined variable ' . $use_var_id . ', first seen on line ' . $first_appearance->getLineNumber(), new CodeLocation($statements_analyzer->getSource(), $use->var) ), $statements_analyzer->getSuppressedIssues() )) { return false; } continue; } if ($context->check_variables) { if (IssueBuffer::accepts( new UndefinedVariable( 'Cannot find referenced variable ' . $use_var_id, new CodeLocation($statements_analyzer->getSource(), $use->var) ), $statements_analyzer->getSuppressedIssues() )) { return false; } continue; } } elseif ($use->byRef) { $new_type = Type::getMixed(); $new_type->parent_nodes = $context->vars_in_scope[$use_var_id]->parent_nodes; $context->remove($use_var_id); $context->vars_in_scope[$use_var_id] = $new_type; } } return null; } }
1
10,254
Just realized this should be fixed instead of changed here: concatenating an int should still result in a lowercase string.
vimeo-psalm
php
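The reviewer's claim is easy to check: line numbers, offsets, and the `:-:closure` literal contain no uppercase characters, so lowercasing only the path gives the same result as lowercasing the whole concatenation. A quick Go verification of that equivalence (illustrative values, not Psalm code):

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

func main() {
	path := "/Src/Foo.php"
	line, startPos := 42, 1337

	// Lowercasing only the path: digits and the ":-:closure" literal
	// are already lowercase, so the two forms are identical.
	a := strings.ToLower(path) + ":" + strconv.Itoa(line) + ":" + strconv.Itoa(startPos) + ":-:closure"
	b := strings.ToLower(path + ":" + strconv.Itoa(line) + ":" + strconv.Itoa(startPos) + ":-:closure")
	fmt.Println(a == b, a) // true /src/foo.php:42:1337:-:closure
}
```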
@@ -79,7 +79,7 @@ module Selenium opts[:browser_version] = opts.delete(:version) if opts.key?(:version) opts[:platform_name] = opts.delete(:platform) if opts.key?(:platform) - new({browser_name: 'firefox'}.merge(opts)) + new({browser_name: 'firefox', marionette: true}.merge(opts)) end alias_method :ff, :firefox
1
# encoding: utf-8 # # Licensed to the Software Freedom Conservancy (SFC) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The SFC licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. module Selenium module WebDriver module Remote # # Specification of the desired and/or actual capabilities of the browser that the # server is being asked to create. # class W3CCapabilities DEFAULTS = { browser_name: '', browser_version: :any, platform_name: :any, platform_version: :any, accept_ssl_certs: false, page_load_strategy: 'normal', proxy: nil }.freeze KNOWN = [ :remote_session_id, :xul_app_id, :raise_accessibility_exceptions, :rotatable, :app_build_id, :device ].freeze (DEFAULTS.keys + KNOWN).each do |key| define_method key do @capabilities.fetch(key) end define_method "#{key}=" do |value| @capabilities[key] = value end end # # Backward compatibility # alias_method :version, :browser_version alias_method :version=, :browser_version= alias_method :platform, :platform_name alias_method :platform=, :platform_name= # # Convenience methods for the common choices. # class << self def edge(opts = {}) new({ browser_name: 'MicrosoftEdge', platform: :windows }.merge(opts)) end def firefox(opts = {}) opts[:browser_version] = opts.delete(:version) if opts.key?(:version) opts[:platform_name] = opts.delete(:platform) if opts.key?(:platform) new({browser_name: 'firefox'}.merge(opts)) end alias_method :ff, :firefox def w3c?(opts = {}) opts[:marionette] != false && (!opts[:desired_capabilities] || opts[:desired_capabilities][:marionette] != false) end # # @api private # def json_create(data) data = data.dup # Convert due to Remote Driver implementation data['browserVersion'] = data.delete('version') if data.key? 'version' data['platformName'] = data.delete('platform') if data.key? 'platform' caps = new caps.browser_name = data.delete('browserName') caps.browser_version = data.delete('browserVersion') caps.platform_name = data.delete('platformName') caps.platform_version = data.delete('platformVersion') caps.accept_ssl_certs = data.delete('acceptSslCerts') caps.page_load_strategy = data.delete('pageLoadStrategy') proxy = data.delete('proxy') caps.proxy = Proxy.json_create(proxy) unless proxy.nil? || proxy.empty? 
# Remote Server Specific caps[:remote_session_id] = data.delete('webdriver.remote.sessionid') # Obsolete capabilities returned by Remote Server data.delete('javascriptEnabled') data.delete('cssSelectorsEnabled') # Marionette Specific caps[:xul_app_id] = data.delete('XULappId') caps[:raise_accessibility_exceptions] = data.delete('raisesAccessibilityExceptions') caps[:rotatable] = data.delete('rotatable') caps[:app_build_id] = data.delete('appBuildId') caps[:device] = data.delete('device') # any remaining pairs will be added as is, with no conversion caps.merge!(data) caps end end # @param [Hash] opts # @option :browser_name [String] required browser name # @option :browser_version [String] required browser version number # @option :platform_name [Symbol] one of :any, :win, :mac, or :x # @option :platform_version [String] required platform version number # @option :accept_ssl_certs [Boolean] does the driver accept SSL Cerfifications? # @option :proxy [Selenium::WebDriver::Proxy, Hash] proxy configuration # # @api public # def initialize(opts = {}) @capabilities = DEFAULTS.merge(opts) self.proxy = opts.delete(:proxy) end # # Allows setting arbitrary capabilities. # def []=(key, value) @capabilities[key] = value end def [](key) @capabilities[key] end def merge!(other) if other.respond_to?(:capabilities, true) && other.capabilities.is_a?(Hash) @capabilities.merge! other.capabilities elsif other.is_a? Hash @capabilities.merge! other else raise ArgumentError, 'argument should be a Hash or implement #capabilities' end end def proxy=(proxy) case proxy when Hash @capabilities[:proxy] = Proxy.new(proxy) when Proxy, nil @capabilities[:proxy] = proxy else raise TypeError, "expected Hash or #{Proxy.name}, got #{proxy.inspect}:#{proxy.class}" end end # @api private # def as_json(*) hash = {} @capabilities.each do |key, value| case key when :platform hash['platform'] = value.to_s.upcase when :proxy hash['proxy'] = value.as_json if value when :firefox_options hash['moz:firefoxOptions'] = value when String, :firefox_binary hash[key.to_s] = value when Symbol hash[camel_case(key.to_s)] = value else raise TypeError, "expected String or Symbol, got #{key.inspect}:#{key.class} / #{value.inspect}" end end hash end def to_json(*) JSON.generate as_json end def ==(other) return false unless other.is_a? self.class as_json == other.as_json end alias_method :eql?, :== protected attr_reader :capabilities private def camel_case(str) str.gsub(/_([a-z])/) { Regexp.last_match(1).upcase } end end # W3CCapabilities end # Remote end # WebDriver end # Selenium
1
13,902
For Se 3.x we shouldn't need to specify `marionette: true`, should we? I'll see if I can figure out where this flag is actually consulted and whether it's necessary here (see the sketch after this record).
SeleniumHQ-selenium
rb
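An illustration for the record above: the `w3c?` predicate in the dumped Ruby file already treats marionette as on unless it is explicitly disabled, which is why Se 3.x callers shouldn't need to pass it. A minimal sketch of that behavior, assuming the `selenium-webdriver` gem that ships the class above is loaded:

require 'selenium-webdriver' # assumed: the gem providing the class dumped above

caps = Selenium::WebDriver::Remote::W3CCapabilities
# marionette is only consulted to opt *out*; omitting it means "enabled",
# so passing `marionette: true` is redundant in this code path.
caps.w3c?                                              #=> true
caps.w3c?(marionette: false)                           #=> false
caps.w3c?(desired_capabilities: { marionette: false }) #=> false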
@@ -34,14 +34,14 @@ func initStoreTest(ctx context.Context, t *testing.T, dstP *DefaultSyncerTestPar requireSetTestChain(t, con, true, dstP) } -func newChainStore(dstP *DefaultSyncerTestParams) chain.Store { +func newChainStore(dstP *DefaultSyncerTestParams) *chain.DefaultStore { r := repo.NewInMemoryRepo() ds := r.Datastore() return chain.NewDefaultStore(ds, dstP.genCid) } // requirePutTestChain adds all test chain tipsets to the passed in chain store. -func requirePutTestChain(t *testing.T, chainStore chain.Store, dstP *DefaultSyncerTestParams) { +func requirePutTestChain(t *testing.T, chainStore *chain.DefaultStore, dstP *DefaultSyncerTestParams) { ctx := context.Background() genTsas := &chain.TipSetAndState{ TipSet: dstP.genTS,
1
package chain_test import ( "context" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/ipfs/go-cid" "github.com/ipfs/go-hamt-ipld" bstore "github.com/ipfs/go-ipfs-blockstore" "github.com/filecoin-project/go-filecoin/chain" "github.com/filecoin-project/go-filecoin/consensus" "github.com/filecoin-project/go-filecoin/proofs" "github.com/filecoin-project/go-filecoin/repo" th "github.com/filecoin-project/go-filecoin/testhelpers" tf "github.com/filecoin-project/go-filecoin/testhelpers/testflags" "github.com/filecoin-project/go-filecoin/types" ) // Note: many of these tests use the test chain defined in the init function of default_syncer_test. func initStoreTest(ctx context.Context, t *testing.T, dstP *DefaultSyncerTestParams) { powerTable := &th.TestView{} r := repo.NewInMemoryRepo() bs := bstore.NewBlockstore(r.Datastore()) cst := hamt.NewCborStore() con := consensus.NewExpected(cst, bs, th.NewTestProcessor(), powerTable, dstP.genCid, proofs.NewFakeVerifier(true, nil)) initGenesisWrapper := func(cst *hamt.CborIpldStore, bs bstore.Blockstore) (*types.Block, error) { return initGenesis(dstP.minerAddress, dstP.minerOwnerAddress, dstP.minerPeerID, cst, bs) } initSyncTest(t, con, initGenesisWrapper, cst, bs, r, dstP, chain.Syncing) requireSetTestChain(t, con, true, dstP) } func newChainStore(dstP *DefaultSyncerTestParams) chain.Store { r := repo.NewInMemoryRepo() ds := r.Datastore() return chain.NewDefaultStore(ds, dstP.genCid) } // requirePutTestChain adds all test chain tipsets to the passed in chain store. func requirePutTestChain(t *testing.T, chainStore chain.Store, dstP *DefaultSyncerTestParams) { ctx := context.Background() genTsas := &chain.TipSetAndState{ TipSet: dstP.genTS, TipSetStateRoot: dstP.genStateRoot, } link1Tsas := &chain.TipSetAndState{ TipSet: dstP.link1, TipSetStateRoot: dstP.link1State, } link2Tsas := &chain.TipSetAndState{ TipSet: dstP.link2, TipSetStateRoot: dstP.link2State, } link3Tsas := &chain.TipSetAndState{ TipSet: dstP.link3, TipSetStateRoot: dstP.link3State, } link4Tsas := &chain.TipSetAndState{ TipSet: dstP.link4, TipSetStateRoot: dstP.link4State, } th.RequirePutTsas(ctx, t, chainStore, genTsas) th.RequirePutTsas(ctx, t, chainStore, link1Tsas) th.RequirePutTsas(ctx, t, chainStore, link2Tsas) th.RequirePutTsas(ctx, t, chainStore, link3Tsas) th.RequirePutTsas(ctx, t, chainStore, link4Tsas) } func requireGetTsasByParentAndHeight(t *testing.T, chain chain.Store, pKey string, h uint64) []*chain.TipSetAndState { tsasSlice, err := chain.GetTipSetAndStatesByParentsAndHeight(pKey, h) require.NoError(t, err) return tsasSlice } func requireHeadTipset(t *testing.T, chain chain.Store) types.TipSet { headTipSet, err := chain.GetTipSet(chain.GetHead()) require.NoError(t, err) return headTipSet } /* Putting and getting tipsets into and from the store. */ // Adding tipsets to the store doesn't error. func TestPutTipSet(t *testing.T) { tf.UnitTest(t) dstP := initDSTParams() ctx := context.Background() initStoreTest(ctx, t, dstP) cs := newChainStore(dstP) genTsas := &chain.TipSetAndState{ TipSet: dstP.genTS, TipSetStateRoot: dstP.genStateRoot, } err := cs.PutTipSetAndState(ctx, genTsas) assert.NoError(t, err) } // Tipsets can be retrieved by key (all block cids). 
func TestGetByKey(t *testing.T) { tf.UnitTest(t) dstP := initDSTParams() ctx := context.Background() initStoreTest(ctx, t, dstP) chain := newChainStore(dstP) requirePutTestChain(t, chain, dstP) gotGTS := requireGetTipSet(ctx, t, chain, dstP.genTS.ToSortedCidSet()) gotGTSSR := requireGetTipSetStateRoot(ctx, t, chain, dstP.genTS.ToSortedCidSet()) got1TS := requireGetTipSet(ctx, t, chain, dstP.link1.ToSortedCidSet()) got1TSSR := requireGetTipSetStateRoot(ctx, t, chain, dstP.link1.ToSortedCidSet()) got2TS := requireGetTipSet(ctx, t, chain, dstP.link2.ToSortedCidSet()) got2TSSR := requireGetTipSetStateRoot(ctx, t, chain, dstP.link2.ToSortedCidSet()) got3TS := requireGetTipSet(ctx, t, chain, dstP.link3.ToSortedCidSet()) got3TSSR := requireGetTipSetStateRoot(ctx, t, chain, dstP.link3.ToSortedCidSet()) got4TS := requireGetTipSet(ctx, t, chain, dstP.link4.ToSortedCidSet()) got4TSSR := requireGetTipSetStateRoot(ctx, t, chain, dstP.link4.ToSortedCidSet()) assert.Equal(t, dstP.genTS, gotGTS) assert.Equal(t, dstP.link1, got1TS) assert.Equal(t, dstP.link2, got2TS) assert.Equal(t, dstP.link3, got3TS) assert.Equal(t, dstP.link4, got4TS) assert.Equal(t, dstP.genStateRoot, gotGTSSR) assert.Equal(t, dstP.link1State, got1TSSR) assert.Equal(t, dstP.link2State, got2TSSR) assert.Equal(t, dstP.link3State, got3TSSR) assert.Equal(t, dstP.link4State, got4TSSR) } // Tipsets can be retrieved by parent key (all block cids of parents). func TestGetByParent(t *testing.T) { tf.UnitTest(t) dstP := initDSTParams() ctx := context.Background() initStoreTest(ctx, t, dstP) chain := newChainStore(dstP) requirePutTestChain(t, chain, dstP) pkg := types.SortedCidSet{}.String() // empty cid set is dstP.genesis pIDs pk1 := dstP.genTS.String() pk2 := dstP.link1.String() pk3 := dstP.link2.String() pk4 := dstP.link3.String() gotG := requireGetTsasByParentAndHeight(t, chain, pkg, uint64(0)) got1 := requireGetTsasByParentAndHeight(t, chain, pk1, uint64(1)) got2 := requireGetTsasByParentAndHeight(t, chain, pk2, uint64(2)) got3 := requireGetTsasByParentAndHeight(t, chain, pk3, uint64(3)) got4 := requireGetTsasByParentAndHeight(t, chain, pk4, uint64(6)) // two null blocks in between 3 and 4! 
assert.Equal(t, dstP.genTS, gotG[0].TipSet) assert.Equal(t, dstP.link1, got1[0].TipSet) assert.Equal(t, dstP.link2, got2[0].TipSet) assert.Equal(t, dstP.link3, got3[0].TipSet) assert.Equal(t, dstP.link4, got4[0].TipSet) assert.Equal(t, dstP.genStateRoot, gotG[0].TipSetStateRoot) assert.Equal(t, dstP.link1State, got1[0].TipSetStateRoot) assert.Equal(t, dstP.link2State, got2[0].TipSetStateRoot) assert.Equal(t, dstP.link3State, got3[0].TipSetStateRoot) assert.Equal(t, dstP.link4State, got4[0].TipSetStateRoot) } func TestGetMultipleByParent(t *testing.T) { tf.UnitTest(t) dstP := initDSTParams() ctx := context.Background() initStoreTest(ctx, t, dstP) chainStore := newChainStore(dstP) mockSigner, ki := types.NewMockSignersAndKeyInfo(2) mockSignerPubKey := ki[0].PublicKey() fakeChildParams := th.FakeChildParams{ Parent: dstP.genTS, GenesisCid: dstP.genCid, StateRoot: dstP.genStateRoot, MinerAddr: dstP.minerAddress, Nonce: uint64(5), Signer: mockSigner, MinerPubKey: mockSignerPubKey, } requirePutTestChain(t, chainStore, dstP) pk1 := dstP.genTS.String() // give one parent multiple children and then query newBlk := th.RequireMkFakeChild(t, fakeChildParams) newChild := th.RequireNewTipSet(t, newBlk) newRoot := dstP.cidGetter() newChildTsas := &chain.TipSetAndState{ TipSet: newChild, TipSetStateRoot: newRoot, } th.RequirePutTsas(ctx, t, chainStore, newChildTsas) gotNew1 := requireGetTsasByParentAndHeight(t, chainStore, pk1, uint64(1)) require.Equal(t, 2, len(gotNew1)) for _, tsas := range gotNew1 { if len(tsas.TipSet) == 1 { assert.Equal(t, newRoot, tsas.TipSetStateRoot) } else { assert.Equal(t, dstP.link1State, tsas.TipSetStateRoot) } } } // All blocks of a tipset can be retrieved after putting their wrapping tipset. func TestGetBlocks(t *testing.T) { tf.UnitTest(t) dstP := initDSTParams() ctx := context.Background() initStoreTest(ctx, t, dstP) chain := newChainStore(dstP) blks := []*types.Block{dstP.genesis, dstP.link1blk1, dstP.link1blk2, dstP.link2blk1, dstP.link2blk2, dstP.link2blk3, dstP.link3blk1, dstP.link4blk1, dstP.link4blk2} _, err := chain.GetBlock(ctx, dstP.genCid) assert.Error(t, err) // Get errors before put requirePutTestChain(t, chain, dstP) var cids types.SortedCidSet for _, blk := range blks { c := blk.Cid() (&cids).Add(c) gotBlk, err := chain.GetBlock(ctx, c) assert.NoError(t, err) assert.Equal(t, blk.Cid(), gotBlk.Cid()) } gotBlks, err := chain.GetBlocks(ctx, cids) assert.NoError(t, err) assert.Equal(t, len(blks), len(gotBlks)) } // chain.Store correctly indicates that is has all blocks in put tipsets func TestHasAllBlocks(t *testing.T) { tf.UnitTest(t) dstP := initDSTParams() ctx := context.Background() initStoreTest(ctx, t, dstP) chain := newChainStore(dstP) blks := []*types.Block{dstP.genesis, dstP.link1blk1, dstP.link1blk2, dstP.link2blk1, dstP.link2blk2, dstP.link2blk3, dstP.link3blk1, dstP.link4blk1, dstP.link4blk2} assert.False(t, chain.HasBlock(ctx, dstP.genCid)) // Has returns false before put requirePutTestChain(t, chain, dstP) var cids []cid.Cid for _, blk := range blks { c := blk.Cid() cids = append(cids, c) assert.True(t, chain.HasBlock(ctx, c)) } assert.True(t, chain.HasAllBlocks(ctx, cids)) } /* Head and its State is set and notified properly. */ // The constructor call sets the dstP.genesis block for the chain store. 
func TestSetGenesis(t *testing.T) { tf.UnitTest(t) dstP := initDSTParams() ctx := context.Background() initStoreTest(ctx, t, dstP) chain := newChainStore(dstP) requirePutTestChain(t, chain, dstP) require.Equal(t, dstP.genCid, chain.GenesisCid()) } func assertSetHead(t *testing.T, chainStore chain.Store, ts types.TipSet) { ctx := context.Background() err := chainStore.SetHead(ctx, ts) assert.NoError(t, err) } // Set and Get Head. func TestHead(t *testing.T) { tf.UnitTest(t) dstP := initDSTParams() ctx := context.Background() initStoreTest(ctx, t, dstP) chain := newChainStore(dstP) requirePutTestChain(t, chain, dstP) // Head starts as an empty cid set assert.Equal(t, types.SortedCidSet{}, chain.GetHead()) // Set Head assertSetHead(t, chain, dstP.genTS) assert.Equal(t, dstP.genTS.ToSortedCidSet(), chain.GetHead()) // Move head forward assertSetHead(t, chain, dstP.link4) assert.Equal(t, dstP.link4.ToSortedCidSet(), chain.GetHead()) // Move head back assertSetHead(t, chain, dstP.genTS) assert.Equal(t, dstP.genTS.ToSortedCidSet(), chain.GetHead()) } func assertEmptyCh(t *testing.T, ch <-chan interface{}) { select { case <-ch: assert.True(t, false) default: } } // Head events are propagated on HeadEvents. func TestHeadEvents(t *testing.T) { tf.UnitTest(t) dstP := initDSTParams() ctx := context.Background() initStoreTest(ctx, t, dstP) chainStore := newChainStore(dstP) requirePutTestChain(t, chainStore, dstP) ps := chainStore.HeadEvents() chA := ps.Sub(chain.NewHeadTopic) chB := ps.Sub(chain.NewHeadTopic) assertSetHead(t, chainStore, dstP.genTS) assertSetHead(t, chainStore, dstP.link1) assertSetHead(t, chainStore, dstP.link2) assertSetHead(t, chainStore, dstP.link3) assertSetHead(t, chainStore, dstP.link4) assertSetHead(t, chainStore, dstP.link3) assertSetHead(t, chainStore, dstP.link2) assertSetHead(t, chainStore, dstP.link1) assertSetHead(t, chainStore, dstP.genTS) heads := []types.TipSet{dstP.genTS, dstP.link1, dstP.link2, dstP.link3, dstP.link4, dstP.link3, dstP.link2, dstP.link1, dstP.genTS} // Heads arrive in the expected order for i := 0; i < 9; i++ { headA := <-chA headB := <-chB assert.Equal(t, headA, headB) assert.Equal(t, headA, heads[i]) } // No extra notifications assertEmptyCh(t, chA) assertEmptyCh(t, chB) } /* Loading */ // Load does not error and gives the chain store access to all blocks and // tipset indexes along the heaviest chain. func TestLoadAndReboot(t *testing.T) { tf.UnitTest(t) dstP := initDSTParams() ctx := context.Background() initStoreTest(ctx, t, dstP) r := repo.NewInMemoryRepo() ds := r.Datastore() chainStore := chain.NewDefaultStore(ds, dstP.genCid) requirePutTestChain(t, chainStore, dstP) assertSetHead(t, chainStore, dstP.genTS) // set the genesis block assertSetHead(t, chainStore, dstP.link4) chainStore.Stop() // rebuild chain with same datastore rebootChain := chain.NewDefaultStore(ds, dstP.genCid) err := rebootChain.Load(ctx) assert.NoError(t, err) // Check that chain store has index // Get a tipset and state by key got2 := requireGetTipSet(ctx, t, rebootChain, dstP.link2.ToSortedCidSet()) assert.Equal(t, dstP.link2, got2) // Get another by parent key got4 := requireGetTsasByParentAndHeight(t, rebootChain, dstP.link3.String(), uint64(6)) assert.Equal(t, 1, len(got4)) assert.Equal(t, dstP.link4, got4[0].TipSet) // Check that chainStore store has blocks assert.True(t, rebootChain.HasBlock(ctx, dstP.link3blk1.Cid())) assert.True(t, rebootChain.HasBlock(ctx, dstP.link2blk3.Cid())) assert.True(t, rebootChain.HasBlock(ctx, dstP.genesis.Cid())) }
1
19,358
This should be renamed to `chain.Store`. It was named `DefaultStore` as an alternative to `StoreImpl`, which is the (bad) Java convention. (A sketch of the rename follows this record.)
filecoin-project-venus
go
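To make the naming suggestion above concrete, here is a hypothetical sketch of the direction the reviewer describes; only the `chain.Store` and `NewStore` names come from the comment, and the field layout is assumed for illustration:

package chain

import (
	"github.com/ipfs/go-cid"
	"github.com/ipfs/go-datastore"
)

// Store is the concrete chain store. With the package qualifier the name
// already reads as chain.Store, so no DefaultStore/StoreImpl suffix is needed.
type Store struct {
	ds      datastore.Datastore
	genesis cid.Cid
}

// NewStore would replace NewDefaultStore under this sketch.
func NewStore(ds datastore.Datastore, genesisCid cid.Cid) *Store {
	return &Store{ds: ds, genesis: genesisCid}
}

Call sites in the test file above, such as newChainStore, would then declare *chain.Store and call chain.NewStore directly, exactly as the patch's type changes anticipate.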
@@ -85,8 +85,12 @@ func (a *AttachDisks) validate(ctx context.Context, s *Step) (errs DError) { ad.zone = disk["zone"] // Register disk attachments. - errs = addErrs(errs, s.w.instances.w.disks.regAttach(ad.Source, ad.Instance, ad.Mode, s)) - + // If device name is not given explicitly, its device name will be the same as disk name. + deviceName := ad.Source + if ad.DeviceName != "" { + deviceName = ad.DeviceName + } + errs = addErrs(errs, s.w.instances.w.disks.regAttach(deviceName, ad.Source, ad.Instance, ad.Mode, s)) } return errs }
1
// Copyright 2017 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package daisy import ( "context" "path" "sync" "google.golang.org/api/compute/v1" ) // AttachDisks is a Daisy AttachDisks workflow step. type AttachDisks []*AttachDisk // AttachDisk is used to attach a GCE disk into an instance. type AttachDisk struct { compute.AttachedDisk // Instance to attach to. Instance string project, zone string } func (a *AttachDisks) populate(ctx context.Context, s *Step) DError { for _, ad := range *a { ad.Mode = strOr(ad.Mode, defaultDiskMode) if ad.DeviceName == "" { ad.DeviceName = path.Base(ad.Source) } if diskURLRgx.MatchString(ad.Source) { ad.Source = extendPartialURL(ad.Source, s.w.Project) } } return nil } func (a *AttachDisks) validate(ctx context.Context, s *Step) (errs DError) { for _, ad := range *a { if !checkDiskMode(ad.Mode) { errs = addErrs(errs, Errf("cannot attach disk: bad disk mode: %q", ad.Mode)) } if ad.Source == "" { errs = addErrs(errs, Errf("cannot attach disk: AttachedDisk.Source is empty")) } ir, err := s.w.instances.regUse(ad.Instance, s) if ir == nil { // Return now, the rest of this function can't be run without ir. return addErrs(errs, Errf("cannot attach disk: %v", err)) } addErrs(errs, err) dr, err := s.w.disks.regUse(ad.Source, s) if dr == nil { // Return now, the rest of this function can't be run without dr. return addErrs(errs, Errf("cannot attach disk: %v", err)) } addErrs(errs, err) // Ensure disk is in the same project and zone. disk := NamedSubexp(diskURLRgx, dr.link) instance := NamedSubexp(instanceURLRgx, ir.link) if disk["project"] != instance["project"] { errs = addErrs(errs, Errf("cannot attach disk in project %q to instance in project %q: %q", disk["project"], instance["project"], ad.Source)) } if disk["zone"] != instance["zone"] { errs = addErrs(errs, Errf("cannot attach disk in zone %q to instance in zone %q: %q", disk["zone"], instance["zone"], ad.Source)) } ad.project = disk["project"] ad.zone = disk["zone"] // Register disk attachments. errs = addErrs(errs, s.w.instances.w.disks.regAttach(ad.Source, ad.Instance, ad.Mode, s)) } return errs } func (a *AttachDisks) run(ctx context.Context, s *Step) DError { var wg sync.WaitGroup w := s.w e := make(chan DError) for _, ad := range *a { wg.Add(1) go func(ad *AttachDisk) { defer wg.Done() if diskRes, ok := w.disks.get(ad.Source); ok { ad.Source = diskRes.link } inst := ad.Instance if instRes, ok := w.instances.get(ad.Instance); ok { inst = instRes.link ad.Instance = instRes.RealName } w.LogStepInfo(s.name, "AttachDisks", "Attaching disk %q to instance %q.", ad.AttachedDisk.Source, inst) if err := w.ComputeClient.AttachDisk(ad.project, ad.zone, ad.Instance, &ad.AttachedDisk); err != nil { e <- newErr("failed to attach disk", err) return } }(ad) } go func() { wg.Wait() e <- nil }() select { case err := <-e: return err case <-w.Cancel: wg.Wait() return nil } }
1
11,524
ad.Source is the disk name? Shouldn't the order of initialization be reversed here, then: set from ad.DeviceName first and fall back to ad.Source only if it's empty? Or is this a different use case? This is the third place this device/disk default logic is implemented, which is one more reason to move it into regAttach if it can be generalized (see the sketch after this record).
GoogleCloudPlatform-compute-image-tools
go
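Following up on the review comment above, a hedged sketch of pulling the device-name fallback into a single helper; it mirrors what (*AttachDisks).populate already does with path.Base, but the helper itself is hypothetical, not code from the repository:

package daisy

import "path"

// defaultDeviceName centralizes the fallback duplicated across populate,
// validate, and the patch above: when no device name is given explicitly,
// the attachment is registered under the disk's base name.
func defaultDeviceName(deviceName, source string) string {
	if deviceName == "" {
		return path.Base(source)
	}
	return deviceName
}

validate could then call s.w.disks.regAttach(defaultDeviceName(ad.DeviceName, ad.Source), ad.Source, ad.Instance, ad.Mode, s), and the other call sites would shrink the same way. Centralizing would also surface an existing inconsistency: populate falls back to path.Base(ad.Source) while the patch falls back to ad.Source itself, and a shared helper would force the two defaults to agree.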
@@ -26,6 +26,11 @@ namespace Microsoft.DotNet.Build.Tasks.Feed [Output] public ITaskItem[] PackageInfos { get; set; } + /// <summary> + /// Specify max number of minutes that the lock of the feed container will be hold. + /// </summary> + public int FeedLockTimeoutMinutes { get; set; } = int.MaxValue; + public override bool Execute() { return ExecuteAsync().GetAwaiter().GetResult();
1
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Microsoft.Build.Framework; using NuGet.Packaging.Core; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using MSBuild = Microsoft.Build.Utilities; namespace Microsoft.DotNet.Build.Tasks.Feed { public class GetBlobFeedPackageList : BuildTask { private const string NuGetPackageInfoId = "PackageId"; private const string NuGetPackageInfoVersion = "PackageVersion"; [Required] public string ExpectedFeedUrl { get; set; } [Required] public string AccountKey { get; set; } [Output] public ITaskItem[] PackageInfos { get; set; } public override bool Execute() { return ExecuteAsync().GetAwaiter().GetResult(); } private async Task<bool> ExecuteAsync() { try { Log.LogMessage(MessageImportance.High, "Listing blob feed packages..."); BlobFeedAction action = new BlobFeedAction(ExpectedFeedUrl, AccountKey, Log); ISet<PackageIdentity> packages = await action.GetPackageIdentitiesAsync(); PackageInfos = packages.Select(ConvertToPackageInfoItem).ToArray(); } catch (Exception e) { Log.LogErrorFromException(e, true); } return !Log.HasLoggedErrors; } private ITaskItem ConvertToPackageInfoItem(PackageIdentity identity) { var metadata = new Dictionary<string, string> { [NuGetPackageInfoId] = identity.Id, [NuGetPackageInfoVersion] = identity.Version.ToString() }; return new MSBuild.TaskItem(identity.ToString(), metadata); } } }
1
15,346
typo: "be hold" -> "be held"
dotnet-buildtools
.cs
@@ -44,10 +44,10 @@ namespace Nethermind.Blockchain.Receipts public long? LowestInsertedReceiptBlockNumber { - get => long.MaxValue; + get => 1; set { } } public long MigratedBlockNumber { get; set; } = 0; } -} +}
1
// Copyright (c) 2018 Demerzel Solutions Limited // This file is part of the Nethermind library. // // The Nethermind library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The Nethermind library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the Nethermind. If not, see <http://www.gnu.org/licenses/>. using System; using System.Collections.Generic; using Nethermind.Core; using Nethermind.Core.Crypto; namespace Nethermind.Blockchain.Receipts { public class NullReceiptStorage : IReceiptStorage { public static NullReceiptStorage Instance { get; } = new NullReceiptStorage(); public Keccak FindBlockHash(Keccak hash) => null; private NullReceiptStorage() { } public void Insert(Block block, params TxReceipt[] txReceipts) { } public TxReceipt[] Get(Block block) => Array.Empty<TxReceipt>(); public TxReceipt[] Get(Keccak blockHash) => Array.Empty<TxReceipt>(); public bool CanGetReceiptsByHash(long blockNumber) => true; public bool TryGetReceiptsIterator(long blockNumber, Keccak blockHash, out ReceiptsIterator iterator) { iterator = new ReceiptsIterator(); return false; } public long? LowestInsertedReceiptBlockNumber { get => long.MaxValue; set { } } public long MigratedBlockNumber { get; set; } = 0; } }
1
24,722
I do not think we should have any logic inside the null storage. 1 has a very specific magical meaning. We should either return null here or long.MaxValue (see the sketch after this record).
NethermindEth-nethermind
.cs
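To make the alternative in the comment above concrete, a minimal C# sketch (not the repository's final code) of keeping the null object logic-free:

// Sketch only: the null storage reports "nothing inserted" with null rather
// than the magic block number 1; long.MaxValue is the other option the
// reviewer names, matching the property's pre-patch behavior.
public long? LowestInsertedReceiptBlockNumber
{
    get => null;
    set { }
}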
@@ -397,7 +397,7 @@ func TestInitProcessEnabled(t *testing.T) { } agent := RunAgent(t, nil) defer agent.Cleanup() - agent.RequireVersion(">=1.14.5") + agent.RequireVersion(">=1.15.0") td, err := GetTaskDefinition("init-process") if err != nil {
1
// +build functional,!windows // Copyright 2014-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"). You may // not use this file except in compliance with the License. A copy of the // License is located at // // http://aws.amazon.com/apache2.0/ // // or in the "license" file accompanying this file. This file is distributed // on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either // express or implied. See the License for the specific language governing // permissions and limitations under the License. // Package simpletest is an auto-generated set of tests defined by the json // descriptions in testdata/simpletests. // // This file should not be edited; rather you should edit the generator instead package simpletest import ( "os" "testing" "time" . "github.com/aws/amazon-ecs-agent/agent/functional_tests/util" ) // TestAddAndDropCapabilities checks that adding and dropping Linux capabilities work func TestAddAndDropCapabilities(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.0.0") td, err := GetTaskDefinition("add-drop-capabilities") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestDataVolume Check that basic data volumes work func TestDataVolume(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.0.0") td, err := GetTaskDefinition("datavolume") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. 
Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestDataVolume2 Verify that more complex datavolumes (including empty and volumes-from) work as expected; see Related func TestDataVolume2(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">1.0.0") td, err := GetTaskDefinition("datavolume2") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestDevices checks that adding devices works func TestDevices(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.0.0") td, err := GetTaskDefinition("devices") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestDisableNetworking Check that disable networking works func TestDisableNetworking(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.5.0") td, err := GetTaskDefinition("network-disabled") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. 
Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestDnsSearchDomains Check that dns search domains works func TestDnsSearchDomains(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.5.0") td, err := GetTaskDefinition("dns-search-domains") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestDnsServers Check that dns servers works func TestDnsServers(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.5.0") td, err := GetTaskDefinition("dns-servers") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestExtraHosts Check that extra hosts works func TestExtraHosts(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.5.0") td, err := GetTaskDefinition("extra-hosts") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. 
Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestHostname Check that hostname works func TestHostname(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.5.0") td, err := GetTaskDefinition("hostname") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestInitProcessEnabled checks that enabling init process works func TestInitProcessEnabled(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.14.5") td, err := GetTaskDefinition("init-process") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestLinkVolumeDependencies Tests that the dependency graph of task definitions is resolved correctly func TestLinkVolumeDependencies(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.0.0") td, err := GetTaskDefinition("network-link-2") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. 
Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestNetworkLink Tests that basic network linking works func TestNetworkLink(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.0.0") td, err := GetTaskDefinition("network-link") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestParallelPull check docker pull in parallel works for docker >= 1.11.1 func TestParallelPull(t *testing.T) { // Test only available for docker version >=1.11.1 RequireDockerVersion(t, ">=1.11.1") // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.0.0") td, err := GetTaskDefinition("parallel-pull") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 4) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("1m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } // Make sure the task is running for _, testTask := range testTasks { err = testTask.WaitRunning(timeout) if err != nil { t.Errorf("Timed out waiting for task to reach running. Error %v, task %v", err, testTask) } } // Cleanup, stop all the tasks and wait for the containers to be stopped for _, testTask := range testTasks { err = testTask.Stop() if err != nil { t.Errorf("Failed to stop task, Error %v, task %v", err, testTask) } } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } defer agent.SweepTask(testTask) } } // TestPrivileged Check that privileged works func TestPrivileged(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.5.0") td, err := GetTaskDefinition("privileged") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. 
Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestReadonlyRootfs Check that readonly rootfs works func TestReadonlyRootfs(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.5.0") td, err := GetTaskDefinition("readonly-rootfs") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestSecurityOptNoNewPrivileges Check that security-opt=no-new-privileges works func TestSecurityOptNoNewPrivileges(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.12.1") td, err := GetTaskDefinition("security-opt-nonewprivileges") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestSimpleExit Tests that the basic premis of this testing fromwork works (e.g. exit codes go through, etc) func TestSimpleExit(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.0.0") td, err := GetTaskDefinition("simple-exit") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. 
Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestNofilesULimit Check that nofiles ulimit works func TestNofilesULimit(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.5.0") td, err := GetTaskDefinition("nofiles-ulimit") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestUserNobody Check that user works func TestUserNobody(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.5.0") td, err := GetTaskDefinition("user-nobody") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } } // TestWorkingDir Check that working dir works func TestWorkingDir(t *testing.T) { // Parallel is opt in because resource constraints could cause test failures // on smaller instances if os.Getenv("ECS_FUNCTIONAL_PARALLEL") != "" { t.Parallel() } agent := RunAgent(t, nil) defer agent.Cleanup() agent.RequireVersion(">=1.5.0") td, err := GetTaskDefinition("working-dir") if err != nil { t.Fatalf("Could not register task definition: %v", err) } testTasks, err := agent.StartMultipleTasks(t, td, 1) if err != nil { t.Fatalf("Could not start task: %v", err) } timeout, err := time.ParseDuration("2m") if err != nil { t.Fatalf("Could not parse timeout: %#v", err) } for _, testTask := range testTasks { err = testTask.WaitStopped(timeout) if err != nil { t.Fatalf("Timed out waiting for task to reach stopped. Error %#v, task %#v", err, testTask) } if exit, ok := testTask.ContainerExitcode("exit"); !ok || exit != 42 { t.Errorf("Expected exit to exit with 42; actually exited (%v) with %v", ok, exit) } defer agent.SweepTask(testTask) } }
1
18,463
Why did this change?
aws-amazon-ecs-agent
go
@@ -168,7 +168,6 @@ class PlansController < ApplicationController render('/phases/edit', locals: { base_template_org: phase.template.base_org, plan: plan, phase: phase, readonly: readonly, - question_guidance: plan.guidance_by_question_as_hash, guidance_groups: guidance_groups, answers: answers, guidance_service: GuidanceService.new(plan) })
1
require 'pp' class PlansController < ApplicationController include ConditionalUserMailer helper PaginableHelper helper SettingsTemplateHelper include FeedbacksHelper after_action :verify_authorized, except: [:overview] def index authorize Plan @plans = Plan.active(current_user).page(1) @organisationally_or_publicly_visible = Plan.organisationally_or_publicly_visible(current_user).page(1) end # GET /plans/new # ------------------------------------------------------------------------------------ def new @plan = Plan.new authorize @plan # Get all of the available funders and non-funder orgs @funders = Org.funder.joins(:templates).where(templates: {published: true}).uniq.sort{|x,y| x.name <=> y.name } @orgs = (Org.organisation + Org.institution + Org.managing_orgs).flatten.uniq.sort{|x,y| x.name <=> y.name } # Get the current user's org @default_org = current_user.org if @orgs.include?(current_user.org) flash[:notice] = "#{_('This is a')} <strong>#{_('test plan')}</strong>" if params[:test] @is_test = params[:test] ||= false respond_to :html end # POST /plans # ------------------------------------------------------------------- def create @plan = Plan.new authorize @plan # We set these ids to -1 on the page to trick ariatiseForm into allowing the autocomplete to be blank if # the no org/funder checkboxes are checked off org_id = (plan_params[:org_id] == '-1' ? '' : plan_params[:org_id]) funder_id = (plan_params[:funder_id] == '-1' ? '' : plan_params[:funder_id]) # If the template_id is blank then we need to look up the available templates and return JSON if plan_params[:template_id].blank? # Something went wrong there should always be a template id respond_to do |format| flash[:alert] = _('Unable to identify a suitable template for your plan.') format.html { redirect_to new_plan_path } end else # Otherwise create the plan @plan.principal_investigator = current_user.surname.blank? ? nil : "#{current_user.firstname} #{current_user.surname}" @plan.principal_investigator_email = current_user.email orcid = current_user.identifier_for(IdentifierScheme.find_by(name: 'orcid')) @plan.principal_investigator_identifier = orcid.identifier unless orcid.nil? @plan.funder_name = plan_params[:funder_name] @plan.visibility = (plan_params['visibility'].blank? ? Rails.application.config.default_plan_visibility : plan_params[:visibility]) @plan.template = Template.find(plan_params[:template_id]) if plan_params[:title].blank? @plan.title = current_user.firstname.blank? ? _('My Plan') + '(' + @plan.template.title + ')' : current_user.firstname + "'s" + _(" Plan") else @plan.title = plan_params[:title] end if @plan.save @plan.assign_creator(current_user) # pre-select org's guidance and the default org's guidance ids = (Org.managing_orgs << org_id).flatten.uniq ggs = GuidanceGroup.where(org_id: ids, optional_subset: false, published: true) if !ggs.blank? then @plan.guidance_groups << ggs end default = Template.default msg = "#{success_message(_('plan'), _('created'))}<br />" if !default.nil? && default == @plan.template # We used the generic/default template msg += " #{_('This plan is based on the default template.')}" elsif [email protected]_of.nil? 
# We used a customized version of the the funder template msg += " #{_('This plan is based on the')} #{plan_params[:funder_name]}: '#{@plan.template.title}' #{_('template with customisations by the')} #{plan_params[:org_name]}" else # We used the specified org's or funder's template msg += " #{_('This plan is based on the')} #{@plan.template.org.name}: '#{@plan.template.title}' template." end respond_to do |format| flash[:notice] = msg format.html { redirect_to plan_path(@plan) } end else # Something went wrong so report the issue to the user respond_to do |format| flash[:alert] = failed_create_error(@plan, 'Plan') format.html { redirect_to new_plan_path } end end end end # GET /plans/show def show @plan = Plan.eager_load(params[:id]) authorize @plan @visibility = @plan.visibility.present? ? @plan.visibility.to_s : Rails.application.config.default_plan_visibility @editing = (!params[:editing].nil? && @plan.administerable_by?(current_user.id)) # Get all Guidance Groups applicable for the plan and group them by org @all_guidance_groups = @plan.get_guidance_group_options @all_ggs_grouped_by_org = @all_guidance_groups.sort.group_by(&:org) @selected_guidance_groups = @plan.guidance_groups # Important ones come first on the page - we grab the user's org's GGs and "Organisation" org type GGs @important_ggs = [] @important_ggs << [current_user.org, @all_ggs_grouped_by_org[current_user.org]] if @all_ggs_grouped_by_org.include?(current_user.org) @all_ggs_grouped_by_org.each do |org, ggs| if org.organisation? @important_ggs << [org,ggs] end # If this is one of the already selected guidance groups its important! if !(ggs & @selected_guidance_groups).empty? @important_ggs << [org,ggs] unless @important_ggs.include?([org,ggs]) end end # Sort the rest by org name for the accordion @important_ggs = @important_ggs.sort_by{|org,gg| (org.nil? ? '' : org.name)} @all_ggs_grouped_by_org = @all_ggs_grouped_by_org.sort_by {|org,gg| (org.nil? ? '' : org.name)} @selected_guidance_groups = @selected_guidance_groups.collect{|gg| gg.id} @based_on = (@plan.template.customization_of.nil? ? @plan.template : Template.where(family_id: @plan.template.customization_of).first) respond_to :html end # GET /plans/:plan_id/phases/:id/edit def edit plan = Plan.find(params[:id]) authorize plan plan, phase = Plan.load_for_phase(params[:id], params[:phase_id]) readonly = !plan.editable_by?(current_user.id) guidance_groups = GuidanceGroup.where(published: true, id: plan.guidance_group_ids) # Since the answers have been pre-fetched through plan (see Plan.load_for_phase) # we create a hash whose keys are question id and value is the answer associated answers = plan.answers.reduce({}){ |m, a| m[a.question_id] = a; m } render('/phases/edit', locals: { base_template_org: phase.template.base_org, plan: plan, phase: phase, readonly: readonly, question_guidance: plan.guidance_by_question_as_hash, guidance_groups: guidance_groups, answers: answers, guidance_service: GuidanceService.new(plan) }) end # PUT /plans/1 # PUT /plans/1.json def update @plan = Plan.find(params[:id]) authorize @plan attrs = plan_params respond_to do |format| begin # Save the guidance group selections guidance_group_ids = params[:guidance_group_ids].blank? ? 
[] : params[:guidance_group_ids].map(&:to_i).uniq @plan.guidance_groups = GuidanceGroup.where(id: guidance_group_ids) @plan.save if @plan.update_attributes(attrs) format.html { redirect_to overview_plan_path(@plan), notice: success_message(_('plan'), _('saved')) } format.json {render json: {code: 1, msg: success_message(_('plan'), _('saved'))}} else flash[:alert] = failed_update_error(@plan, _('plan')) format.html { render action: "edit" } format.json {render json: {code: 0, msg: flash[:alert]}} end rescue Exception flash[:alert] = failed_update_error(@plan, _('plan')) format.html { render action: "edit" } format.json {render json: {code: 0, msg: flash[:alert]}} end end end def share @plan = Plan.find(params[:id]) if @plan.present? authorize @plan # Get the roles where the user is not a reviewer @plan_roles = @plan.roles.select{ |r| !r.reviewer? } else redirect_to(plans_path) end end def destroy @plan = Plan.find(params[:id]) authorize @plan if @plan.destroy respond_to do |format| format.html { redirect_to plans_url, notice: success_message(_('plan'), _('deleted')) } end else respond_to do |format| flash[:alert] = failed_create_error(@plan, _('plan')) format.html { render action: "edit" } end end end # GET /status/1.json # only returns json, why is this here? def status @plan = Plan.find(params[:id]) authorize @plan respond_to do |format| format.json { render json: @plan.status } end end def answer @plan = Plan.find(params[:id]) authorize @plan if !params[:q_id].nil? respond_to do |format| format.json { render json: @plan.answer(params[:q_id], false).to_json(:include => :options) } end else respond_to do |format| format.json { render json: {} } end end end def download @plan = Plan.find(params[:id]) authorize @plan @phase_options = @plan.phases.order(:number).pluck(:title,:id) @export_settings = @plan.settings(:export) render 'download' end def export @plan = Plan.includes(:answers).find(params[:id]) authorize @plan @show_coversheet = params[:export][:project_details].present? @show_sections_questions = params[:export][:question_headings].present? @show_unanswered = params[:export][:unanswered_questions].present? @public_plan = false @hash = @plan.as_pdf(@show_coversheet) @formatting = params[:export][:formatting] || @plan.settings(:export).formatting file_name = @plan.title.gsub(/ /, "_") respond_to do |format| format.html { render layout: false } format.csv { send_data @plan.as_csv(@show_sections_questions), filename: "#{file_name}.csv" } format.text { send_data render_to_string(partial: 'shared/export/plan_txt'), filename: "#{file_name}.txt" } format.docx { render docx: "#{file_name}.docx", content: render_to_string(partial: 'shared/export/plan') } format.pdf do render pdf: file_name, margin: @formatting[:margin], footer: { center: _('Created using the %{application_name}. Last modified %{date}') % {application_name: Rails.configuration.branding[:application][:name], date: l(@plan.updated_at.to_date, formats: :short)}, font_size: 8, spacing: (Integer(@formatting[:margin][:bottom]) / 2) - 4, right: '[page] of [topage]' } end end end def duplicate plan = Plan.find(params[:id]) authorize plan @plan = Plan.deep_copy(plan) respond_to do |format| if @plan.save @plan.assign_creator(current_user) format.html { redirect_to @plan, notice: success_message(_('plan'), _('copied')) } else format.html { redirect_to plans_path, alert: failed_create_error(@plan, 'Plan') } end end end # POST /plans/:id/visibility def visibility plan = Plan.find(params[:id]) if plan.present? 
authorize plan if plan.visibility_allowed? plan.visibility = plan_params[:visibility] if plan.save deliver_if(recipients: plan.owner_and_coowners, key: 'owners_and_coowners.visibility_changed') do |r| UserMailer.plan_visibility(r,plan).deliver_now() end render status: :ok, json: { msg: success_message(_('plan\'s visibility'), _('changed')) } else render status: :internal_server_error, json: { msg: _('Error raised while saving the visibility for plan id %{plan_id}') %{ :plan_id => params[:id]} } end else render status: :forbidden, json: { msg: _('Unable to change the plan\'s status since it is needed at least '\ '%{percentage} percentage responded') %{ :percentage => Rails.application.config.default_plan_percentage_answered } } end else render status: :not_found, json: { msg: _('Unable to find plan id %{plan_id}') %{ :plan_id => params[:id]} } end end def set_test plan = Plan.find(params[:id]) authorize plan plan.visibility = (params[:is_test] === "1" ? :is_test : :privately_visible) if plan.save render json: {code: 1, msg: (plan.is_test? ? _('Your project is now a test.') : _('Your project is no longer a test.') )} else render status: :bad_request, json: {code: 0, msg: _("Unable to change the plan's test status")} end end def request_feedback @plan = Plan.find(params[:id]) authorize @plan alert = _('Unable to submit your request for feedback at this time.') begin if @plan.request_feedback(current_user) redirect_to share_plan_path(@plan), notice: _(request_feedback_flash_notice) else redirect_to share_plan_path(@plan), alert: alert end rescue Exception redirect_to share_plan_path(@plan), alert: alert end end def overview begin plan = Plan.overview(params[:id]) authorize plan render(:overview, locals: { plan: plan }) rescue ActiveRecord::RecordNotFound flash[:alert] = _('There is no plan associated with id %{id}') %{ :id => params[:id] } redirect_to(action: :index) end end private def plan_params params.require(:plan).permit(:org_id, :org_name, :funder_id, :funder_name, :template_id, :title, :visibility, :grant_number, :description, :identifier, :principal_investigator, :principal_investigator_email, :principal_investigator_identifier, :data_contact, :data_contact_email, :data_contact_phone, :guidance_group_ids) end # different versions of the same template have the same family_id # but different version numbers so for each set of templates with the # same family_id choose the highest version number. def get_most_recent( templates ) groups = Hash.new templates.each do |t| k = t.family_id if !groups.has_key?(k) groups[k] = t else other = groups[k] if other.version < t.version groups[k] = t end end end groups.values end def fixup_hash(plan) rollup(plan, "notes", "answer_id", "answers") rollup(plan, "answers", "question_id", "questions") rollup(plan, "questions", "section_id", "sections") rollup(plan, "sections", "phase_id", "phases") plan["template"]["phases"] = plan.delete("phases") ghash = {} plan["guidance_groups"].map{|g| ghash[g["id"]] = g} plan["plans_guidance_groups"].each do |pgg| pgg["guidance_group"] = ghash[ pgg["guidance_group_id"] ] end plan["template"]["org"] = Org.find(plan["template"]["org_id"]).serializable_hash() end # find all object under src_plan_key # merge them into the items under obj_plan_key using # super_id = id # so we have answers which each have a question_id # rollup(plan, "answers", "quesiton_id", "questions") # will put the answers into the right questions. 
def rollup(plan, src_plan_key, super_id, obj_plan_key) id_to_obj = Hash.new() plan[src_plan_key].each do |o| id = o[super_id] if !id_to_obj.has_key?(id) id_to_obj[id] = Array.new end id_to_obj[id] << o end plan[obj_plan_key].each do |o| id = o["id"] if id_to_obj.has_key?(id) o[src_plan_key] = id_to_obj[ id ] end end plan.delete(src_plan_key) end # Flash notice for successful feedback requests # # @return [String] def request_feedback_flash_notice # Use the generic feedback confirmation message unless the Org has # specified one text = current_user.org.feedback_email_msg || feedback_confirmation_default_message feedback_constant_to_text(text, current_user, @plan, current_user.org) end end
1
17,916
Removed this method. Doesn't seem to be used in the code, and the variable set here doesn't seem to be used in the views.
DMPRoadmap-roadmap
rb
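For context on this record: the `rollup` helper in the file above nests one flat collection inside another by foreign key before the plan hash is serialized. A minimal Ruby sketch of the transformation, using hypothetical data (the hash shape is inferred from the code above, not taken from the project's documentation):

```ruby
plan = {
  "answers"   => [{ "id" => 7, "question_id" => 3, "text" => "..." }],
  "questions" => [{ "id" => 3 }]
}

# rollup(plan, "answers", "question_id", "questions") groups each answer
# under the question whose id matches its question_id, then deletes the
# flat "answers" list:
#
# => { "questions" => [{ "id" => 3,
#                        "answers" => [{ "id" => 7, "question_id" => 3,
#                                        "text" => "..." }] }] }
```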
@@ -16,7 +16,12 @@ namespace Microsoft.CodeAnalysis.Sarif { internal const string DEFAULT_POLICY_NAME = "default"; - public PropertyBagDictionary() : base() { } + public PropertyBagDictionary() : this(null) { } + + public PropertyBagDictionary(PropertyBagDictionary initializer) : + this(initializer, null) + { + } public PropertyBagDictionary( PropertyBagDictionary initializer = null,
1
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections.Generic; using System.Collections.Immutable; using System.ComponentModel; using System.IO; using System.Runtime.Serialization; using System.Xml; namespace Microsoft.CodeAnalysis.Sarif { [Serializable] public class PropertyBagDictionary : TypedPropertyBagDictionary<object> { internal const string DEFAULT_POLICY_NAME = "default"; public PropertyBagDictionary() : base() { } public PropertyBagDictionary( PropertyBagDictionary initializer = null, IEqualityComparer<string> comparer = null) : base(initializer, comparer) { } protected PropertyBagDictionary(SerializationInfo info, StreamingContext context) : base(info, context) { } public string Name { get; set; } public virtual T GetProperty<T>(PerLanguageOption<T> setting, bool cacheDefault = true) { if (setting == null) { throw new ArgumentNullException(nameof(setting)); } PropertyBagDictionary properties = GetSettingsContainer(setting, cacheDefault); T value; if (!properties.TryGetProperty(setting.Name, out value) && setting.DefaultValue != null) { value = setting.DefaultValue(); if (cacheDefault) { properties[setting.Name] = value; } } return value; } public override void SetProperty(IOption setting, object value, bool cacheDescription = false) { if (setting == null) { throw new ArgumentNullException(nameof(setting)); } PropertyBagDictionary properties = GetSettingsContainer(setting, true); if (value == null && properties.ContainsKey(setting.Name)) { properties.Remove(setting.Name); return; } if (cacheDescription) { SettingNameToDescriptionsMap = SettingNameToDescriptionsMap ?? new Dictionary<string, string>(); SettingNameToDescriptionsMap[setting.Name] = setting.Description; } properties[setting.Name] = value; } internal bool TryGetProperty<T>(string key, out T value) { value = default(T); object result; if (this.TryGetValue(key, out result)) { if (result is T) { value = (T)result; return true; } return TryConvertFromString((string)result, out value); } return false; } private PropertyBagDictionary GetSettingsContainer(IOption setting, bool cacheDefault) { PropertyBagDictionary properties = this; if (String.IsNullOrEmpty(Name)) { object propertiesObject; string featureOptionsName = setting.Feature + ".Options"; if (!TryGetValue(featureOptionsName, out propertiesObject)) { properties = new PropertyBagDictionary(); if (cacheDefault) { this[featureOptionsName] = properties; } properties.Name = featureOptionsName; } else { properties = (PropertyBagDictionary)propertiesObject; } } return properties; } private static bool TryConvertFromString<T>(string source, out T destination) { destination = default(T); if (source == null) return false; TypeConverter converter = TypeDescriptor.GetConverter(typeof(T)); destination = (T)converter.ConvertFrom(source); return destination != null; } public void SaveTo(string filePath, string id) { using (var writer = new FileStream(filePath, FileMode.Create, FileAccess.Write)) SaveTo(writer, id); } public void SaveTo(Stream stream, string id) { var settings = new XmlWriterSettings { Indent = true }; using (XmlWriter writer = XmlWriter.Create(stream, settings)) { this.SavePropertyBagToStream(writer, settings, id, SettingNameToDescriptionsMap); } } public void LoadFrom(string filePath) { using (var reader = new FileStream(filePath, FileMode.Open, FileAccess.Read)) LoadFrom(reader); } public void LoadFrom(Stream stream) { using 
(XmlReader reader = XmlReader.Create(stream)) { if (reader.IsStartElement(PropertyBagExtensionMethods.PROPERTIES_ID)) { bool isEmpty = reader.IsEmptyElement; this.Clear(); // Note: we do not recover the property bag id // as there is no current product use for the value reader.ReadStartElement(PropertyBagExtensionMethods.PROPERTIES_ID); this.LoadPropertiesFromXmlStream(reader); if (!isEmpty) reader.ReadEndElement(); } } } // Current consumers of this data expect that child namespaces // will always precede parent namespaces, if also included. public static ImmutableArray<string> DefaultNamespaces = new List<string>( new string[] { "Microsoft.CodeAnalysis.Options.", "Microsoft.CodeAnalysis." }).ToImmutableArray(); } }
1
11,058
Remove the initializers ("`= null`") from both parameters. Remember that that's why we added the overloads: to ensure that all languages could have the convenience of the abbreviated argument lists (not all languages understand default parameters).
microsoft-sarif-sdk
.cs
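The review comment above maps to a small change in the patch: keep the new overload chain but drop the `= null` defaults, so the two-argument constructor remains callable from languages that do not understand default parameters. A sketch of how the constructors might look after applying the suggestion (this is an illustration of the requested change, not the merged code):

```csharp
public PropertyBagDictionary() : this(null) { }

public PropertyBagDictionary(PropertyBagDictionary initializer)
    : this(initializer, null) { }

public PropertyBagDictionary(
    PropertyBagDictionary initializer,
    IEqualityComparer<string> comparer)
    : base(initializer, comparer) { }
```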
@@ -0,0 +1,13 @@ +package values + +import ( + "github.com/influxdata/flux/semantic" +) + +type Vector interface { + Value + ElementType() semantic.MonoType + Get(i int) Value + Set(i int, value Value) + Len() int +}
1
1
17,607
I think you can just remove most of these attributes outside of `ElementType()`. Instead, this should expose the arrow array. We're not going to use these methods anyway.
influxdata-flux
go
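Applying the comment above to the patched interface would keep `ElementType()` and replace the per-element accessors with the backing Arrow array. A Go sketch under two assumptions that are not confirmed by this record: that the exposed type is `array.Interface` from the Arrow Go bindings, and that the accessor is named `Arr()`:

```go
package values

import (
	"github.com/apache/arrow/go/arrow/array"
	"github.com/influxdata/flux/semantic"
)

// Vector keeps only the element type and exposes the backing Arrow array,
// per the review suggestion.
type Vector interface {
	Value
	ElementType() semantic.MonoType
	Arr() array.Interface // hypothetical accessor name, not confirmed
}
```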
@@ -5,8 +5,7 @@ import { enqueueRender } from '../render-queue'; import { getNodeProps } from './index'; import { diff, mounts, diffLevel, flushMounts, recollectNodeTree, removeChildren } from './diff'; import { createComponent, collectComponent } from './component-recycler'; - - +import remove from './remove'; /** Set a component's `props` (generally derived from JSX attributes). * @param {Object} props
1
import { SYNC_RENDER, NO_RENDER, FORCE_RENDER, ASYNC_RENDER, ATTR_KEY } from '../constants'; import options from '../options'; import { extend } from '../util'; import { enqueueRender } from '../render-queue'; import { getNodeProps } from './index'; import { diff, mounts, diffLevel, flushMounts, recollectNodeTree, removeChildren } from './diff'; import { createComponent, collectComponent } from './component-recycler'; /** Set a component's `props` (generally derived from JSX attributes). * @param {Object} props * @param {Object} [opts] * @param {boolean} [opts.renderSync=false] If `true` and {@link options.syncComponentUpdates} is `true`, triggers synchronous rendering. * @param {boolean} [opts.render=true] If `false`, no render will be triggered. */ export function setComponentProps(component, props, opts, context, mountAll) { if (component._disable) return; component._disable = true; if ((component.__ref = props.ref)) delete props.ref; if ((component.__key = props.key)) delete props.key; if (!component.base || mountAll) { if (component.componentWillMount) component.componentWillMount(); } else if (component.componentWillReceiveProps) { component.componentWillReceiveProps(props, context); } if (context && context!==component.context) { if (!component.prevContext) component.prevContext = component.context; component.context = context; } if (!component.prevProps) component.prevProps = component.props; component.props = props; component._disable = false; if (opts!==NO_RENDER) { if (opts===SYNC_RENDER || options.syncComponentUpdates!==false || !component.base) { renderComponent(component, SYNC_RENDER, mountAll); } else { enqueueRender(component); } } if (component.__ref) component.__ref(component); } /** Render a Component, triggering necessary lifecycle events and taking High-Order Components into account. * @param {Component} component * @param {Object} [opts] * @param {boolean} [opts.build=false] If `true`, component will build and store a DOM node if not already associated with one. 
* @private */ export function renderComponent(component, opts, mountAll, isChild) { if (component._disable) return; let props = component.props, state = component.state, context = component.context, previousProps = component.prevProps || props, previousState = component.prevState || state, previousContext = component.prevContext || context, isUpdate = component.base, nextBase = component.nextBase, initialBase = isUpdate || nextBase, initialChildComponent = component._component, skip = false, rendered, inst, cbase; // if updating if (isUpdate) { component.props = previousProps; component.state = previousState; component.context = previousContext; if (opts!==FORCE_RENDER && component.shouldComponentUpdate && component.shouldComponentUpdate(props, state, context) === false) { skip = true; } else if (component.componentWillUpdate) { component.componentWillUpdate(props, state, context); } component.props = props; component.state = state; component.context = context; } component.prevProps = component.prevState = component.prevContext = component.nextBase = null; component._dirty = false; if (!skip) { rendered = component.render(props, state, context); // context to pass to the child, can be updated via (grand-)parent component if (component.getChildContext) { context = extend(extend({}, context), component.getChildContext()); } let childComponent = rendered && rendered.nodeName, toUnmount, base; if (typeof childComponent==='function') { // set up high order component link let childProps = getNodeProps(rendered); inst = initialChildComponent; if (inst && inst.constructor===childComponent && childProps.key==inst.__key) { setComponentProps(inst, childProps, SYNC_RENDER, context, false); } else { toUnmount = inst; component._component = inst = createComponent(childComponent, childProps, context); inst.nextBase = inst.nextBase || nextBase; inst._parentComponent = component; setComponentProps(inst, childProps, NO_RENDER, context, false); renderComponent(inst, SYNC_RENDER, mountAll, true); } base = inst.base; } else { cbase = initialBase; // destroy high order component link toUnmount = initialChildComponent; if (toUnmount) { cbase = component._component = null; } if (initialBase || opts===SYNC_RENDER) { if (cbase) cbase._component = null; base = diff(cbase, rendered, context, mountAll || !isUpdate, initialBase && initialBase.parentNode, true); } } if (initialBase && base!==initialBase && inst!==initialChildComponent) { let baseParent = initialBase.parentNode; if (baseParent && base!==baseParent) { baseParent.replaceChild(base, initialBase); if (!toUnmount) { initialBase._component = null; recollectNodeTree(initialBase, false); } } } if (toUnmount) { unmountComponent(toUnmount); } component.base = base; if (base && !isChild) { let componentRef = component, t = component; while ((t=t._parentComponent)) { (componentRef = t).base = base; } base._component = componentRef; base._componentConstructor = componentRef.constructor; } } if (!isUpdate || mountAll) { mounts.unshift(component); } else if (!skip) { if (component.componentDidUpdate) { component.componentDidUpdate(previousProps, previousState, previousContext); } if (options.afterUpdate) options.afterUpdate(component); } if (component._renderCallbacks!=null) { while (component._renderCallbacks.length) component._renderCallbacks.pop().call(component); } if (!diffLevel && !isChild) flushMounts(); } /** Apply the Component referenced by a VNode to the DOM. 
* @param {Element} dom The DOM node to mutate * @param {VNode} vnode A Component-referencing VNode * @returns {Element} dom The created/mutated element * @private */ export function buildComponentFromVNode(dom, vnode, context, mountAll) { let c = dom && dom._component, originalComponent = c, oldDom = dom, isDirectOwner = c && dom._componentConstructor===vnode.nodeName, isOwner = isDirectOwner, props = getNodeProps(vnode); while (c && !isOwner && (c=c._parentComponent)) { isOwner = c.constructor===vnode.nodeName; } if (c && isOwner && (!mountAll || c._component)) { setComponentProps(c, props, ASYNC_RENDER, context, mountAll); dom = c.base; } else { if (originalComponent && !isDirectOwner) { unmountComponent(originalComponent); dom = oldDom = null; } c = createComponent(vnode.nodeName, props, context); if (dom && !c.nextBase) { c.nextBase = dom; // passing dom/oldDom as nextBase will recycle it if unused, so bypass recycling on L241: oldDom = null; } setComponentProps(c, props, SYNC_RENDER, context, mountAll); dom = c.base; if (oldDom && dom!==oldDom) { oldDom._component = null; recollectNodeTree(oldDom, false); } } return dom; } /** Remove a component from the DOM and recycle it. * @param {Element} dom A DOM node from which to unmount the given Component * @param {Component} component The Component instance to unmount * @private */ export function unmountComponent(component) { if (options.beforeUnmount) options.beforeUnmount(component); // console.log(`${remove?'Removing':'Unmounting'} component: ${component.constructor.name}`); let base = component.base; component._disable = true; if (component.componentWillUnmount) component.componentWillUnmount(); component.base = null; // recursively tear down & recollect high-order component children: let inner = component._component; if (inner) { unmountComponent(inner); } else if (base) { if (base[ATTR_KEY] && base[ATTR_KEY].ref) base[ATTR_KEY].ref(null); component.nextBase = base; base.remove(); collectComponent(component); removeChildren(base); // removeOrphanedChildren(base.childNodes, true); } if (component.__ref) component.__ref(null); }
1
10,735
Thinking this should be `removeNode()` exported from `dom/index.js`. Thoughts?
preactjs-preact
js
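The reviewer's `removeNode()` suggestion replaces the ad-hoc `remove` import in the patch with a shared DOM helper. One plausible shape for such a helper in `dom/index.js`, sketched here rather than quoted from the merged code:

```js
/** Remove a node from its parent if attached.
 *	@param {Node} node	The node to remove
 */
export function removeNode(node) {
	let parentNode = node.parentNode;
	if (parentNode) parentNode.removeChild(node);
}
```

The call site in `unmountComponent` would then read `removeNode(base)` instead of `base.remove()`, which also avoids relying on `Element.prototype.remove`, a method missing from older browsers.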