import webapp2
import json
import logging

from google.appengine.api import users
from google.appengine.ext import ndb

from serializers import clone_for_json
from models import Frame, Photo
from logic.streams import get_stream_photos, get_photo


class PublicApi(webapp2.RequestHandler):
    def get(self, id):
        access_key = self.request.get('access_key')
        key = ndb.Key('Frame', int(id))
        frame = key.get()
        if frame is None or frame.access_key != access_key:
            self.response.status = 404
            self.response.write('frame not found')
        else:
            streams = [stream.get() for stream in frame.streams]
            frame_with_streams = {
                'frame': frame.serialize(),
                'streams': [self.serialize_stream(s) for s in streams]
            }
            self.response.headers['Content-Type'] = 'application/json'
            self.response.out.write(json.dumps(frame_with_streams))

    @staticmethod
    def fetch_photos(stream_key):
        stream = stream_key.get()
        photos = get_stream_photos(stream)
        return [g.serialize() for g in photos]

    @staticmethod
    def serialize_stream(stream):
        serialized_stream = clone_for_json(stream)
        serialized_stream['photos'] = PublicApi.fetch_photos(stream.key)
        return serialized_stream


class PublicPhotoApi(webapp2.RequestHandler):
    def get(self, frame_id, stream_id, photo_id):
        access_key = self.request.get('access_key')
        stream_key = ndb.Key('Stream', int(stream_id))
        frame_key = ndb.Key('Frame', int(frame_id))
        frame = frame_key.get()
        if frame is None or frame.access_key != access_key:
            self.response.status = 404
            self.response.write('frame not found')
            return
        logging.info("found frame {0}".format(frame))
        if stream_key not in frame.streams:
            self.response.status = 404
            self.response.write('stream not found in frame')
            return
        logging.info("found stream {0}".format(stream_key))
        stream = stream_key.get()
        get_photo(stream, photo_id, 'main', self)
{ "content_hash": "df1eacff2821866e466ef29e5615e46e", "timestamp": "", "source": "github", "line_count": 72, "max_line_length": 72, "avg_line_length": 30.38888888888889, "alnum_prop": 0.6160877513711152, "repo_name": "ido-ran/ran-smart-frame2", "id": "4fec984120f961e037df579bf29d05944d06c1b1", "size": "2188", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "web/server/api/public.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "94" }, { "name": "CSS", "bytes": "951" }, { "name": "Elm", "bytes": "5945" }, { "name": "HTML", "bytes": "1664" }, { "name": "Java", "bytes": "5612" }, { "name": "JavaScript", "bytes": "39574" }, { "name": "Kotlin", "bytes": "18014" }, { "name": "Python", "bytes": "1790974" } ], "symlink_target": "" }
"""Tests for `ribodesigner` package.""" import filecmp from pathlib import Path from click.testing import CliRunner from sequana.ribodesigner import RiboDesigner from sequana.scripts.main import ribodesigner from . import test_dir resources_dir = Path(test_dir) / "data" / "ribodesigner" def test_ribodesigner(tmp_path): rd = RiboDesigner( fasta=resources_dir / "sample.fas", gff=resources_dir / "sample.gff", output_directory=tmp_path, force=True ) rd.run() assert filecmp.cmp(tmp_path / "probes_sequences.fas", resources_dir / "probes_sequences.fas") assert filecmp.cmp(tmp_path / "clustered_probes.fas", resources_dir / "clustered_probes.fas") assert filecmp.cmp(tmp_path / "clustered_probes.csv", resources_dir / "clustered_probes.csv") def test_ribodesigner_cli(tmp_path): runner = CliRunner() result = runner.invoke( ribodesigner.ribodesigner, [ str(resources_dir / "sample.fas"), str(resources_dir / "sample.gff"), "--output-directory", tmp_path / "out_ribodesigner", ], ) assert result.exit_code == 0
{ "content_hash": "9a6dfa3700f35f3dba19188b3fbe492a", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 115, "avg_line_length": 30.756756756756758, "alnum_prop": 0.6678383128295254, "repo_name": "sequana/sequana", "id": "bda5817f9fbf8ce14fa425eab886fc74f0fdd4e5", "size": "1138", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "test/test_ribodesigner.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "6314" }, { "name": "Dockerfile", "bytes": "1693" }, { "name": "HTML", "bytes": "5379" }, { "name": "JavaScript", "bytes": "686" }, { "name": "Jupyter Notebook", "bytes": "1990042" }, { "name": "Python", "bytes": "1509148" }, { "name": "R", "bytes": "60806" }, { "name": "Shell", "bytes": "2553" }, { "name": "Singularity", "bytes": "4235" } ], "symlink_target": "" }
__author__ = "Mari Wahl" __copyright__ = "Copyright 2014, The Cogent Project" __credits__ = ["Mari Wahl"] __license__ = "GPL" __version__ = "2.0" __maintainer__ = "Mari Wahl" __email__ = "[email protected]" import os import numpy as np from sklearn import preprocessing # CONSTANTS INPUT_FOLDER = '../../data/divide_train_test/' OUTPUT_FOLDER = "../../data/processed_normalized-train-test/" OUTPUT_TRAIN_G = OUTPUT_FOLDER + "train_" + str(PERCENTAGE) + "_gauss.data" OUTPUT_TEST_G = OUTPUT_FOLDER + "test_" + str(PERCENTAGE) + "_gauss.data" OUTPUT_TRAIN_M = OUTPUT_FOLDER + "train_" + str(PERCENTAGE) + "_xmin.data" OUTPUT_TEST_M = OUTPUT_FOLDER + "test_" + str(PERCENTAGE) + "_xmin.data" OUTPUT_TRAIN_N = OUTPUT_FOLDER + "train_" + str(PERCENTAGE) + "_none.data" OUTPUT_TEST_N = OUTPUT_FOLDER + "test_" + str(PERCENTAGE) + "_none.data" IN_FILE_TRAIN = INPUT_FOLDER + "train_" + str(PERCENTAGE) + ".data" IN_FILE_TEST = INPUT_FOLDER + "test_" + str(PERCENTAGE) + ".data" def save_results(X, Y, output_file): ''' Save in a file the results of spliting''' with open(output_file , "w") as f: for i in range(len(X)): for j in range(len(X[i])): f.write(str(X[i][j]) + ',') f.write(str(Y[i]) + "\n") def load_data(datafile_name): ''' Load the data and separate it by feature and labels ''' data = np.loadtxt(datafile_name, delimiter = ',') # features X = data[:,:-1] # label Y = data[:,-1] return X, Y def main(): # get paths and create output folder input_train = IN_FILE_TRAIN input_test = IN_FILE_TEST output_folder = OUTPUT_FOLDER if not os.path.exists(output_folder): os.makedirs(output_folder) output_train_g = OUTPUT_TRAIN_G output_test_g = OUTPUT_TEST_G output_train_m = OUTPUT_TRAIN_M output_test_m = OUTPUT_TEST_M output_train_n = OUTPUT_TRAIN_N output_test_n = OUTPUT_TEST_N # open files learn_data_X, learn_data_Y = load_data(input_train) predict_data_X, predict_data_Y = load_data(input_test) ''' No normlization ''' # save results save_results(learn_data_X, learn_data_Y, output_train_n) save_results(predict_data_X, predict_data_Y, output_test_n) ''' Run normalizer xmin, xmax ''' # run normalizer gaussian scaler = preprocessing.StandardScaler().fit(learn_data_X) learn_data_X_1 = scaler.transform(learn_data_X) predict_data_X_1 = scaler.transform(predict_data_X) # save results save_results(learn_data_X_1, learn_data_Y, output_train_g) save_results(predict_data_X_1, predict_data_Y, output_test_g) ''' Run normalizer xmin, xmax ''' min_max_scaler = preprocessing.MinMaxScaler() learn_data_X_2 = min_max_scaler.fit_transform(learn_data_X) predict_data_X_2 = min_max_scaler.transform(predict_data_X) # save results save_results(learn_data_X_2, learn_data_Y, output_train_m) save_results(predict_data_X_2, predict_data_Y, output_test_m) print 'Results saved at ' + output_folder print 'Done!' if __name__ == '__main__': main()
{ "content_hash": "a2d81a5ae2736668026beb864f96e33e", "timestamp": "", "source": "github", "line_count": 126, "max_line_length": 79, "avg_line_length": 26.61904761904762, "alnum_prop": 0.5882528324388789, "repo_name": "bt3gl/MLNet-Classifying-Complex-Networks", "id": "1318d4ea21b2c63386f0668ab5a6892f762ca173", "size": "3377", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "MLNet-2.0/preparing_sets/normalize_data/deprecated/main.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "126149" }, { "name": "Shell", "bytes": "907" } ], "symlink_target": "" }
import os
import json
import datetime
import random

from django.http import HttpResponseRedirect, HttpResponse, Http404
from django.contrib.auth.decorators import login_required
from django.utils.encoding import smart_unicode
from django.conf import settings
from django.db import connections, transaction
from dajax.core import Dajax
from django.contrib.auth.models import User

from website.utils.httpUtil import HttpRequestProcessor
from website.models import *
from website.utils.LogHelper import LogHelper
from website.utils.answerHelper import AnswerHelper
from website.models import User, UserDetail, Organization, Address, OrganizationAddress, RoleType, OrganizationMember, Question, AnswerReference
from website.utils.fieldValidationCycleUtil import FieldValidationCycleUtil

# mappings from natSolDB to our db
JURISTICTION_TYPE_MAPPING = {
    None: None,
    'City': 'CI',
    'County': 'CO',
    'City and County': 'CC',
    'Independent City': 'IC',
    'State Agency': 'S',
    'Unincorporated': 'U',
}


@login_required
def migrate_jurisdiction_data(request):
    user = request.user
    requestProcessor = HttpRequestProcessor(request)
    dajax = Dajax()
    data = {}
    data['message'] = ''

    log_files = []
    log_path = os.path.join(settings.LOG_ROOT, 'migration')
    files = [f for f in os.listdir(log_path) if os.path.isfile(os.path.join(log_path, f))]
    for file in reversed(files):
        log_file = {'name': file, 'url': '/media/log/migration/' + file}
        log_files.append(log_file)
    data['log_files'] = log_files

    if not user.is_superuser:
        return requestProcessor.render_to_response(request, 'website/deny.html', {}, '')

    ajax = requestProcessor.getParameter('ajax')
    if ajax != None:
        # just checking progress
        if ajax == 'progress':
            progress = ServerVariable.get('migrate_jurisdiction_progress')
            dajax.assign('#progress_div', 'innerHTML', progress)
            status = ServerVariable.get('migrate_jurisdiction_status')
            if status == 'ended':
                dajax.assign('#status', 'innerHTML', 'ended')
                dajax.assign('#result_div', 'innerHTML', 'Migration ended.')
            return HttpResponse(dajax.json())

        # reset in case it is stuck
        if ajax == 'reset':
            ServerVariable.set('migrate_jurisdiction_progress', '')
            ServerVariable.set('migrate_jurisdiction_status', 'reset')
            dajax.assign('#status', 'innerHTML', 'ended')
            dajax.assign('#result_div', 'innerHTML', 'Migration reset.')
            return HttpResponse(dajax.json())

        # quick test of the migration process
        if ajax == 'test':
            # testing, limit records
            #alpha_list = ['u','z']
            #alpha_list = ['sam'] #include Sample Jurisdiction
            #alpha_list = ['san ']
            alpha_list = ['san ', 'sam']  # include Sample Jurisdiction
            #alpha_list = ['cloud', 'oakland'] #Oakland, Cloud has parent of Cloud
            #alpha_list = ['ma'] #address import test
            #alpha_list = ["O'"] #test bad char in jurisdiction name
            ajax = 'run'

        # start the complete migration process
        if ajax == 'start':
            alpha_list = ['a','b','c','d','e','f','g','h','i','j','k','l','m',
                          'n','o','p','q','r','s','t','u','v','w','x','y','z']
            ajax = 'run'

        # run the migration process
        if ajax == 'run':
            # don't start another process if already running
            migrate_jurisdiction_status = ServerVariable.get('migrate_jurisdiction_status')
            if migrate_jurisdiction_status == 'started':
                dajax.assign('#progress_div', 'innerHTML', 'Process already running!')
                return HttpResponse(dajax.json())
            ServerVariable.set('migrate_jurisdiction_status', 'started')  # remember this has started

            # standard data
            answer_helper = AnswerHelper()
            tab = '&nbsp;&nbsp;&nbsp;&nbsp;'
            answer_action_category = ActionCategory.objects.get(name='AddRequirement')
            vote_action_category = ActionCategory.objects.get(name='VoteRequirement')
            log_helper = LogHelper('migration', 'MigrateJurisdictions')
            log_helper.write_header()

            cursor = connections['natSolDB'].cursor()
            #cursor.execute("SELECT jurisdictionID, jurisdictionName, city, county, state, latitude, longitude, jurisdictionType, parentJurisdiction FROM nspd_jurisdictions")

            # load a dict of nspd org id to our organization objects
            cursor.execute('''SELECT * FROM nspd_organizations WHERE 1=1''', [])
            org_records = dictfetchall(cursor)
            org_lookup = {}
            has_org = False
            for org_record in org_records:
                organizations = Organization.objects.filter(name__iexact=org_record['oName'])
                if len(organizations) == 0:
                    dajax.assign('#result_div', 'innerHTML', 'Missing organization in this database:' + smart_unicode(org_record['oName']))
                    dajax.assign('#status', 'innerHTML', 'ended')
                    return HttpResponse(dajax.json())
                else:
                    has_org = True
                    organization = organizations[0]
                    org_lookup[org_record['oID']] = organization
            if has_org == False:
                dajax.assign('#result_div', 'innerHTML', 'Unable to set up organization mapping.')
                dajax.assign('#status', 'innerHTML', 'ended')
                return HttpResponse(dajax.json())

            # loop alphabetically
            for alpha in alpha_list:
                #try:
                if True:  # for testing to see exception
                    name_like = alpha + '%'
                    cursor.execute('''SELECT j.jurisdictionID AS id, j.jurisdictionName AS name, j.city, j.county,
                                             s.pusps AS state, j.latitude, j.longitude, t.name AS jurisdictionType,
                                             j.parentJurisdiction, j.created, j.lastModified
                                      FROM nspd_jurisdictions AS j
                                      LEFT JOIN nspd_states AS s ON j.state = s.fips
                                      LEFT JOIN nspd_jurisdictionTypes AS t ON j.jurisdictionType = t.jurisdictionTypeID
                                      WHERE j.jurisdictionName LIKE %s''', [name_like])
                    records = dictfetchall(cursor)
                    matching_jurisdiction_count = 0
                    new_jurisdiction_count = 0
                    record_count = 0
                    for record in records:
                        # ensure incoming text is clean unicode
                        record['name'] = smart_unicode(record['name'])
                        record['city'] = smart_unicode(record['city'])
                        record['county'] = smart_unicode(record['county'])
                        notes = ''
                        notes2 = ''
                        # has it been migrated?
jid = MigrationHistory.get_target_id('nspd_jurisdictions', record['id'], 'Jurisdiction') if jid != None: similar_jurisdictions = Jurisdiction.objects.filter(id=jid) else: #find the same jurisdiction in our db by matching fields #similar_jurisdictions = Jurisdiction.objects.filter(name=record['name'], city=record['city'], state=record['state'], jurisdiction_type=JURISTICTION_TYPE_MAPPING[record['jurisdictionType']]) similar_jurisdictions = [] #don't try to match, avoid similar ones if len(similar_jurisdictions) > 0: #for now, just use the first one jurisdiction = similar_jurisdictions[0] matching_jurisdiction_count += 1 #currently modifying jurisdiction even if already exist else: jurisdiction = Jurisdiction(name=record['name'], city=record['city'], state=record['state'], jurisdiction_type=JURISTICTION_TYPE_MAPPING[record['jurisdictionType']]) #try to use the original id, if not exist in this db same_id_jurisdictions = Jurisdiction.objects.filter(id=record['id']) if len(same_id_jurisdictions) == 0: jurisdiction.id = record['id'] new_jurisdiction_count += 1 jurisdiction.county = record['county'] jurisdiction.latitude = record['latitude'] jurisdiction.longitude = record['longitude'] jurisdiction.create_datetime = record['created'] jurisdiction.modify_datetime = record['lastModified'] jurisdiction.save() #if not exactly one match if len(similar_jurisdictions) != 1: log_helper.write_all('Warning - Name: '+record['name']+', County: '+record['county']+', State: '+record['state']+', Type: '+record['jurisdictionType']+' has '+str(len(similar_jurisdictions))+' matching jurisdictions in this database.') for similar_jurisdiction in similar_jurisdictions: log_helper.write_all(tab+similar_jurisdiction.name+', '+similar_jurisdiction.county+', '+similar_jurisdiction.state+', '+similar_jurisdiction.jurisdiction_type+'') notes += smart_unicode(jurisdiction.name)+', '+smart_unicode(jurisdiction.county)+', '+str(jurisdiction.state)+', '+str(jurisdiction.jurisdiction_type) MigrationHistory.save_history(jurisdiction, 'nspd_jurisdictions', record['id'], 'Jurisdiction', jurisdiction.id, notes, notes2) #Migrate jurisdiction parent info if record['parentJurisdiction'] != 0: #look up parent j id in migration history jurisdiction_histories = MigrationHistory.objects.filter(source_table='nspd_jurisdictions', source_id=record['parentJurisdiction'], target_table='Jurisdiction') if len(jurisdiction_histories) == 0: log_helper.write_all('Warning - Name: '+record['name']+', County: '+record['county']+', State: '+record['state']+', Type: '+record['jurisdictionType']+' Cannot set parent jurisdiction, not migrated yet, parent natSolDB id: '+str(record['parentJurisdiction'])+'.') notes2 += 'Warning: Cannot set parent jurisdiction, not migrated yet, parent natSolDB id: '+str(record['parentJurisdiction']) MigrationHistory.save_history(jurisdiction, 'nspd_jurisdictions', record['id'], 'Jurisdiction', jurisdiction.id, notes, notes2) else: jurisdiction_history = jurisdiction_histories[0] parent_jurisdictions = Jurisdiction.objects.filter(id=jurisdiction_history.target_id) if len(parent_jurisdictions) == 0: log_helper.write_all('Warning - Name: '+record['name']+', County: '+record['county']+', State: '+record['state']+', Type: '+record['jurisdictionType']+' Cannot look up parent jurisdiction, id: '+str(jurisdiction_history.target_id)+'.') else: parent_jurisdiction = parent_jurisdictions[0] jurisdiction.parent = parent_jurisdiction jurisdiction.save() notes2 += 'Parent jurisdiction id: '+str(parent_jurisdiction.id)+ ', name: 
'+parent_jurisdiction.name MigrationHistory.save_history(jurisdiction, 'nspd_jurisdictions', record['id'], 'Jurisdiction', jurisdiction.id, notes, notes2) #look up existing answers in natSolDB cursor.execute('''SELECT * FROM nspd_answers AS a WHERE a.jurisdictionID = %s ORDER BY a.questionID, a.created''', [record['id']]) answer_records = dictfetchall(cursor) if len(answer_records) > 0: log_helper.write_log('Name: '+record['name']+', County: '+record['county']+', State: '+record['state']+', Type: '+record['jurisdictionType']+' - has '+str(len(answer_records))+' answers in natSolDB.') #get all answers already in our db for this jurisdiction #answers = AnswerReference.objects.filter(jurisdiction=jurisdiction).order_by('question__id') #log_helper.write_log(tab+'This database has '+str(len(answers))+' answers.') #loop through the answer records last_edit_time = None last_contributor = None for answer_record in answer_records: answer_notes = 'Question ID: '+str(answer_record['questionID'])+' ' answer_value = answer_helper.format_import_answer(answer_record) answer_notes += smart_unicode(answer_value) answer_notes2 = '' #log_helper.write_log(tab+'Question ID: '+str(answer_record['questionID'])+' Answer: '+answer_value) #skip if this answer is a child of another question if answer_helper.is_child_answer(answer_record) == True: #log_helper.write_log(tab+'Answer to child question, skipping.') answer_notes2 += 'Child answer, to be migrated with parent.' MigrationHistory.save_history(jurisdiction, 'nspd_answers', answer_record['answerID'], 'AnswerReference', None, answer_notes, answer_notes2) continue #does this question has child questions child_answer_records = answer_helper.get_child_answers(answer_record, answer_records) for child_answer_record in child_answer_records: answer_notes += 'Has child answer with Question ID: '+str(child_answer_record['questionID'])+'.' #check if answer already in our db #look up our user from natSolDB user id creator = UserDetail.get_migrated_user_by_id(answer_record['creatorUserID']) if creator == None: log_helper.write_all(tab+'Warning - User from natSolDB with ID: '+str(answer_record['creatorUserID'])+' does not exist in this DB, answer skipped.') continue #skip #get question, question id in both dbs should be the same try: question = Question.objects.get(id=answer_record['questionID']) except: #log_helper.write_log(tab+tab+'Failed to find question in this DB with id:'+str(answer_record['questionID'])+', skipped.') answer_notes2 += 'Warning: no such question to migrate over.' MigrationHistory.save_history(jurisdiction, 'nspd_answers', answer_record['answerID'], 'AnswerReference', None, answer_notes, answer_notes2) #can't import this answer continue #has it been migrated? 
new_answer = False aid = MigrationHistory.get_target_id('nspd_answers', answer_record['answerID'], 'AnswerReference') if aid != None: similar_answers = AnswerReference.objects.filter(id=aid) matching_answer = similar_answers[0] else: #exclude other answers already migrated migrated_answer_ids = MigrationHistory.objects.filter(source_table='nspd_answers', source_id=answer_record['answerID'], target_table='AnswerReference').values_list('target_id', flat=True) #similar answer not migrated similar_answers = AnswerReference.objects.filter(jurisdiction=jurisdiction, question=question, creator=creator).exclude(id__in=migrated_answer_ids) matching_answer = None new_answer = True for similar_answer in similar_answers: if answer_helper.is_answer_match(answer_record, similar_answer): matching_answer = similar_answer new_answer = False #log_helper.write_log(tab+tab+'Has matching answer in this DB.') if matching_answer == None: #log_helper.write_log(tab+tab+'No matching answer in this DB.') #add answer matching_answer = AnswerReference(jurisdiction=jurisdiction, question=question, creator=creator) #if address, try to get value from nspd_addresses if question.migration_type == 'address': cursor.execute('''SELECT * FROM nspd_addresses WHERE answerId = %s''', [answer_record['answerID']]) address_records = dictfetchall(cursor) if len(address_records) == 0: #no address yet, put into free-form matching_answer.value = answer_helper.migrate_answer_value(question, answer_record, child_answer_records) answer_notes2 += 'Warning - using free-form address: '+smart_unicode(matching_answer.value) else: address_record = address_records[0] #should have only one anyway #pass in address_record instead of answer record matching_answer.value = answer_helper.migrate_answer_value(question, address_record, child_answer_records, True) #is_special=True answer_notes2 += 'From nspd_addresses: '+smart_unicode(matching_answer.value) else: matching_answer.value = answer_helper.migrate_answer_value(question, answer_record, child_answer_records) answer_notes2 += smart_unicode(matching_answer.value) #log_helper.write_log(tab+tab+'Answer JSON: '+matching_answer.value) matching_answer.is_callout = answer_record['isCallout'] if answer_record['isValidated'] == 1: matching_answer.approval_status = 'A' else: matching_answer.approval_status = 'P' if answer_record['creatorOrgID'] != None: try: organization = org_lookup[answer_record['creatorOrgID']] matching_answer.organization = organization except: answer_notes2 += ' Warning - failed to look up organization: '+smart_unicode(answer_record['creatorOrgID']) matching_answer.migrated_answer_id = answer_record['answerID'] matching_answer.creator = creator matching_answer.save() #save again to aviod auto_now_add value matching_answer.create_datetime = answer_record['created'] matching_answer.modify_datetime = answer_record['created'] matching_answer.status_datetime = answer_record['created'] matching_answer.save() MigrationHistory.save_history(jurisdiction, 'nspd_answers', answer_record['answerID'], 'AnswerReference', matching_answer.id, answer_notes, answer_notes2) #Action for Answer matching_answer_action = None answer_action_notes = '' answer_action_notes2 = '' similar_answer_actions = Action.objects.filter(user=creator, category=answer_action_category, entity_id=matching_answer.id) if len(similar_answer_actions) > 0: #already has this action matching_answer_action = similar_answer_actions[0] #should be only one anyway #log_helper.write_log(tab+tab+'Already has same answer action in DB.') else: 
#if no match, add matching_answer_action = Action(user=creator, category=answer_action_category, entity_id=matching_answer.id) #log_helper.write_log(tab+tab+'No matching answer action in DB.') matching_answer_action.jurisdiction = jurisdiction matching_answer_action.question_category = question.category matching_answer_action.entity_name = 'Requirement' #matching_answer_action.entity_name = 'AnswerReference' #TODO: should be changed to this in the whole app matching_answer_action.data = 'Answer: '+matching_answer.value matching_answer_action.scale = answer_action_category.points matching_answer_action.save() #save again to aviod auto_now_add value matching_answer_action.action_datetime = answer_record['created'] matching_answer_action.save() answer_action_notes += '' answer_action_notes2 += '' MigrationHistory.save_history(jurisdiction, 'nspd_answers', answer_record['answerID'], 'Action', matching_answer_action.id, answer_action_notes, answer_action_notes2) # TODO: save action for Vote #vote_action.save() #look up comments for this answer in natSolDB cursor.execute('''SELECT * FROM nspd_comments WHERE answerID = %s ORDER BY created''', [answer_record['answerID']]) comment_records = dictfetchall(cursor) #loop through the comments for comment_record in comment_records: #look up our user from natSolDB user id commenter = UserDetail.get_migrated_user_by_id(comment_record['uID']) if commenter == None: log_helper.write_all(tab+'Warning - User from natSolDB with ID: '+str(comment_record['uID'])+' does not exist in this DB, comment skipped.') continue #skip #check if the comment is already in our db #has it been migrated? matching_comment = None cid = MigrationHistory.get_target_id('nspd_comments', comment_record['commentID'], 'Comment') if cid != None: similar_comments = Comment.objects.filter(id=cid) if len(similar_comments) > 0: matching_comment = similar_comments[0] #there should be only one if matching_comment == None: #add new comment matching_comment = Comment() matching_comment.jurisdiction = jurisdiction matching_comment.entity_name = 'AnswerReference' matching_comment.entity_id = matching_answer.id matching_comment.user = commenter matching_comment.comment_type = 'JC' #'Jurisdiction Comment' matching_comment.comment = smart_unicode(comment_record['comment']) matching_comment.parent_comment = None if comment_record['isFlagged'] == 1: matching_comment.approval_status = 'F' else: matching_comment.approval_status = 'P' matching_comment.save() #save again to aviod auto_now_add value matching_comment.create_datetime = comment_record['created'] matching_comment.save() comment_notes = '' comment_notes2 = matching_comment.comment MigrationHistory.save_history(jurisdiction, 'nspd_comments', comment_record['commentID'], 'Comment', matching_comment.id, comment_notes, comment_notes2) #look up votes for this answer in natSolDB cursor.execute('''SELECT * FROM nspd_votes AS v WHERE v.answerID = %s ORDER BY v.modified''', [answer_record['answerID']]) vote_records = dictfetchall(cursor) if len(vote_records) > 0: #log_helper.write_log(tab+tab+'This answer has '+str(len(vote_records))+' votes.') pass #loop through the votes for vote_record in vote_records: #look up our user from natSolDB user id voter = UserDetail.get_migrated_user_by_id(vote_record['uID']) if voter == None: log_helper.write_all(tab+'Warning - User from natSolDB with ID: '+str(answer_record['creatorUserID'])+' does not exist in this DB, answer skipped.') continue #skip #check if the vote is already in our db #has it been migrated? 
vote_action = None vid = MigrationHistory.get_target_id('nspd_votes', vote_record['voteID'], 'Action') if vid != None: similar_votes = Action.objects.filter(id=vid) if len(similar_votes) > 0: vote_action = similar_votes[0] #there should be only one if vote_action == None: #add new vote action vote_action = Action() vote_action.user = voter vote_action.category = vote_action_category vote_action.entity_id = matching_answer.id vote_action.jurisdiction = jurisdiction vote_action.question_category = question.category vote_action.entity_name = 'reference' #vote_action.entity_name = 'AnswerReference' #TODO: should be changed to this in the whole app if vote_record['voteIsPositive'] == 1: vote_action.data = 'Vote: Up' else: vote_action.data = 'Vote: Down' vote_action.scale = 2 vote_action.save() #save again to aviod auto_now_add value vote_action.action_datetime = vote_record['modified'] vote_action.save() vote_notes = '' vote_notes2 = vote_action.data MigrationHistory.save_history(jurisdiction, 'nspd_votes', vote_record['voteID'], 'Action', vote_action.id, vote_notes, vote_notes2) #end votes loop #print str(last_edit_time) +', ' + str(record['created']) if last_edit_time == None or last_edit_time < record['created']: last_edit_time = record['created'] last_contributor = creator #end answers loop if last_edit_time != None: jurisdiction.last_contributed = last_edit_time jurisdiction.last_contributed_by = last_contributor org_members = OrganizationMember.objects.filter(user=last_contributor) if len(org_members) > 0: org = org_members[0].organization jurisdiction.last_contributed_by_org = org jurisdiction.save() record_count += 1 progress_text = 'Name starting with "'+alpha+'": '+str(record_count)+' of '+str(len(records))+' records processed' ServerVariable.set('migrate_jurisdiction_progress', progress_text) #end records loop log_helper.write_all(str(len(records))+' jurisdictions starting with "'+alpha+'" in natSolDB.') log_helper.write_all(str(matching_jurisdiction_count)+' matching jurisdictions starting with "'+alpha+'" in this DB.') log_helper.write_all('Added '+str(new_jurisdiction_count)+' jurisdictions starting with "'+alpha+'" to this DB.') #comment out except and handling for test... 
#except Exception, e: #log_helper.write_all('Exception: '+str(e)) ServerVariable.set('migrate_jurisdiction_status', 'ended') #update server status log_helper.write_footer() #dajax.assign('#result_div','innerHTML', log_helper.log_buffer) dajax.assign('#result_div','innerHTML', 'Migration completed.') dajax.assign('#status','innerHTML', 'ended') #update showing of log files log_files = [] log_path = os.path.join(settings.LOG_ROOT, 'migration') files = [ f for f in os.listdir(log_path) if os.path.isfile(os.path.join(log_path, f)) ] #for file in reversed(files): for file in files: log_file = {'name': file, 'url': '/media/log/migration/'+file} log_files.append(log_file) data['log_files'] = log_files body = requestProcessor.decode_jinga_template(request,'website/utils/migration_logs.html', data, '') dajax.assign('#log_div','innerHTML', body) return HttpResponse(dajax.json()) return requestProcessor.render_to_response(request,'website/utils/migrate_jurisdictions.html', data, '') #show jurisdiction migration result on a page, admin only @login_required def jurisdiction_migration_result(request, id): user = request.user requestProcessor = HttpRequestProcessor(request) data = {} if not user.is_staff and not user.is_superuser: #admin only return requestProcessor.render_to_response(request,'website/deny.html', {}, '') data['jurisdiction'] = jurisdiction= Jurisdiction.objects.filter(id=id) data['histories'] = histories = MigrationHistory.objects.filter(jurisdiction_id=id).exclude(target_table='Action').order_by('source_table','source_id') return requestProcessor.render_to_response(request,'website/utils/migration_result.html', data, '') @login_required def migrate_unincorporated(request): user = request.user requestProcessor = HttpRequestProcessor(request) data = {} miss_parent_jurisdiction = [] unincorporated_add_num = 0 unincorporated_update_num = 0 unincorporated_without_parent_num = 0 if not user.is_superuser: return requestProcessor.render_to_response(request,'website/deny.html', {}, '') cursor = connections['natSolDB'].cursor() cursor.execute("SELECT fips, pusps, status FROM nspd_states") records = dictfetchall(cursor) states = {} for record in records: states[record['fips']] = record['pusps'] cursor.execute("SELECT jurisdictionID, jurisdictionName, city, county, state, latitude, longitude, jurisdictionType, parentJurisdiction FROM nspd_jurisdictions n WHERE n.jurisdictionType = 6 ") records = dictfetchall(cursor) for record in records: #print record['longitude'] jurisdictions = Jurisdiction.objects.filter(name__iexact=record['jurisdictionName'], latitude__iexact=record['latitude'], longitude__iexact=record['longitude'], jurisdiction_type__iexact='U') if jurisdictions: jurisdiction = jurisdictions[0] unincorporated_update_num += 1 else: jurisdiction = Jurisdiction() unincorporated_add_num += 1 if record['parentJurisdiction'] == '' or record['parentJurisdiction'] == None: unincorporated_without_parent_num +=1 miss_parent_jurisdiction.append(record['jurisdictionName']) else: cursor.execute("SELECT jurisdictionID, jurisdictionName, city, county, state, latitude, longitude, jurisdictionType, parentJurisdiction FROM nspd_jurisdictions n WHERE n.jurisdictionID = " + str(record['parentJurisdiction'])) parent_records = dictfetchall(cursor) if parent_records: parent_record = parent_records[0] parent_jurisdictions = None if parent_record['jurisdictionType'] == 1: parent_jurisdictions = Jurisdiction.objects.filter(name__iexact=parent_record['jurisdictionName'], 
latitude__iexact=parent_record['latitude'],longitude__iexact=parent_record['longitude'], jurisdiction_type__iexact='CI') elif parent_record['jurisdictionType'] == 2: parent_jurisdictions = Jurisdiction.objects.filter(name__iexact=parent_record['jurisdictionName'], latitude__iexact=parent_record['latitude'],longitude__iexact=parent_record['longitude'], jurisdiction_type__iexact='CO') elif parent_record['jurisdictionType'] == 3: parent_jurisdictions = Jurisdiction.objects.filter(name__iexact=parent_record['jurisdictionName'], latitude__iexact=parent_record['latitude'],longitude__iexact=parent_record['longitude'], jurisdiction_type__iexact='CC') #else: if parent_jurisdictions: parent_jurisdiction = parent_jurisdictions[0] jurisdiction.parent = parent_jurisdiction else: unincorporated_without_parent_num +=1 miss_parent_jurisdiction.append(record['jurisdictionName']) else: unincorporated_without_parent_num +=1 miss_parent_jurisdiction.append(record['jurisdictionName']) jurisdiction.name = record['jurisdictionName'] jurisdiction.jurisdiction_type = 'U' jurisdiction.city = record['city'] jurisdiction.county = record['county'] jurisdiction.state = states[record['state']] jurisdiction.latitude = record['latitude'] jurisdiction.longitude = record['longitude'] jurisdiction.save() data['miss_parent_jurisdiction'] = miss_parent_jurisdiction data['unincorporated_add_num'] = unincorporated_add_num data['unincorporated_update_num'] = unincorporated_update_num data['unincorporated_without_parent_num'] = unincorporated_without_parent_num return requestProcessor.render_to_response(request,'website/utils/migrate_unincorporated.html', data, '') def dictfetchall(cursor): "Returns all rows from a cursor as a dict" desc = cursor.description return [ dict(zip([col[0] for col in desc], row)) for row in cursor.fetchall() ] #@csrf.csrf_protect def set_up_data_sprint_19(request): if request.user.is_authenticated() and request.user.is_superuser == 1: set_up_county_city_relationship() match_user_n_details() #migrate_temmplatequestion_to_question() no need. 
to be done by initial_data.json return HttpResponseRedirect("/") def set_up_county_city_relationship(): return counties = Jurisdiction.objects.filter(jurisdiction_type='CO').order_by('state', 'name') for county in counties: cities = Jurisdiction.objects.filter(jurisdiction_type='CI', state__iexact=county.state, county__istartswith=county.name).order_by('state', 'city') for city in cities: city.parent_id = county.id city.save() def match_user_n_details(): return users = User.objects.all() for user in users: user_detail = UserDetail.objects.filter(user=user) if len(user_detail) == 0: user_detail = UserDetail() user_detail.user_id = user.id user_detail.display_preference = 'username' user_detail.save() def migrate_temmplatequestion_to_question(): template_questions = TemplateQuestion.objects.all() for template_question in template_questions: try: question = Question.objects.get(id=template_question.question_id) question.qtemplate_id = template_question.template_id question.save() except: print "no question for id = " + str(template_question.question_id) def migrate_organizationMember(): return members = OrganizationMember.objects.filter(role__id = 3) role = RoleType.objects.get(id = 1) for member in members: member.role = role member.save() @login_required def migrate_users(request): user = request.user requestProcessor = HttpRequestProcessor(request) data = {} print "data migration starts >>>>>>>>>> " if not user.is_superuser: return requestProcessor.render_to_response(request,'website/deny.html', {}, '') cursor = connections['natSolDB'].cursor() cursor.execute("SELECT fips, pusps, status FROM nspd_states") records = dictfetchall(cursor) states = {} for record in records: states[record['fips']] = record['pusps'] # get unique jurisdictionID from natSolDB's answers. 
# get same jurisdiction from doe_dev # build matching_jurisdictions ''' matching_jurisdictions = {} no_matching_jurisdictions = {} cursor.execute("SELECT jurisdictionID, jurisdictionName, city, county, state, jurisdictionType FROM nspd_jurisdictions n WHERE n.jurisdictionID IN (SELECT DISTINCT a.jurisdictionID FROM nspd_answers a)") records = dictfetchall(cursor) print "distinct jurs from nspd count :: " + str(len(records)) for record in records: print "nspd jur :: " print str(record['jurisdictionName']) + '-' + str(record['city']) + '-' + str(record['county']) if record['jurisdictionType'] == 1: jurisdictions = Jurisdiction.objects.filter(name__iexact=record['jurisdictionName'],city__iexact=record['city'],county__iexact=record['county'],state__iexact=states[record['state']], jurisdiction_type__iexact='CI') elif record['jurisdictionType'] == 2: jurisdictions = Jurisdiction.objects.filter(name__iexact=record['jurisdictionName'],county__iexact=record['county'],state__iexact=states[record['state']], jurisdiction_type__iexact='CO') elif record['jurisdictionType'] == 3: jurisdictions = Jurisdiction.objects.filter(name__iexact=record['jurisdictionName'],city__iexact=record['city'],county__iexact=record['county'],state__iexact=states[record['state']], jurisdiction_type__iexact='CC') if jurisdictions: #print "doe jur match found" matching_jurisdictions[record['jurisdictionID']] = jurisdictions[0].id else: print ">>>>>>>>>> doe jur match NOT FOUND >>> " + str(record['jurisdictionName']) no_matching_jurisdictions[record['jurisdictionID']] = record['jurisdictionName'] + '-' + str(record['city']) + '-' + record['county'] + '-' + str(record['state']) print "matching_jurisdictions count :: " + str(len(matching_jurisdictions)) print "no_matching_jurisdictions count :: " + str(len(no_matching_jurisdictions)) print "no_matching_jurisdictions :: " print no_matching_jurisdictions matching_question_categories = {} cursor.execute("SELECT questionCategoryID, questionCategory FROM nspd_questionCategories") records = dictfetchall(cursor) for record in records: question_categories = QuestionCategory.objects.filter(name__iexact=record['questionCategory']) if question_categories: matching_question_categories[record['questionCategoryID']] = question_categories[0].id print "matching_question_categories count :: " + str(len(matching_question_categories)) # get unique questionID from natSolDB's answers. 
# get same question from doe_dev # build matching_questions matching_questions = {} no_matching_questions = {} cursor.execute("SELECT q.questionID, questionCategoryID, question FROM nspd_questions q WHERE q.questionID IN (SELECT DISTINCT a.questionID FROM nspd_answers a)") records = dictfetchall(cursor) print "distinct questionID from nspd count :: " + str(len(records)) for record in records: print str(record['questionID']) + '-' + str(record['question']) question_category_id = matching_question_categories[record['questionCategoryID']] questionCategory = QuestionCategory.objects.get(id=question_category_id) questions = Question.objects.filter(category__exact=questionCategory, question__iexact=record['question']) if questions: #print "doe question match found" matching_questions[record['questionID']] = questions[0].id else: print ">>>>>>>>>> doe question match NOT FOUND >>> " + str(record['questionID']) + '_' + str(record['question']) no_matching_questions[record['questionID']] = record['question'] print "matching_questions count :: " + str(len(matching_questions)) print "no_matching_questions count :: " + str(len(no_matching_questions)) print "no_matching_questions :: " print no_matching_questions ''' # organization matching_orgs = {} nspd_org_owners = {} cursor.execute("SELECT oID, oName, oDescription, oOwnerID, oPhone, oAddress, oAddress2, oCity, oState, oZip, oUrl FROM nspd_organizations") records = dictfetchall(cursor) data['nspd_organizations'] = len(records) org_added = 0 org_existed = 0 for record in records: doe_orgs = Organization.objects.filter(name__iexact=record['oName']) # assumption: org name is unique if doe_orgs: print "org already exists. check if has address." + str(record['oName']) org = doe_orgs[0] org_id = org.id org_existed = org_existed + 1 if record['oDescription'] != None: org.description=record['oDescription'] if record['oPhone'] != None: org.phone=record['oPhone'] if record['oUrl'] != None: org.website=record['oUrl'] org.save() doe_org_addresses = OrganizationAddress.objects.filter(organization__exact=doe_orgs[0]) if doe_org_addresses: print "address already exists. 
don't add the address" else: if record['oAddress'] != None or record['oAddress2'] != None or record['oCity'] != None or record['oState'] != None or record['oZip'] != None: address = Address() if record['oAddress'] != None: address.address1=record['oAddress'] if record['oAddress2'] != None: address.address2=record['oAddress2'] if record['oCity'] != None: address.city=record['oCity'] if record['oState'] != None: address.state=states[record['oState']] if record['oZip'] != None: address.zip_code=record['oZip'] address.save() print "add address" organization_address = OrganizationAddress(organization_id=org_id, address_id=address.id) organization_address.save() print "add org address" else: print "don't add address because data is not provided" else: org = Organization() org.name=record['oName'] if record['oDescription'] != None: org.description=record['oDescription'] if record['oPhone'] != None: org.phone=record['oPhone'] if record['oUrl'] != None: org.website=record['oUrl'] org.save() org_added = org_added + 1 org_id = org.id print "add org >>>> " + str(record['oName']) if record['oAddress'] != None or record['oAddress2'] != None or record['oCity'] != None or record['oState'] != None or record['oZip'] != None: address = Address() if record['oAddress'] != None: address.address1=record['oAddress'] if record['oAddress2'] != None: address.address2=record['oAddress2'] if record['oCity'] != None: address.city=record['oCity'] if record['oState'] != None: address.state=states[record['oState']] if record['oZip'] != None: address.zip_code=record['oZip'] address.save() print "add address " organization_address = OrganizationAddress(organization_id=org_id, address_id=address.id) organization_address.save() print "add org address" else: print "don't add address because data is not provided" matching_orgs[record['oID']] = org_id nspd_org_owners[record['oID']] = record['oOwnerID'] print "matching orgs :: " + str(len(matching_orgs)) print matching_orgs data['org_added'] = org_added data['org_existed'] = org_existed # user nspd_user_org = {} nspd_org_user = {} cursor.execute("SELECT uID, oID FROM nspd_userOrganizations") records = dictfetchall(cursor) for record in records: nspd_user_org[record['uID']] = record['oID'] if record['oID'] not in nspd_org_user: nspd_org_user[record['oID']] = record['uID'] ownerRole = RoleType.objects.get(name="Administrator") memberRole = RoleType.objects.get(name="Member") matching_users = {} cursor.execute("SELECT uID, uName, uEmail, uPassword, uIsActive, uDateAdded, uLastLogin FROM Users") records = dictfetchall(cursor) data['nspd_users'] = len(records) user_added = 0 user_existed = 0 for record in records: if record['uID'] in nspd_user_org: # this user belongs to an org nspd_org_id = nspd_user_org[record['uID']] doe_org_id = matching_orgs[nspd_org_id] if nspd_org_id in nspd_org_owners: # this org has an owner if record['uID'] == nspd_org_owners[nspd_org_id]: # this user is an org owner role_id = ownerRole.id else: role_id = memberRole.id else: role_id = memberRole.id else: doe_org_id = 0 users_by_email_username = User.objects.filter(email__iexact=record['uEmail'], username__iexact=record['uName']) if users_by_email_username: print 'exact user already exists. same email, same username. update it.' user = users_by_email_username[0] # should be only one user with that unique username update(user, record) user_existed = user_existed + 1 user_id = user.id else: print "check if any user with email similar to that of the incoming record." 
users_by_email = User.objects.filter(email__iexact=record['uEmail']) if users_by_email: for user in users_by_email: # there may be more than one with email similar to the incoming one. users_by_uname = User.objects.filter(username__iexact=record['uName']).exclude(email__iexact=record['uEmail']) if users_by_uname: "the username exists in the system. give the incoming username to the user in question. but first change the username of all the found users" user_with_same_uname = users_by_uname[0] # by unique name, should be only one. user_with_same_uname.username = user_with_same_uname.username + str(random.randrange(1,100+1)) user_with_same_uname.save() # update the user with the new username update(user, record) user_existed = user_existed + 1 user_id = user.id else: # no record with similar email. check if any similar username print "no similar email. check if any user with username similar to that of the incoming record." users_by_uname = User.objects.filter(username__iexact=record['uName']) if users_by_uname: print "user " + str(record['uName']) + ' already exists. ' user = users_by_uname[0] # only one allowed by the system update(user, record) user_existed = user_existed + 1 user_id = user.id else: print "incoming record has no similar email or uname in the existing db. >>>> add as the new user." user = User() user.username = record['uName'] user.email = record['uEmail'] user.is_active=record['uIsActive'] user.last_login=datetime.datetime.fromtimestamp(record['uLastLogin']) #need to convert to right format user.date_joined=record['uDateAdded'] user.save() user_added = user_added + 1 user_id = user.id user = User.objects.get(id=user_id) user_details = UserDetail.objects.filter(user__exact=user) if user_details: user_detail = user_details[0] user_detail.old_password = record['uPassword'] user_detail.migrated_id = record['uID'] user_detail.save() else: user_detail = UserDetail() user_detail.user_id = user_id user_detail.old_password = record['uPassword'] user_detail.migrated_id = record['uID'] user_detail.save() if doe_org_id > 0: print "This user belongs to an org." org = Organization.objects.get(id=doe_org_id) doe_org_members = OrganizationMember.objects.filter(user__exact=user, organization__exact=org) if doe_org_members: print "user already a member to the same org in doe as in nspd. don't add another one." else: print "add member to org in doe as in nspd" org_member = OrganizationMember(organization_id=doe_org_id, user_id=user_id,role_id=role_id, status='A') org_member.save() else: print "This user belongs to no org in nspd." 
matching_users[record['uID']] = user_id print "matching users count :: " + str(len(matching_users)) print matching_users data['matching_users'] = len(matching_users) data['user_added'] = user_added data['user_existed'] = user_existed ''' # answer matching_answers = {} answers_not_added = {} added_answer_count = 0 action_category = ActionCategory.objects.filter(name__iexact='AddRequirement') entity_name='Requirement' cursor.execute("SELECT answerID, jurisdictionID, questionID, creatorOrgID, isValidated, textValue, tinytextValue, tinyintValue, smallintValue, mediumintValue, intValue, timeValue, created FROM nspd_answers") records = dictfetchall(cursor) answer_added = 0 answer_existed = 0 data['nspd_answers'] = len(records) for record in records: if record['textValue'] != None: answer_data = record['textValue'] elif record['tinytextValue'] != None: answer_data = record['tinytextValue'] elif record['tinyintValue'] != None: answer_data = record['tinyintValue'] elif record['smallintValue'] != None: answer_data = record['smallintValue'] elif record['mediumintValue'] != None: answer_data = record['mediumintValue'] elif record['intValue'] != None: answer_data = record['intValue'] elif record['timeValue'] != None: answer_data = record['timeValue'] else: answer_data = None if answer_data != None: if record['jurisdictionID'] in matching_jurisdictions: jurisdiction_id = matching_jurisdictions[record['jurisdictionID']] #user_id = matching_users[record['uID']] contributorOrgId = matching_orgs[record['creatorOrgID']] # user this to figure out contribution question_id = matching_questions[record['questionID']] # may need to avoid adding answers already existed in the system caused by previous migration. question = Question.objects.get(id=question_id) question_category_ids = QuestionCategory.objects.filter(question__exact=question) question_category_id = question_category_ids[0].id jurisdiction = Jurisdiction.objects.get(id=matching_jurisdictions[record['jurisdictionID']]) if isinstance(answer_data, (int, long)): answer_value = str(answer_data) else: answer_value = answer_data.encode('utf-8') doe_jur_answers = AnswerReference.objects.filter(question__exact=question, jurisdiction__exact=jurisdiction, migrated_answer_id__exact=record['answerID']) if doe_jur_answers: answer_existed = answer_existed + 1 else: answer = AnswerReference() answer.question_id = question_id answer.value = answer_value answer.jurisdiction_id = jurisdiction_id #answer.is_current = '' # need to check the other database #answer.rating = '' # vote data? if record['isValidated'] == 1: answer.rating_status = 'C' else: answer.rating_status = 'U' #answer.approal_status = '' if record['creatorOrgID'] in nspd_org_user: nspd_user_id = nspd_org_user[record['creatorOrgID']] if nspd_user_id != 0: if nspd_user_id in matching_users: answer.creator_id = matching_users[nspd_user_id] # no creatorID, so we use org owner instead. 
answer.create_datetime = record['created'] answer.migrated_answer_id = record['answerID'] answer.save() answer_added = answer_added + 1 matching_answers[record['answerID']] = answer.id added_answer_count = added_answer_count + 1 #answer_data = str(answer.value) contributionHelper = ContributionHelper() contributionHelper.save_action('AddRequirement', answer.value, answer.id, entity_name, answer.creator_id, jurisdiction_id ) else: answers_not_added[record['answerID']] = str(record['answerID']) + 'answer not added because no matching jurisdiction' else: answers_not_added[record['answerID']] = str(record['answerID']) + 'answer not added because no actual data' data['answer_added'] = answer_added data['answer_existed'] = answer_existed data['answers_not_added'] = answers_not_added print "nspd answers count :: " + str(len(records)) print "added_answer_count :: " + str(added_answer_count) # need to maintain vote, contribution, rating, status action_category = ActionCategory.objects.filter(name__iexact='VoteRequirement') entity_name='Requirement' cursor.execute("SELECT uID, oID, answerID, voteIsPositive, modified FROM nspd_votes") records = dictfetchall(cursor) vote_added = 0 vote_existed = 0 vote_helper = VoteHelper() data['nspd_votes'] = len(records) for record in records: if record['answerID'] in matching_answers: answer_id = matching_answers[record['answerID']] answer = AnswerReference.objects.get(id=answer_id) question = Question.objects.get(id=answer.question_id) if record['voteIsPositive'] == 1: vote_data = "Up" else: vote_data = "Down" user_id = matching_users[record['uID']] final_vote_data = 'Vote: ' + str(vote_data) votes = Action.objects.filter(entity_name__iexact=entity_name, entity_id__exact=answer_id, category__exact=action_category[0], data__iexact= final_vote_data) if votes: print "vote already migrated or existed. Don't do anything." vote_existed = vote_existed + 1 else: vote_helper.vote(answer_id, vote_data, user_id, entity_name, answer.jurisdiction_id) vote_added = vote_added + 1 else: print "vote not migrated because of no matching answer found" + str(record['answerID']) data['vote_added'] = vote_added data['vote_existed'] = vote_existed ''' return requestProcessor.render_to_response(request,'website/data_migration.html', data, '') # no need to migrate comment because all are junk or test comment. def update(user, record): user.username = record['uName'] user.password = '' user.email = record['uEmail'] user.is_active=record['uIsActive'] user.last_login=datetime.datetime.fromtimestamp(record['uLastLogin']) #need to convert to right format user.date_joined=record['uDateAdded'] user.save() def patch_cf_questions_display_order(): question_categories = QuestionCategory.objects.filter(accepted=1).order_by('display_order') data = {} data['non_cf_questions'] = {} data['cf_questions'] = {} for question_category in question_categories: questions = Question.objects.filter(category=question_category, accepted=1).exclude(form_type__exact='CF').order_by('-display_order') last_question = questions[0] # assumption: non-cf questions are predefined and setup. always have questions and they all have display_order. if not, bad setup. 
display_order = last_question.display_order data[question_category.id] = {} data['non_cf_questions'][question_category.id] = questions cf_questions = Question.objects.filter(category=question_category, accepted=1, form_type__exact='CF') for question in cf_questions: display_order = display_order + 1 question.display_order = display_order question.save() cf_questions = Question.objects.filter(category=question_category, accepted=1, form_type__exact='CF') data['cf_questions'][question_category.id] = cf_questions data['question_categories'] = question_categories #return requestProcessor.render_to_response(request,'website/utils/patch_correct_questions_display_order.html', data, '') def correct_fee(request): data = {} requestProcessor = HttpRequestProcessor(request) ''' "default_value": "{\"fee_type_1\": \"fee type\", \"fee_item_1_1\": \"fee item\", \"fee_formula_1_1\": \"flat_rate\", \"fee_other_1_1\": \"\", \"fee_percentage_of_total_system_cost_cap_1_1\": \"\", \"fee_per_inverter_1_1\": \"0\", \"fee_flat_rate_1_1\": \"\", \"fee_per_major_components_1_1\": \"0\", \"fee_jurisdiction_cost_recovery_notes_1_1\": \"\", \"fee_percentage_of_total_system_cost_1_1\": \"\", \"fee_percentage_of_total_system_cost_cap_amt_1_1\": \"\", \"fee_per_component_cap_1_1\": \"\", \"fee_per_component_cap_cap_amt_1_1\": \"\", \"fee_per_module_1_1\": \"0\"}", {"percentage_of_total_system_cost_cap": "", "fee_per_inverter": "", "flat_rate_amt": "", "fee_per_major_components": "", "jurisdiction_cost_recovery_notes": "", "percentage_of_total_system_cost": "", "percentage_of_total_system_cost_cap_amt": "", "value": "($100 plan review fee + permit costs). If strictly under\r\nelectrical permit:$50 inspection fee, 1.60 to 6.50 depending on kw of system,\r\nadditional fees if service needs to be rebuilt, new panels, safety switches + $100\r\nplan review fee. 
If building permit is required, based on valuation for review fees\r\nand building permit fee; electrical permit costs after approval of building permit.", "fee_per_component_cap": "", "fee_per_component_cap_cap_amt": "", "fee_per_module": ""} ''' question = Question.objects.get(id=16) #answers = AnswerReference.objects.filter(question=question).exclude(value__contains='fee_type') #answers = AnswerReference.objects.filter(question=question).exclude(value__contains='flat_rate_amt') answers = AnswerReference.objects.filter(question=question) for answer in answers: ''' old_value = json.loads(answer.value) new_value = {} new_value['fee_type_1'] = 'fee_type' new_value['fee_item_1_1'] = 'fee_item' if 'percentage_of_total_system_cost_cap' in old_value: new_value['fee_percentage_of_total_system_cost_cap_1_1'] = old_value['percentage_of_total_system_cost_cap'] else: new_value['fee_percentage_of_total_system_cost_cap_1_1'] = '' if 'fee_per_inverter' in old_value: new_value['fee_per_inverter_1_1'] = old_value['fee_per_inverter'] else: new_value['fee_per_inverter_1_1'] = '' if 'flat_rate_amt' in old_value: new_value['fee_flat_rate_1_1'] = old_value['flat_rate_amt'] else: new_value['fee_flat_rate_1_1'] = '' if 'fee_per_major_components' in old_value: new_value['fee_per_major_components_1_1'] = old_value['fee_per_major_components'] else: new_value['fee_per_major_components_1_1'] = '' if 'jurisdiction_cost_recovery_notes' in old_value: new_value['fee_jurisdiction_cost_recovery_notes_1_1'] = old_value['jurisdiction_cost_recovery_notes'] else: new_value['fee_jurisdiction_cost_recovery_notes_1_1'] = '' if 'percentage_of_total_system_cost' in old_value: new_value['fee_percentage_of_total_system_cost_1_1'] = old_value['percentage_of_total_system_cost'] else: new_value['fee_percentage_of_total_system_cost_1_1'] = '' if 'percentage_of_total_system_cost_cap_amt' in old_value: new_value['fee_percentage_of_total_system_cost_cap_amt_1_1'] = old_value['percentage_of_total_system_cost_cap_amt'] else: new_value['fee_percentage_of_total_system_cost_cap_amt_1_1'] = '' if 'fee_per_component_cap' in old_value: new_value['fee_per_component_cap_1_1'] = old_value['fee_per_component_cap'] else: new_value['fee_per_component_cap_1_1'] = '' if 'fee_per_component_cap_cap_amt' in old_value: new_value['fee_per_component_cap_cap_amt_1_1'] = old_value['fee_per_component_cap_cap_amt'] else: new_value['fee_per_component_cap_cap_amt_1_1'] = '' if 'fee_per_module' in old_value: new_value['fee_per_module_1_1'] = old_value['fee_per_module'] else: new_value['fee_per_module_1_1'] = '' if 'value' in old_value: new_value['fee_other_1_1'] = old_value['value'] else: new_value['fee_other_1_1'] = '' if 'flat_rate_amt' in old_value and old_value['flat_rate_amt'] != '' and old_value['flat_rate_amt'] != None: formula = 'flat_rate' elif 'jurisdiction_cost_recovery_notes' in old_value and old_value['jurisdiction_cost_recovery_notes'] != '' and old_value['jurisdiction_cost_recovery_notes'] != None: formula = 'jurisdiction_cost_recovery' elif 'percentage_of_total_system_cost' in old_value and old_value['percentage_of_total_system_cost'] != '' and old_value['percentage_of_total_system_cost'] != None: formula = 'percentage_of_total_system_cost' elif 'value' in old_value and old_value['value'] != '' and old_value['value'] != None: formula = 'other' elif ('fee_per_inverter' in old_value and old_value['fee_per_inverter'] != '' and old_value['fee_per_inverter'] != None) or ('fee_per_module' in old_value and old_value['fee_per_module'] != '' and 
old_value['fee_per_module'] != None) or ('fee_per_major_components' in old_value and old_value['fee_per_major_components'] != '' and old_value['fee_per_major_components'] != None): formula = 'fee_per_component' else: formula = 'flat_rate' new_value['fee_formula_1_1'] = formula value = json.dumps(new_value) # to convert to json ''' fieldValidationCycleUtil = FieldValidationCycleUtil() value = fieldValidationCycleUtil.process_answer(question, answer.value) encoded_value = value.encode('utf-8') answer.value = encoded_value answer.save() data['answers'] = answers return requestProcessor.render_to_response(request,'website/fee_correction.html', data, '') def prep_4_sprint_32(): return question_id = 282 # forms question = Question.objects.get(id=question_id) answers = AnswerReference.objects.filter(question__exact=question) if len(answers) > 0: for answer in answers: if answer.value != '' and answer.value != None: try: answer_details = json.loads(answer.value) if 'link_1' in answer_details.keys(): if answer_details['link_1'] != '': answer_details['form_option'] = 'link' answer.value = json.dumps(answer_details) answer.save() except: pass question_id = 96 # solar permitting checklist question = Question.objects.get(id=question_id) answers = AnswerReference.objects.filter(question__exact=question) if len(answers) > 0: for answer in answers: if answer.value != '' and answer.value != None: try: answer_details = json.loads(answer.value) if 'url' in answer_details.keys(): if answer_details['url'] != '': answer_details['form_option'] = 'link' answer_details['link_1'] = answer_details['url'] answer_details['url'] = '' answer_details['available'] = 'yes' answer.value = json.dumps(answer_details) answer.save() except: pass question_id = 105 # Online inspection checklist question = Question.objects.get(id=question_id) answers = AnswerReference.objects.filter(question__exact=question) if len(answers) > 0: for answer in answers: if answer.value != '' and answer.value != None: try: answer_details = json.loads(answer.value) if 'value' in answer_details.keys(): if answer_details['value'] != '': answer_details['form_option'] = 'link' answer_details['link_1'] = answer_details['value'] answer_details['value'] = '' answer_details['available'] = 'yes' answer.value = json.dumps(answer_details) answer.save() except: pass question_id = 62 # Required spec sheets question = Question.objects.get(id=question_id) answers = AnswerReference.objects.filter(question__exact=question) if len(answers) > 0: for answer in answers: if answer.value != '' and answer.value != None: try: answer_details = json.loads(answer.value) if 'value' in answer_details.keys(): if answer_details['value'] != '': answer_details['form_option'] = 'link' answer_details['link_1'] = answer_details['value'] answer_details['value'] = '' answer.value = json.dumps(answer_details) answer.save() except: pass
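# A minimal, self-contained sketch of the formula-selection priority that the
# commented-out block in correct_fee() above documents. The helper name
# _select_fee_formula is hypothetical (not part of this module); it only
# illustrates the precedence: flat rate, then jurisdiction cost recovery, then
# percentage of total system cost, then free-text "other", then per-component
# fees, with flat_rate as the fallback.
def _select_fee_formula(old_value):
    def _has(key):
        return key in old_value and old_value[key] not in ('', None)
    if _has('flat_rate_amt'):
        return 'flat_rate'
    if _has('jurisdiction_cost_recovery_notes'):
        return 'jurisdiction_cost_recovery'
    if _has('percentage_of_total_system_cost'):
        return 'percentage_of_total_system_cost'
    if _has('value'):
        return 'other'
    if _has('fee_per_inverter') or _has('fee_per_module') or _has('fee_per_major_components'):
        return 'fee_per_component'
    return 'flat_rate'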
{ "content_hash": "107ece9ee3c5e73a1dd367406358116a", "timestamp": "", "source": "github", "line_count": 1334, "max_line_length": 423, "avg_line_length": 56.26386806596702, "alnum_prop": 0.5360131102110425, "repo_name": "solarpermit/solarpermit", "id": "5c545931ab08ca76fda26a87950cadd5da0f994b", "size": "75056", "binary": false, "copies": "1", "ref": "refs/heads/devel", "path": "website/views/data_migration.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "126992" }, { "name": "JavaScript", "bytes": "808802" }, { "name": "Python", "bytes": "6625868" } ], "symlink_target": "" }
"""Takes a screenshot from an Android device.""" import argparse import logging import os import sys if __name__ == '__main__': sys.path.append( os.path.abspath( os.path.join(os.path.dirname(__file__), '..', '..', '..'))) from devil.android import device_utils from devil.android.tools import script_common from devil.utils import logging_common logger = logging.getLogger(__name__) def main(): # Parse options. parser = argparse.ArgumentParser(description=__doc__) logging_common.AddLoggingArguments(parser) script_common.AddDeviceArguments(parser) parser.add_argument( '-f', '--file', metavar='FILE', help='Save result to file instead of generating a ' 'timestamped file name.') parser.add_argument( 'host_file', nargs='?', help='File to which the screenshot will be saved.') args = parser.parse_args() host_file = args.host_file or args.file logging_common.InitializeLogging(args) devices = script_common.GetDevices(args.devices, args.blacklist_file) def screenshot(device): f = None if host_file: root, ext = os.path.splitext(host_file) f = '%s_%s%s' % (root, str(device), ext) f = device.TakeScreenshot(f) print 'Screenshot for device %s written to %s' % (str(device), os.path.abspath(f)) device_utils.DeviceUtils.parallel(devices).pMap(screenshot) return 0 if __name__ == '__main__': sys.exit(main())
{ "content_hash": "4b5a657b70280a6a7e1b24e5685c7d30", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 73, "avg_line_length": 27.163636363636364, "alnum_prop": 0.6365461847389559, "repo_name": "endlessm/chromium-browser", "id": "5ab2419f0ebb7c54b76a5ff4c6eb06d43f5f1e0b", "size": "1678", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "third_party/catapult/devil/devil/android/tools/screenshot.py", "mode": "33261", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
def dhh_to_halakhim(days, hours, halakhim):
    return halakhim + 1080 * (hours + 24 * days)

def halakhim_to_wdhh(total_halakhim):
    total_hours = total_halakhim / 1080
    halakhim = total_halakhim % 1080
    total_days = total_hours / 24
    hours = total_hours % 24
    total_weeks = total_days / 7
    days = total_days % 7
    return (total_weeks, days, hours, halakhim)

day_length = dhh_to_halakhim(1, 0, 0)
month_length = dhh_to_halakhim(29, 12, 793)
first_moon = dhh_to_halakhim(2, 5, 204)

am_epoch = 347996

def wd_to_jd((weeks, days)):
    return weeks * 7 + days + am_epoch

###########
# New year.

# We calculate Rosh Hashanah, the Jewish New Year's Day, by first counting up
# the right number of months since Day 1 (12 or 13 months per year, depending
# on intercalations)...

def months_before_year(year):
    metonic_cycles = ((year - 1) / 19) * 235
    r = (year - 1) % 19
    months = [ 0, 12, 24, 37, 49, 61, 74, 86,
               99, 111, 123, 136, 148, 160, 173, 185, 197, 210, 222 ]
    return metonic_cycles + months[r]

def molad_tishrei(year):
    return first_moon + months_before_year(year) * month_length

# But then there are some rather complex rules by which we can "postpone"
# Rosh Hashanah if the moon is too late in the day, or if the year would be
# starting on a "bad" day, i.e. one which would cause certain festivals to
# fall on forbidden days of the week, later in the year.

def rosh_hashanah(year):
    molad = molad_tishrei(year)
    (weeks, days, hours, halakhim) = halakhim_to_wdhh(molad)

    if hours > 18:
        molad = molad + day_length
        (weeks, days, hours, halakhim) = halakhim_to_wdhh(molad)

    if ((not is_leap(year)) and days == 3 and
        (hours >= 10 or (hours == 9 and halakhim > 204))):
        molad = molad + day_length
        (weeks, days, hours, halakhim) = halakhim_to_wdhh(molad)

    if (is_leap(year) and days == 2 and
        (hours >= 16 or (hours == 15 and halakhim > 589))):
        molad = molad + day_length
        (weeks, days, hours, halakhim) = halakhim_to_wdhh(molad)

    if days in [ 1, 4, 6 ]:
        molad = molad + day_length
        (weeks, days, hours, halakhim) = halakhim_to_wdhh(molad)

    return (weeks, days)

def is_leap(year):
    return (year % 19) in [ 0, 3, 6, 8, 11, 14, 17 ]

#########
# Pesach.

# Now we can finally calculate the beginning of Passover. It's simply the 15th
# of Nisan, but between 1 Tishrei and 15 Nisan there are three possible
# intercalations: a missing day in defective years, an extra day in excessive
# years, and the leap month of Adar I in leap years (which may themselves be
# defective, excessive, or neither).
#
# Excessive or defective years arise simply because of the complex round-off
# and postponement rules for Rosh Hashanah. The simplest way to calculate
# them is to just compute next year's Rosh Hashanah and see how many days are
# between now and then.
length_4 = 30 + 29 + 30 + 29 length_shevat = 30 length_adar_i = 30 length_adar = 29 length_nisan = 30 length_iyar = 29 length_sivan = 30 days_to_pesach = 30 + 29 + 30 + 29 + 30 + 29 + 14 def shevat_jd(year): rosh_hashanah_jd = wd_to_jd(rosh_hashanah(year)) next_rh_jd = wd_to_jd(rosh_hashanah(year + 1)) adar_i = length_adar_i * is_leap(year) excess = (next_rh_jd - rosh_hashanah_jd) - (354 + adar_i) return rosh_hashanah_jd + length_4 + excess def adar_jd(year): return shevat_jd(year) + length_shevat + (length_adar_i * is_leap(year)) def adar_i_jd(year): return shevat_jd(year) + length_shevat def nisan_jd(year): return adar_jd(year) + length_adar def pesach_jd(year): return nisan_jd(year) + 14 def iyar_jd(year): return nisan_jd(year) + length_nisan def sivan_jd(year): return iyar_jd(year) + length_iyar def tamuz_jd(year): return sivan_jd(year) + length_sivan # Cheesy Anno Domini to Anno Mundi conversion which punts the issue of the # different New Years' Days by being pegged to Passover. def ad_to_am_at_pesach(ad): return ad + 3760
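# A small, hedged demo of the functions above; the year 5784 is illustrative
# only, and the printed values should be verified against a trusted calendar
# before relying on them.
if __name__ == '__main__':
    year = 5784
    print 'AM %d is%s a leap year' % (year, '' if is_leap(year) else ' not')
    print 'Rosh Hashanah JD: %d' % wd_to_jd(rosh_hashanah(year))
    print 'Pesach JD:        %d' % pesach_jd(year)
    print 'AD 2024 at Pesach -> AM %d' % ad_to_am_at_pesach(2024)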
{ "content_hash": "e4181833e920b940c02798767c7afde6", "timestamp": "", "source": "github", "line_count": 123, "max_line_length": 79, "avg_line_length": 32.91869918699187, "alnum_prop": 0.6517658681155841, "repo_name": "jleen/antikythera", "id": "ff686c3a3dfb313825811dd6efc609905d3d5920", "size": "5013", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "hebrew.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "26609" } ], "symlink_target": "" }
from nipype.interfaces.ants import N4BiasFieldCorrection
from src.utils import splitext
from nipype.interfaces.ants.segmentation import BrainExtraction
from src.arg_parser import icbm_default_template, file_dir
from nipype.interfaces.base import CommandLine, CommandLineInputSpec
from nipype.interfaces.base import (TraitedSpec, File, traits, InputMultiPath,
                                    BaseInterface, OutputMultiPath, BaseInterfaceInputSpec, isdefined)
from nipype.interfaces.ants import registration, segmentation
from nipype.interfaces.ants.segmentation import Atropos
from nipype.interfaces.ants import Registration, ApplyTransforms
from src.utils import copyCommand
from src.ants import APPIANRegistration, APPIANApplyTransforms
from nipype.interfaces.utility import Rename
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu
import nipype.interfaces.utility as util
import src.initialization as init
import nipype.interfaces.io as nio
import nipype.interfaces.minc as minc
import nibabel as nib
import numpy as np
import ntpath
import os

global icbm_default_csf
global icbm_default_gm
global icbm_default_wm
icbm_default_csf=file_dir+os.sep+"/atlas/MNI152/mni_icbm152_csf_tal_nlin_asym_09c.nii.gz"
icbm_default_gm=file_dir+os.sep+"/atlas/MNI152/mni_icbm152_gm_tal_nlin_asym_09c.nii.gz"
icbm_default_wm=file_dir+os.sep+"/atlas/MNI152/mni_icbm152_wm_tal_nlin_asym_09c.nii.gz"
icbm_default_brain=file_dir+os.sep+"/atlas/MNI152/mni_icbm152_t1_tal_nlin_asym_09c_mask.nii.gz"

def get_workflow(name, opts):
    workflow = pe.Workflow(name=name)

    in_fields = ['mri']
    if opts.user_brainmask :
        in_fields += ['brain_mask_space_stx']

    if opts.user_mri_stx :
        in_fields += ['tfm_mri_stx', 'tfm_stx_mri']

    label_types = [ opts.results_label_type]
    label_imgs = [ opts.results_label_img ]
    stages = ['results']

    if opts.pvc_method != None :
        stages += ['pvc']
        label_imgs += [ opts.pvc_label_img ]
        label_types += [ opts.pvc_label_type ]

    if opts.quant_method != None :
        stages += ['quant']
        label_imgs += [ opts.results_label_img ]
        label_types += [ opts.results_label_type ]

    inputnode = pe.Node(niu.IdentityInterface(fields=in_fields), name="inputnode")

    out_fields=['tfm_stx_mri', 'tfm_mri_stx', 'brain_mask_space_stx', 'brain_mask_space_mri', 'mri_space_stx', 'mri_space_nat', 'template_space_mri' ]
    for stage, label_type in zip(stages, label_types):
        if 'internal_cls' == label_type :
            out_fields += [ stage+'_label_img']

    outputnode = pe.Node(niu.IdentityInterface(fields=out_fields), name='outputnode')

    ##########################################
    # T1 spatial (+ intensity) normalization #
    ##########################################
    if opts.n4_bspline_fitting_distance != 0 :
        n4 = pe.Node(N4BiasFieldCorrection(), "mri_intensity_normalized" )
        workflow.connect(inputnode, 'mri', n4, 'input_image')
        n4.inputs.dimension = 3
        n4.inputs.bspline_fitting_distance = opts.n4_bspline_fitting_distance
        n4.inputs.shrink_factor = opts.n4_shrink_factor
        n4.inputs.n_iterations = opts.n4_n_iterations
        n4.inputs.convergence_threshold = opts.n4_convergence_threshold
    else :
        n4 = pe.Node(niu.IdentityInterface(fields=["output_image"]), name='mri_no_intensity_normalization')
        workflow.connect(inputnode, 'mri', n4, 'output_image')

    # If the user does not provide a transformation file to align the MRI to the stereotaxic template, then use ants to do the registration
    if opts.user_mri_stx == '':
        mri2template = pe.Node(interface=APPIANRegistration(), name="mri_spatial_normalized")
        mri2template.inputs.moving_image_space="T1w"
        mri2template.inputs.fixed_image_space="stx"
        mri2template.inputs.fixed_image_mask = icbm_default_brain
        mri2template.inputs.fixed_image = opts.template
        workflow.connect(n4, 'output_image', mri2template, 'moving_image')
        if opts.user_ants_command != None :
            mri2template.inputs.user_ants_command = opts.user_ants_command
        if opts.normalization_type :
            mri2template.inputs.normalization_type = opts.normalization_type

        mri_stx_file = 'warped_image'
        mri_stx_node = mri2template
        template_nat_file = 'inverse_warped_image'

        tfm_node= mri2template
        tfm_inv_node= mri2template
        if opts.normalization_type == 'nl' :
            tfm_file='composite_transform'
            tfm_inv_file='inverse_composite_transform'
        elif opts.normalization_type == 'affine' or opts.normalization_type == 'rigid' :
            tfm_file='out_matrix'
            tfm_inv_file='out_matrix_inverse'
        else :
            print("Error: --normalization-type should be either rigid, affine, or nl")
            exit(1)
    else :
        transform_mri = pe.Node(interface=APPIANApplyTransforms(), name="transform_mri" )
        transform_mri.inputs.target_space='t1'
        workflow.connect(inputnode, 'mri', transform_mri, 'input_image')
        workflow.connect(inputnode, 'tfm_mri_stx', transform_mri, 'transform_1')
        transform_mri.inputs.reference_image = opts.template
        transform_mri.inputs.create_inverse_image = True

        mri_stx_node = transform_mri
        mri_stx_file = 'output_image'
        template_nat_file = 'output_image_inverse'

        tfm_node = inputnode
        tfm_file = 'tfm_mri_stx'
        tfm_inv_node=inputnode
        tfm_inv_file='tfm_stx_mri'

    #
    # T1 in native space will be part of the APPIAN target directory
    # and hence it won't be necessary to link to the T1 in the source directory.
    #
    copy_mri_nat = pe.Node(interface=copyCommand(), name="mri_nat" )
    workflow.connect(inputnode, 'mri', copy_mri_nat, 'input_file')

    ###################################
    # Segment T1 in Stereotaxic space #
    ###################################
    seg=None
    if opts.ants_atropos_priors == [] and opts.template == icbm_default_template :
        opts.ants_atropos_priors = file_dir+os.sep+"/atlas/MNI152/mni_icbm152_%02d_tal_nlin_asym_09c.nii.gz"
    if opts.ants_atropos_priors == [] :
        print("Warning : user did not provide alternative priors for template. This will affect your T1 MRI segmentation. "
              "Check this segmentation visually to make sure it is what you want.")

    for stage, label_type, img in zip(stages, label_types, label_imgs) :
        if seg == None :
            seg = pe.Node(interface=Atropos(), name="segmentation_ants")
            seg.inputs.dimension=3
            seg.inputs.number_of_tissue_classes=3 #len(opts.ants_atropos_priors)
            seg.inputs.initialization = 'PriorProbabilityImages'
            seg.inputs.prior_weighting = opts.ants_atropos_prior_weighting
            seg.inputs.prior_image = opts.ants_atropos_priors
            seg.inputs.likelihood_model = 'Gaussian'
            seg.inputs.posterior_formulation = 'Socrates'
            seg.inputs.use_mixture_model_proportions = True
            seg.inputs.args="-v 1"
            workflow.connect(mri_stx_node, mri_stx_file, seg, 'intensity_images' )
            seg.inputs.mask_image = icbm_default_brain
            #workflow.connect(brain_mask_node, brain_mask_file, seg, 'mask_image' )
        print(stage, img)
        if 'antsAtropos' == img :
            workflow.connect(seg, 'classified_image', outputnode, stage+'_label_img')

    ####################
    # T1 Brain masking #
    ####################
    if not opts.user_brainmask :
        # if opts.brain_extraction_method == 'beast':
        #     #Brain Mask MNI-Space
        #     mriMNI_brain_mask = pe.Node(interface=beast(), name="mri_stx_brain_mask")
        #     mriMNI_brain_mask.inputs.library_dir = library_dir
        #     mriMNI_brain_mask.inputs.template = library_dir+"/margin_mask.mnc"
        #     mriMNI_brain_mask.inputs.configuration = mriMNI_brain_mask.inputs.library_dir+os.sep+"default.2mm.conf"
        #     mriMNI_brain_mask.inputs.same_resolution = True
        #     mriMNI_brain_mask.inputs.median = True
        #     mriMNI_brain_mask.inputs.fill = True
        #     mriMNI_brain_mask.inputs.median = True
        #     workflow.connect(mri_stx_node, mri_stx_file, mriMNI_brain_mask, "in_file" )
        #     brain_mask_node = mriMNI_brain_mask
        #     brain_mask_file = 'out_file'
        # else :
        #mriMNI_brain_mask = pe.Node(interface=BrainExtraction(), name="mri_stx_brain_mask")
        #mriMNI_brain_mask.inputs.dimension = 3
        #mriMNI_brain_mask.inputs.brain_template = opts.template
        #template_base, template_ext = splitext(opts.template)
        #mriMNI_brain_mask.inputs.brain_probability_mask =template_base+'_variant-brain_pseg'+template_ext
        mriMNI_brain_mask = pe.Node(interface=SegmentationToBrainMask(), name="mri_stx_brain_mask")
        #workflow.connect(mri_stx_node, mri_stx_file, mriMNI_brain_mask, "anatomical_image" )
        workflow.connect(seg, 'classified_image', mriMNI_brain_mask, "seg_file" )

        brain_mask_node = mriMNI_brain_mask
        brain_mask_file = 'output_image'
    else :
        brain_mask_node = inputnode
        brain_mask_file = 'brain_mask_space_stx'

    #
    # Transform brain mask from stereotaxic to T1 native space
    #
    transform_brain_mask = pe.Node(interface=APPIANApplyTransforms(),name="transform_brain_mask")
    transform_brain_mask.inputs.interpolation = 'NearestNeighbor'
    transform_brain_mask.inputs.target_space = 't1'
    workflow.connect(brain_mask_node, brain_mask_file, transform_brain_mask, 'input_image')
    workflow.connect(tfm_node, tfm_inv_file, transform_brain_mask, 'transform_1')
    workflow.connect(copy_mri_nat,'output_file', transform_brain_mask,'reference_image')

    ###############################
    # Pass results to output node #
    ###############################
    workflow.connect(brain_mask_node, brain_mask_file, outputnode, 'brain_mask_space_stx')
    workflow.connect(tfm_node, tfm_file, outputnode, 'tfm_mri_stx' )
    workflow.connect(tfm_node, tfm_inv_file, outputnode, 'tfm_stx_mri' )
    workflow.connect(transform_brain_mask, 'output_image', outputnode, 'brain_mask_space_mri')
    #workflow.connect(mri_stx_node, mri_stx_file, outputnode, 'mri_space_stx')
    workflow.connect(copy_mri_nat, 'output_file', outputnode, 'mri_space_nat')
workflow.connect(tfm_node, template_nat_file, outputnode, 'template_space_mri') return(workflow) class SegmentationToBrainMaskOutput(TraitedSpec): output_image = File(argstr="%s", desc="Brain Mask") class SegmentationToBrainMaskInput(CommandLineInputSpec): output_image= File(argstr="%s", desc="Brain Mask", position=-1) seg_file = File(argstr="%s", desc="Segmentation", position=-1) class SegmentationToBrainMask(BaseInterface): input_spec = SegmentationToBrainMaskInput output_spec = SegmentationToBrainMaskOutput def _run_interface(self, runtime) : if not isdefined(self.inputs.output_image): self.inputs.output_image = self._gen_output(self.inputs.seg_file) img = nib.load(self.inputs.seg_file) data = img.get_data() data[ data > 1 ] = 1 out = nib.Nifti1Image(data.astype(np.int16), img.get_affine(), img.header ) out.to_filename (self.inputs.output_image) return runtime def _gen_output(self, basefile): fname = ntpath.basename(basefile) fname_list = splitext(fname) # [0]= base filename; [1] =extension dname = os.getcwd() return dname+ os.sep+fname_list[0] + "_brain_mask" + fname_list[1] def _list_outputs(self): if not isdefined(self.inputs.output_image): self.inputs.output_image = self._gen_output(self.inputs.seg_file) outputs = self.output_spec().get() outputs["output_image"] = self.inputs.output_image return outputs
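# Minimal usage sketch for SegmentationToBrainMask (illustrative only; the
# input path is hypothetical). The interface binarizes an Atropos-style label
# image (any label > 1 collapses to 1, so all tissue labels become 1) and, if
# no output name is given, writes <name>_brain_mask in the working directory,
# mirroring _gen_output() above:
#
#   brain_mask = SegmentationToBrainMask()
#   brain_mask.inputs.seg_file = '/path/to/sub-01_seg.nii.gz'  # hypothetical
#   result = brain_mask.run()
#   print(result.outputs.output_image)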
{ "content_hash": "26dc0f2f3f5009519b9036b3b255a891", "timestamp": "", "source": "github", "line_count": 266, "max_line_length": 190, "avg_line_length": 45.10526315789474, "alnum_prop": 0.6557759626604434, "repo_name": "APPIAN-PET/APPIAN", "id": "fb25d5ec76a806d131db738f0620a471971c6d22", "size": "11998", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/mri.py", "mode": "33261", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "810" }, { "name": "CSS", "bytes": "291714" }, { "name": "Dockerfile", "bytes": "4753" }, { "name": "HTML", "bytes": "1435754" }, { "name": "Handlebars", "bytes": "282945" }, { "name": "JavaScript", "bytes": "6259703" }, { "name": "Less", "bytes": "50506" }, { "name": "Makefile", "bytes": "603" }, { "name": "PHP", "bytes": "411588" }, { "name": "Python", "bytes": "399954" }, { "name": "Shell", "bytes": "2035" }, { "name": "Singularity", "bytes": "138" } ], "symlink_target": "" }
"""Time manipulation functions and variables. This module contain common methods that can be used to convert timestamps from various formats into number of micro seconds since January 1, 1970, 00:00:00 UTC that is used internally to store timestamps. It also contains various functions to represent timestamps in a more human readable form. """ import calendar import datetime import logging import time import construct import dateutil.parser import pytz from plaso.lib import errors from plaso.lib import py2to3 MONTH_DICT = { u'jan': 1, u'feb': 2, u'mar': 3, u'apr': 4, u'may': 5, u'jun': 6, u'jul': 7, u'aug': 8, u'sep': 9, u'oct': 10, u'nov': 11, u'dec': 12} class Timestamp(object): """Class for converting timestamps to plaso timestamps. The Plaso timestamp is a 64-bit signed timestamp value containing: micro seconds since 1970-01-01 00:00:00. The timestamp is not necessarily in UTC. """ # The minimum timestamp in seconds TIMESTAMP_MIN_SECONDS = -(((1 << 63) - 1) / 1000000) # The maximum timestamp in seconds TIMESTAMP_MAX_SECONDS = ((1 << 63) - 1) / 1000000 # The minimum timestamp in micro seconds TIMESTAMP_MIN_MICRO_SECONDS = -((1 << 63) - 1) # The maximum timestamp in micro seconds TIMESTAMP_MAX_MICRO_SECONDS = (1 << 63) - 1 # Timestamp that represents the timestamp representing not # a date and time value. # TODO: replace this with a real None implementation. NONE_TIMESTAMP = 0 # The days per month of a non leap year DAYS_PER_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] # The number of seconds in a day SECONDS_PER_DAY = 24 * 60 * 60 # The number of micro seconds per second MICRO_SECONDS_PER_SECOND = 1000000 # The multiplication factor to change milliseconds to micro seconds. MILLI_SECONDS_TO_MICRO_SECONDS = 1000 # The difference between Jan 1, 1980 and Jan 1, 1970 in seconds. FAT_DATE_TO_POSIX_BASE = 315532800 # The difference between Jan 1, 1601 and Jan 1, 1970 in micro seconds WEBKIT_TIME_TO_POSIX_BASE = 11644473600 * 1000000 # The difference between Jan 1, 1601 and Jan 1, 1970 in 100 nanoseconds. FILETIME_TO_POSIX_BASE = 11644473600 * 10000000 # The difference between Nov 10, 1582 and Jan 1, 1970 in 100 nanoseconds. UUID_TIME_TO_POSIX_BASE = 12219292800 * 10000000 # The number of seconds between January 1, 1904 and Jan 1, 1970. # Value confirmed with sleuthkit: # http://svn.sleuthkit.org/repos/sleuthkit/trunk/tsk3/fs/tsk_hfs.h # and linux source file linux/include/linux/hfsplus_fs.h HFSTIME_TO_POSIX_BASE = 2082844800 # The number of seconds between January 1, 1970 and January 1, 2001. # As specified in: # https://developer.apple.com/library/ios/documentation/ # cocoa/Conceptual/DatesAndTimes/Articles/dtDates.html COCOA_TIME_TO_POSIX_BASE = 978307200 # The difference between POSIX (Jan 1, 1970) and DELPHI (Dec 30, 1899). # http://docwiki.embarcadero.com/Libraries/XE3/en/System.TDateTime DELPHI_TIME_TO_POSIX_BASE = 25569 # The Windows SYSTEMTIME structure. SYSTEMTIME_STRUCT = construct.Struct( u'timestamp', construct.ULInt16(u'year'), construct.ULInt16(u'month'), construct.ULInt16(u'weekday'), construct.ULInt16(u'day'), construct.ULInt16(u'hour'), construct.ULInt16(u'minutes'), construct.ULInt16(u'seconds'), construct.ULInt16(u'milliseconds')) @classmethod def CopyFromString(cls, time_string): """Copies a timestamp from a string containing a date and time value. Args: time_string: A string containing a date and time value formatted as: YYYY-MM-DD hh:mm:ss.######[+-]##:## Where # are numeric digits ranging from 0 to 9 and the seconds fraction can be either 3 or 6 digits. 
                   The time of day, seconds fraction and timezone offset are
                   optional. The default timezone is UTC.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC.

    Raises:
      ValueError: if the time string is invalid or not supported.
    """
    if not time_string:
      raise ValueError(u'Invalid time string.')

    time_string_length = len(time_string)

    # The time string should at least contain 'YYYY-MM-DD'.
    if (time_string_length < 10 or time_string[4] != u'-' or
        time_string[7] != u'-'):
      raise ValueError(u'Invalid time string.')

    # If a time of day is specified, the time string should at least
    # contain 'YYYY-MM-DD hh:mm:ss'.
    if (time_string_length > 10 and (
        time_string_length < 19 or time_string[10] != u' ' or
        time_string[13] != u':' or time_string[16] != u':')):
      raise ValueError(u'Invalid time string.')

    try:
      year = int(time_string[0:4], 10)
    except ValueError:
      raise ValueError(u'Unable to parse year.')

    try:
      month = int(time_string[5:7], 10)
    except ValueError:
      raise ValueError(u'Unable to parse month.')

    if month not in range(1, 13):
      raise ValueError(u'Month value out of bounds.')

    try:
      day_of_month = int(time_string[8:10], 10)
    except ValueError:
      raise ValueError(u'Unable to parse day of month.')

    if day_of_month not in range(1, 32):
      raise ValueError(u'Day of month value out of bounds.')

    hours = 0
    minutes = 0
    seconds = 0

    if time_string_length > 10:
      try:
        hours = int(time_string[11:13], 10)
      except ValueError:
        raise ValueError(u'Unable to parse hours.')

      if hours not in range(0, 24):
        raise ValueError(u'Hours value out of bounds.')

      try:
        minutes = int(time_string[14:16], 10)
      except ValueError:
        raise ValueError(u'Unable to parse minutes.')

      if minutes not in range(0, 60):
        raise ValueError(u'Minutes value out of bounds.')

      try:
        seconds = int(time_string[17:19], 10)
      except ValueError:
        raise ValueError(u'Unable to parse seconds.')

      if seconds not in range(0, 60):
        raise ValueError(u'Seconds value out of bounds.')

    micro_seconds = 0
    timezone_offset = 0

    if time_string_length > 19:
      if time_string[19] != u'.':
        timezone_index = 19
      else:
        for timezone_index in range(19, time_string_length):
          if time_string[timezone_index] in [u'+', u'-']:
            break

        # The calculations that follow rely on the timezone index to point
        # beyond the string in case no timezone offset was defined.
        if timezone_index == time_string_length - 1:
          timezone_index += 1

      if timezone_index > 19:
        fraction_of_seconds_length = timezone_index - 20
        if fraction_of_seconds_length not in [3, 6]:
          raise ValueError(u'Invalid time string.')

        try:
          micro_seconds = int(time_string[20:timezone_index], 10)
        except ValueError:
          raise ValueError(u'Unable to parse fraction of seconds.')

        if fraction_of_seconds_length == 3:
          micro_seconds *= 1000

      if timezone_index < time_string_length:
        if (time_string_length - timezone_index != 6 or
            time_string[timezone_index + 3] != u':'):
          raise ValueError(u'Invalid time string.')

        try:
          timezone_offset = int(time_string[
              timezone_index + 1:timezone_index + 3])
        except ValueError:
          raise ValueError(u'Unable to parse timezone hours offset.')

        if timezone_offset not in range(0, 24):
          raise ValueError(u'Timezone hours offset value out of bounds.')

        # Note that when the sign of the timezone offset is negative
        # the difference needs to be added. We do so by flipping the sign.
        if time_string[timezone_index] == u'-':
          timezone_offset *= 60
        else:
          timezone_offset *= -60

        try:
          timezone_minutes_offset = int(time_string[
              timezone_index + 4:timezone_index + 6])
        except ValueError:
          raise ValueError(u'Unable to parse timezone minutes offset.')

        # The minutes portion of the offset needs the same sign as the
        # hours portion.
        if time_string[timezone_index] == u'-':
          timezone_offset += timezone_minutes_offset
        else:
          timezone_offset -= timezone_minutes_offset

        timezone_offset *= 60

    timestamp = int(calendar.timegm((
        year, month, day_of_month, hours, minutes, seconds)))

    return ((timestamp + timezone_offset) * 1000000) + micro_seconds

  @classmethod
  def CopyToDatetime(cls, timestamp, timezone, raise_error=False):
    """Copies the timestamp to a datetime object.

    Args:
      timestamp: The timestamp which is an integer containing the number
                 of micro seconds since January 1, 1970, 00:00:00 UTC.
      timezone: The timezone (pytz.timezone) object.
      raise_error: Boolean that if set to True will not absorb an
                   OverflowError if the timestamp is out of bounds. By default
                   there will be no error raised.

    Returns:
      A datetime object (instance of datetime.datetime). A datetime object of
      January 1, 1970 00:00:00 UTC is returned on error if raise_error is not
      set.

    Raises:
      OverflowError: If raise_error is set to True and an overflow error
                     occurs.
    """
    datetime_object = datetime.datetime(1970, 1, 1, 0, 0, 0, 0, tzinfo=pytz.UTC)
    try:
      datetime_object += datetime.timedelta(microseconds=timestamp)
      return datetime_object.astimezone(timezone)
    except OverflowError as exception:
      if raise_error:
        raise

      logging.error((
          u'Unable to copy {0:d} to a datetime object with error: '
          u'{1:s}').format(timestamp, exception))

    return datetime_object

  @classmethod
  def CopyToIsoFormat(cls, timestamp, timezone=pytz.UTC, raise_error=False):
    """Copies the timestamp to an ISO 8601 formatted string.

    Args:
      timestamp: The timestamp which is an integer containing the number
                 of micro seconds since January 1, 1970, 00:00:00 UTC.
      timezone: Optional timezone (instance of pytz.timezone).
      raise_error: Boolean that if set to True will not absorb an
                   OverflowError if the timestamp is out of bounds. By default
                   there will be no error raised.

    Returns:
      A string containing an ISO 8601 formatted date and time.
    """
    datetime_object = cls.CopyToDatetime(
        timestamp, timezone, raise_error=raise_error)
    return datetime_object.isoformat()

  @classmethod
  def CopyToPosix(cls, timestamp):
    """Converts microsecond timestamps to POSIX timestamps.

    Args:
      timestamp: The timestamp which is an integer containing the number
                 of micro seconds since January 1, 1970, 00:00:00 UTC.

    Returns:
      The timestamp which is an integer containing the number of seconds
      since January 1, 1970, 00:00:00 UTC.
    """
    return timestamp // cls.MICRO_SECONDS_PER_SECOND

  @classmethod
  def DaysInMonth(cls, month, year):
    """Determines the days in a month for a specific year.

    Args:
      month: The month where 0 represents January.
      year: The year as in 1970.

    Returns:
      An integer containing the number of days in the month.

    Raises:
      ValueError: if the month value is invalid.
    """
    if month not in range(0, 12):
      raise ValueError(u'Invalid month value')

    days_per_month = cls.DAYS_PER_MONTH[month]
    if month == 1 and cls.IsLeapYear(year):
      days_per_month += 1

    return days_per_month

  @classmethod
  def DaysInYear(cls, year):
    """Determines the days in a year.

    Args:
      year: The year as in 1970.

    Returns:
      An integer containing the number of days in the year.
    """
    days_in_year = 365
    if cls.IsLeapYear(year):
      return days_in_year + 1
    return days_in_year

  @classmethod
  def DayOfYear(cls, day, month, year):
    """Determines the day of the year for a specific day of a month in a year.

    Args:
      day: The day of the month where 0 represents the first day.
      month: The month where 0 represents January.
      year: The year as in 1970.

    Returns:
      An integer containing the day of year.
    """
    day_of_year = day

    for past_month in range(0, month):
      day_of_year += cls.DaysInMonth(past_month, year)

    return day_of_year

  @classmethod
  def FromCocoaTime(cls, cocoa_time):
    """Converts a Cocoa time to a timestamp.

    In Cocoa, time and date values are stored in an unsigned 32-bit integer
    containing the number of seconds since January 1, 2001 at 00:00:00
    (midnight) UTC (GMT).

    Args:
      cocoa_time: The timestamp in Cocoa format.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
    return cls.FromPosixTime(cocoa_time + cls.COCOA_TIME_TO_POSIX_BASE)

  @classmethod
  def FromDelphiTime(cls, delphi_time):
    """Converts a Delphi time to a timestamp.

    In Delphi, time and date values (TDateTime) are stored in a little endian
    64-bit floating point containing the number of days since December 30,
    1899 at 00:00:00 (midnight) Local Timezone. TDateTime does not have any
    time zone information.

    Args:
      delphi_time: The timestamp in Delphi format.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
    posix_time = (delphi_time - cls.DELPHI_TIME_TO_POSIX_BASE) * 86400.0
    if (posix_time < cls.TIMESTAMP_MIN_SECONDS or
        posix_time > cls.TIMESTAMP_MAX_SECONDS):
      return 0

    return cls.FromPosixTime(int(posix_time))

  @classmethod
  def FromFatDateTime(cls, fat_date_time):
    """Converts a FAT date and time into a timestamp.

    FAT date time is mainly used in DOS/Windows file formats and FAT.

    The FAT date and time is a 32-bit value containing two 16-bit values:
      * The date (lower 16-bit).
        * bits 0 - 4:  day of month, where 1 represents the first day
        * bits 5 - 8:  month of year, where 1 represents January
        * bits 9 - 15: year since 1980
      * The time of day (upper 16-bit).
        * bits 0 - 4:  seconds (in 2 second intervals)
        * bits 5 - 10: minutes
        * bits 11 - 15: hours

    Args:
      fat_date_time: The 32-bit FAT date time.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
    number_of_seconds = cls.FAT_DATE_TO_POSIX_BASE

    day_of_month = (fat_date_time & 0x1f) - 1
    month = ((fat_date_time >> 5) & 0x0f) - 1
    year = (fat_date_time >> 9) & 0x7f

    if day_of_month < 0 or day_of_month > 30 or month < 0 or month > 11:
      return 0

    number_of_days = cls.DayOfYear(day_of_month, month, 1980 + year)
    for past_year in range(0, year):
      number_of_days += cls.DaysInYear(1980 + past_year)

    fat_date_time >>= 16

    seconds = (fat_date_time & 0x1f) * 2
    minutes = (fat_date_time >> 5) & 0x3f
    hours = (fat_date_time >> 11) & 0x1f

    if hours > 23 or minutes > 59 or seconds > 59:
      return 0

    number_of_seconds += (((hours * 60) + minutes) * 60) + seconds
    number_of_seconds += number_of_days * cls.SECONDS_PER_DAY
    return number_of_seconds * cls.MICRO_SECONDS_PER_SECOND

  @classmethod
  def FromFiletime(cls, filetime):
    """Converts a FILETIME into a timestamp.

    FILETIME is mainly used in Windows file formats and NTFS.

    The FILETIME is a 64-bit value containing:
      100th nano seconds since 1601-01-01 00:00:00

    Technically FILETIME consists of 2 x 32-bit parts and is presumed
    to be unsigned.

    Args:
      filetime: The 64-bit FILETIME timestamp.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
    # TODO: Add a handling for if the timestamp equals to zero.
    if filetime < 0:
      return 0
    timestamp = (filetime - cls.FILETIME_TO_POSIX_BASE) / 10

    if timestamp > cls.TIMESTAMP_MAX_MICRO_SECONDS:
      return 0
    return timestamp

  @classmethod
  def FromHfsTime(cls, hfs_time, timezone=pytz.UTC, is_dst=False):
    """Converts a HFS time to a timestamp.

    HFS time is the same as HFS+ time, except stored in the local
    timezone of the user.

    Args:
      hfs_time: Timestamp in the hfs format (32 bit unsigned int).
      timezone: The timezone object of the system's local time.
      is_dst: A boolean to indicate the timestamp is corrected for daylight
              savings time (DST) only used for the DST transition period.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
    timestamp_local = cls.FromHfsPlusTime(hfs_time)
    return cls.LocaltimeToUTC(timestamp_local, timezone, is_dst)

  @classmethod
  def FromHfsPlusTime(cls, hfs_time):
    """Converts a HFS+ time to a timestamp.

    In HFS+ date and time values are stored in an unsigned 32-bit integer
    containing the number of seconds since January 1, 1904 at 00:00:00
    (midnight) UTC (GMT).

    Args:
      hfs_time: The timestamp in HFS+ format.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
    return cls.FromPosixTime(hfs_time - cls.HFSTIME_TO_POSIX_BASE)

  @classmethod
  def FromJavaTime(cls, java_time):
    """Converts a Java time to a timestamp.

    Java time is the number of milliseconds since January 1, 1970, 00:00:00
    UTC.

    URL: http://docs.oracle.com/javase/7/docs/api/
         java/sql/Timestamp.html#getTime%28%29

    Args:
      java_time: The Java Timestamp.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
    return java_time * cls.MILLI_SECONDS_TO_MICRO_SECONDS

  @classmethod
  def FromPosixTime(cls, posix_time):
    """Converts a POSIX timestamp into a timestamp.

    The POSIX time is a signed 32-bit or 64-bit value containing:
      seconds since 1970-01-01 00:00:00

    Args:
      posix_time: The POSIX timestamp.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
    if (posix_time < cls.TIMESTAMP_MIN_SECONDS or
        posix_time > cls.TIMESTAMP_MAX_SECONDS):
      return 0
    return int(posix_time) * cls.MICRO_SECONDS_PER_SECOND

  @classmethod
  def FromPosixTimeWithMicrosecond(cls, posix_time, microsecond):
    """Converts a POSIX timestamp with microsecond into a timestamp.

    The POSIX time is a signed 32-bit or 64-bit value containing:
      seconds since 1970-01-01 00:00:00

    Args:
      posix_time: The POSIX timestamp.
      microsecond: The microseconds to add to the timestamp.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
    timestamp = cls.FromPosixTime(posix_time)
    if not timestamp:
      return 0
    return timestamp + microsecond

  @classmethod
  def FromPythonDatetime(cls, datetime_object):
    """Converts a Python datetime object into a timestamp.

    Args:
      datetime_object: The datetime object (instance of datetime.datetime).

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
""" if not isinstance(datetime_object, datetime.datetime): return 0 posix_time = int(calendar.timegm(datetime_object.utctimetuple())) return cls.FromPosixTime(posix_time) + datetime_object.microsecond @classmethod def FromRFC2579Datetime( cls, year, month, day, hour, minutes, seconds, deciseconds, direction_from_utc, hours_from_utc, minutes_from_utc): """Converts values from an RFC2579 time to a timestamp. See https://tools.ietf.org/html/rfc2579. Args: year: An integer representing the year. month: An integer between 1 and 12. day: An integer representing the number of day in the month. hour: An integer representing the hour, 0 <= hour < 24. minutes: An integer, 0 <= minute < 60. seconds: An integer, 0 <= second < 60. deciseconds: An integer, 0 <= deciseconds < 10 direction_from_utc: An ascii character, either '+' or '-'. hours_from_utc: An integer representing the number of hours the time is offset from UTC. minutes_from_utc: An integer representing the number of seconds the time is offset from UTC. Returns: The timestamp which is an integer containing the number of micro seconds since January 1, 1970, 00:00:00 UTC or 0 on error. Raises: TimestampError: if the timestamp cannot be created from the time parts. """ microseconds = deciseconds * 100000 utc_offset_minutes = (hours_from_utc * 60) + minutes_from_utc if direction_from_utc == u'-': utc_offset_minutes = -utc_offset_minutes timezone = pytz.FixedOffset(utc_offset_minutes) return cls.FromTimeParts( year, month, day, hour, minutes, seconds, microseconds, timezone) @classmethod def FromSystemtime(cls, systemtime): """Converts a SYSTEMTIME structure into a timestamp. The SYSTEMTIME structure is a 128-bit struct containing 8 little endian 16-bit integers structured like so: struct { WORD year, WORD month, WORD day_of_week, WORD day, WORD hour, WORD minute, WORD second, WORD millisecond } Args: systemtime (bytes): 128-bit SYSTEMTIME timestamp value. Returns: int: timestamp, which contains the number of micro seconds since January 1, 1970, 00:00:00 UTC or 0 on error. """ try: timestamp = cls.SYSTEMTIME_STRUCT.parse(systemtime) except construct.ConstructError as exception: raise errors.TimestampError( u'Unable to create timestamp from {0:s} with error: {1:s}'.format( systemtime, exception)) return cls.FromTimeParts( year=timestamp.year, month=timestamp.month, day=timestamp.day, hour=timestamp.hour, minutes=timestamp.minutes, seconds=timestamp.seconds, microseconds=( timestamp.milliseconds * cls.MILLI_SECONDS_TO_MICRO_SECONDS)) @classmethod def FromTimeParts( cls, year, month, day, hour, minutes, seconds, microseconds=0, timezone=pytz.UTC): """Converts a list of time entries to a timestamp. Args: year: An integer representing the year. month: An integer between 1 and 12. day: An integer representing the number of day in the month. hour: An integer representing the hour, 0 <= hour < 24. minutes: An integer, 0 <= minute < 60. seconds: An integer, 0 <= second < 60. microseconds: Optional number of microseconds ranging from: 0 <= microsecond < 1000000. timezone: Optional timezone (instance of pytz.timezone). Returns: The timestamp which is an integer containing the number of micro seconds since January 1, 1970, 00:00:00 UTC or 0 on error. Raises: TimestampError: if the timestamp cannot be created from the time parts. 
""" try: date = datetime.datetime( year, month, day, hour, minutes, seconds, microseconds) except ValueError as exception: raise errors.TimestampError(( u'Unable to create timestamp from {0:04d}-{1:02d}-{2:02d} ' u'{3:02d}:{4:02d}:{5:02d}.{6:06d} with error: {7:s}').format( year, month, day, hour, minutes, seconds, microseconds, exception)) if isinstance(timezone, py2to3.STRING_TYPES): timezone = pytz.timezone(timezone) date_use = timezone.localize(date) posix_time = int(calendar.timegm(date_use.utctimetuple())) return cls.FromPosixTime(posix_time) + microseconds @classmethod def FromTimeString( cls, time_string, dayfirst=False, gmt_as_timezone=True, timezone=pytz.UTC): """Converts a string containing a date and time value into a timestamp. Args: time_string: String that contains a date and time value. dayfirst: An optional boolean argument. If set to true then the parser will change the precedence in which it parses timestamps from MM-DD-YYYY to DD-MM-YYYY (and YYYY-MM-DD will be YYYY-DD-MM, etc). gmt_as_timezone: Sometimes the dateutil parser will interpret GMT and UTC the same way, that is not make a distinction. By default this is set to true, that is GMT can be interpreted differently than UTC. If that is not the expected result this attribute can be set to false. timezone: Optional timezone object (instance of pytz.timezone) that the data and time value in the string represents. This value is used when the timezone cannot be determined from the string. Returns: The timestamp which is an integer containing the number of micro seconds since January 1, 1970, 00:00:00 UTC or 0 on error. Raises: TimestampError: if the time string could not be parsed. """ if not gmt_as_timezone and time_string.endswith(' GMT'): time_string = u'{0:s}UTC'.format(time_string[:-3]) try: # TODO: deprecate the use of dateutil parser. datetime_object = dateutil.parser.parse(time_string, dayfirst=dayfirst) except (TypeError, ValueError) as exception: raise errors.TimestampError(( u'Unable to convert time string: {0:s} in to a datetime object ' u'with error: {1:s}').format(time_string, exception)) if datetime_object.tzinfo: datetime_object = datetime_object.astimezone(pytz.UTC) else: datetime_object = timezone.localize(datetime_object) return cls.FromPythonDatetime(datetime_object) @classmethod def FromUUIDTime(cls, uuid_time): """Converts a UUID verion 1 time into a timestamp. The UUID version 1 time is a 60-bit value containing: 100th nano seconds since 1582-10-15 00:00:00 Args: uuid_time: The 60-bit UUID version 1 timestamp. Returns: The timestamp which is an integer containing the number of micro seconds since January 1, 1970, 00:00:00 UTC or 0 on error. """ # TODO: Add a handling for if the timestamp equals to zero. if uuid_time < 0: return 0 timestamp = (uuid_time - cls.UUID_TIME_TO_POSIX_BASE) / 10 if timestamp > cls.TIMESTAMP_MAX_MICRO_SECONDS: return 0 return timestamp @classmethod def FromWebKitTime(cls, webkit_time): """Converts a WebKit time into a timestamp. The WebKit time is a 64-bit value containing: micro seconds since 1601-01-01 00:00:00 Args: webkit_time: The 64-bit WebKit time timestamp. Returns: The timestamp which is an integer containing the number of micro seconds since January 1, 1970, 00:00:00 UTC or 0 on error. """ if webkit_time < (cls.TIMESTAMP_MIN_MICRO_SECONDS + cls.WEBKIT_TIME_TO_POSIX_BASE): return 0 return webkit_time - cls.WEBKIT_TIME_TO_POSIX_BASE @classmethod def GetNow(cls): """Retrieves the current time (now) as a timestamp in UTC. 
    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC.
    """
    time_elements = time.gmtime()
    return calendar.timegm(time_elements) * 1000000

  @classmethod
  def IsLeapYear(cls, year):
    """Determines if a year is a leap year.

    A leap year is any year divisible by 4 but not by 100, or any year
    divisible by 400.

    Args:
      year: The year as in 1970.

    Returns:
      A boolean value indicating the year is a leap year.
    """
    return (year % 4 == 0 and year % 100 != 0) or year % 400 == 0

  @classmethod
  def LocaltimeToUTC(cls, timestamp, timezone, is_dst=False):
    """Converts the timestamp in localtime of the timezone to UTC.

    Args:
      timestamp: The timestamp which is an integer containing the number
                 of micro seconds since January 1, 1970, 00:00:00 UTC.
      timezone: The timezone (pytz.timezone) object.
      is_dst: A boolean to indicate the timestamp is corrected for daylight
              savings time (DST) only used for the DST transition period.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
    if timezone and timezone != pytz.UTC:
      datetime_object = (
          datetime.datetime(1970, 1, 1, 0, 0, 0, 0, tzinfo=None) +
          datetime.timedelta(microseconds=timestamp))

      # Check if timezone is UTC since utcoffset() does not support is_dst
      # for UTC and will raise.
      datetime_delta = timezone.utcoffset(datetime_object, is_dst=is_dst)
      seconds_delta = int(datetime_delta.total_seconds())
      timestamp -= seconds_delta * cls.MICRO_SECONDS_PER_SECOND

    return timestamp

  @classmethod
  def RoundToSeconds(cls, timestamp):
    """Takes a timestamp value and rounds it to a second precision."""
    leftovers = timestamp % cls.MICRO_SECONDS_PER_SECOND
    scrubbed = timestamp - leftovers
    rounded = round(float(leftovers) / cls.MICRO_SECONDS_PER_SECOND)

    return int(scrubbed + rounded * cls.MICRO_SECONDS_PER_SECOND)


def GetCurrentYear():
  """Determines the current year."""
  datetime_object = datetime.datetime.now()
  return datetime_object.year


def GetYearFromPosixTime(posix_time, timezone=pytz.UTC):
  """Gets the year from a POSIX timestamp.

  The POSIX time is the number of seconds since 1970-01-01 00:00:00 UTC.

  Args:
    posix_time: An integer containing the number of seconds since
                1970-01-01 00:00:00 UTC.
    timezone: Optional timezone of the POSIX timestamp.

  Returns:
    The year of the POSIX timestamp.

  Raises:
    ValueError: If the posix timestamp is out of the range of supported
                values.
  """
  datetime_object = datetime.datetime.fromtimestamp(posix_time, tz=timezone)
  return datetime_object.year
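# A brief, hedged sanity check of the conversions above; illustrative only and
# not part of the plaso API. The expected ISO strings in the comments assume
# the UTC conversion documented in CopyFromString.
if __name__ == u'__main__':
  _timestamp = Timestamp.CopyFromString(u'2016-01-02 03:04:05.678901')
  # Expected: 2016-01-02T03:04:05.678901+00:00
  print(Timestamp.CopyToIsoFormat(_timestamp))

  # A +02:00 offset is folded into the UTC timestamp:
  _timestamp = Timestamp.CopyFromString(u'2016-01-02 03:04:05.678901+02:00')
  # Expected: 2016-01-02T01:04:05.678901+00:00
  print(Timestamp.CopyToIsoFormat(_timestamp))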
{ "content_hash": "53b75affcbc140729f888e8acbd16e57", "timestamp": "", "source": "github", "line_count": 914, "max_line_length": 80, "avg_line_length": 33.4507658643326, "alnum_prop": 0.6578792438019232, "repo_name": "dc3-plaso/plaso", "id": "b770edb229c5ce414cc3b81233653d3a2b80242e", "size": "30598", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "plaso/lib/timelib.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1683" }, { "name": "Makefile", "bytes": "1151" }, { "name": "Python", "bytes": "3875098" }, { "name": "Shell", "bytes": "17861" } ], "symlink_target": "" }
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import CONFIG_DISPATCHER , MAIN_DISPATCHER, DEAD_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_3
import time

max_entry_num=16384
max_port_id=52
max_vlan_id=4093
l2_mcast_group_curr_num=0
sleep_time=1

class L2McastGroupMaxTest(app_manager.RyuApp):
    OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]

    def __init__(self, *args, **kwargs):
        super(L2McastGroupMaxTest , self).__init__(*args, **kwargs)

    def clearAllFlowsAndGroups(self, datapath):
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser

        #clear all flows
        match = parser.OFPMatch()
        self.del_flow(datapath , ofproto.OFPTT_ALL, 0, match, inst=[])

        #clear all groups
        buckets = []
        self.del_group(datapath, ofproto.OFPGT_INDIRECT, ofproto.OFPG_ALL, buckets)
        time.sleep(sleep_time)

    def add_group(self, datapath, type, group_id, buckets):
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser
        req = parser.OFPGroupMod(datapath, ofproto.OFPGC_ADD, type, group_id, buckets)
        datapath.send_msg(req)

    def del_group(self, datapath, type, group_id, buckets):
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser
        req = parser.OFPGroupMod(datapath, ofproto.OFPGC_DELETE, type, group_id, buckets)
        datapath.send_msg(req)

    def add_flow(self, datapath , table_id, priority , match , inst):
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser
        mod = parser.OFPFlowMod(datapath=datapath, table_id=table_id,
                                priority=priority , match=match , instructions=inst)
        datapath.send_msg(mod)

    def del_flow(self, datapath , table_id, priority , match , inst):
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser
        mod = parser.OFPFlowMod(datapath=datapath, command=ofproto.OFPFC_DELETE,
                                table_id=table_id, priority=priority,
                                out_port=ofproto.OFPP_ANY, out_group=ofproto.OFPG_ANY,
                                match=match , instructions=inst)
        datapath.send_msg(mod)

    def create_all_l2_interface(self, datapath, pvid, p_start, p_end, v_start, v_end):
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser

        for v in range(v_start, v_end+1, 1):
            print "create all L2 intf groups in VLAN %d" % v
            for p in range(p_start, p_end+1, 1):
                group_id=(v<<16|p)
                if v==pvid:
                    actions = [parser.OFPActionOutput(port=p), parser.OFPActionPopVlan()]
                else:
                    actions = [parser.OFPActionOutput(port=p)]
                buckets = [parser.OFPBucket(weight=100, watch_port=0, watch_group=0, actions=actions)]
                self.add_group(datapath, ofproto.OFPGT_INDIRECT, group_id, buckets)
        time.sleep(sleep_time)
        print "finished creating all l2_interface groups"

    # ./dpctl tcp:192.168.1.10:6633 group-mod cmd=add,type=all,group=0x30020001
    # group=any,port=any,weight=1 group=0x20001 group=any,port=any,weight=1
    # group=0x20003 group=any,port=any,weight=1 group=0x20005
    def create_all_l2_mcast_group(self, datapath):
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser

        print 'Start to test l2 multicast group'
        for i in range(1, (max_entry_num / max_vlan_id) + 1, 1):
            for v in range(1, max_vlan_id+1, 1):
                #print 'create L2 multicast group in VLAN %d' % v
                buckets = []
                for p in range(1, max_port_id+1, 1):
                    #create l2_mcast_group, type is all
                    actions = [parser.OFPActionGroup((v <<16) + p)]
                    buckets.append(parser.OFPBucket(weight=100, watch_port=ofproto.OFPP_ANY,
                                                    watch_group=ofproto.OFPG_ANY, actions=actions))
                #print buckets
                self.add_group(datapath, ofproto.OFPGT_ALL, (0x30000000 + (v<<16) + i), buckets)
                #print 'Finish to create L2 multicast group %x' % (0x30000000 + (v<<16) + i)
        time.sleep(sleep_time)
        print 'Finished filling the l2 multicast group table'
    def send_group_stats_request(self, datapath):
        print "send group stats request"
        ofp = datapath.ofproto
        ofp_parser = datapath.ofproto_parser

        req = ofp_parser.OFPGroupStatsRequest(datapath, 0, ofp.OFPG_ALL)
        datapath.send_msg(req)

    @set_ev_cls(ofp_event.EventOFPGroupStatsReply, MAIN_DISPATCHER)
    def group_stats_reply_handler(self, ev):
        global l2_mcast_group_curr_num
        print 'group stats reply handler'
        for stat in ev.msg.body:
            #print('length=%d group_id=%x '
            #      'ref_count=%d packet_count=%d byte_count=%d '
            #      'duration_sec=%d duration_nsec=%d\n' %
            #      (stat.length, stat.group_id,
            #       stat.ref_count, stat.packet_count,
            #       stat.byte_count, stat.duration_sec,
            #       stat.duration_nsec))
            if ((stat.group_id & 0x30000000) != 0):
                l2_mcast_group_curr_num += 1

        if (l2_mcast_group_curr_num != max_entry_num):
            print ('Got group stats reply; max L2 multicast group count not reached yet: current entry count is %d, max is %d' %
                   (l2_mcast_group_curr_num, max_entry_num))
        else:
            print ('Got group stats reply; successfully reached the max L2 multicast group count: current entry count is %d, max is %d' %
                   (l2_mcast_group_curr_num, max_entry_num))

    @set_ev_cls(ofp_event.EventOFPSwitchFeatures , CONFIG_DISPATCHER)
    def switch_features_handler(self, ev):
        datapath = ev.msg.datapath

        self.clearAllFlowsAndGroups(datapath)
        self.create_all_l2_interface(datapath, pvid=0, p_start=1, p_end=max_port_id, v_start=1, v_end=max_vlan_id)
        self.create_all_l2_mcast_group(datapath)

        #get all groups to verify
        self.send_group_stats_request(datapath)
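# Group-ID layout used above, summarized for reference (a hedged reading of
# this test script, not a quotation of any switch specification): L2 interface
# groups encode (vlan << 16) | port, and the L2 multicast groups add a 0x3
# type nibble in the top bits plus a per-VLAN index:
#
#   l2_interface_id = (vlan << 16) | port            # e.g. VLAN 10, port 3 -> 0x000A0003
#   l2_multicast_id = 0x30000000 | (vlan << 16) | i  # e.g. VLAN 10, idx 1  -> 0x300A0001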
{ "content_hash": "9469635540eb92ed7e0d4aa2e0686e3f", "timestamp": "", "source": "github", "line_count": 138, "max_line_length": 159, "avg_line_length": 46.166666666666664, "alnum_prop": 0.5944121801914927, "repo_name": "macauleycheng/AOS_OF_Example", "id": "c24eaccf1c0278e4b954a055d2b634c647e66ce8", "size": "6371", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "00-table-group-unit-test/00-max-flow-test/L2_Mcast_Group_Max.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "483774" }, { "name": "Shell", "bytes": "38" } ], "symlink_target": "" }
__source__ = 'https://leetcode.com/problems/implement-stack-using-queues/' # https://github.com/kamyu104/LeetCode/blob/master/Python/implement-stack-using-queues.py # Time: push: O(n), pop: O(1), top: O(1) # Space: O(n) # # Description: Leetcode # 225. Implement Stack using Queues # # Implement the following operations of a stack using queues. # # push(x) -- Push element x onto stack. # pop() -- Removes the element on top of the stack. # top() -- Get the top element. # empty() -- Return whether the stack is empty. # Notes: # You must use only standard operations of a queue -- which # means only push to back, peek/pop from front, size, and is # empty operations are valid. # Depending on your language, queue may not be supported natively. # You may simulate a queue by using a list or deque (double-ended # queue), as long as you use only standard operations of a queue. # You may assume that all operations are valid (for example, no pop # or top operations will be called on an empty stack). # # Companies # Bloomberg # Related Topics # Stack Design # Similar Questions # Implement Queue using Stacks # import unittest import collections class Queue: def __init__(self): self.data = collections.deque() def push(self, x): self.data.append(x) def peek(self): return self.data[0] def pop(self): return self.data.popleft() def size(self): return len(self.data) def empty(self): return len(self.data) == 0 class Stack: # initialize your data structure here. def __init__(self): self.q_ = Queue() # @param x, an integer # @return nothing def push(self, x): self.q_.push(x) for _ in xrange(self.q_.size() - 1): self.q_.push(self.q_.pop()) # @return nothing def pop(self): self.q_.pop() # @return an integer def top(self): return self.q_.peek() # @return an boolean def empty(self): return self.q_.empty() # Time: push: O(1), pop: O(n), top: O(1) # Space: O(n) class Stack2: # initialize your data structure here. def __init__(self): self.q_ = Queue() self.top_ = None # @param x, an integer # @return nothing def push(self, x): self.q_.push(x) self.top_ = x # @return nothing def pop(self): for _ in xrange(self.q_.size() - 1): self.top_ = self.q_.pop() self.q_.push(self.top_) self.q_.pop() # @return an integer def top(self): return self.top_ # @return an boolean def empty(self): return self.q_.empty() class Stack3(object): def __init__(self): """ initialize your data structure here. """ self.queue = [] def push(self, x): """ :type x: int :rtype: nothing """ self.queue.append(x) for i in xrange(len(self.queue) - 1): self.queue.append(self.queue.pop(0)) def pop(self): """ :rtype: nothing """ self.queue.pop(0) def top(self): """ :rtype: int """ return self.queue[0] def empty(self): """ :rtype: bool """ return len(self.queue) == 0 class TestMethods(unittest.TestCase): def test_Local(self): self.assertEqual(1, 1) if __name__ == '__main__': unittest.main() Java = ''' # Thought: https://leetcode.com/problems/implement-stack-using-queues/solution/ # only one queue # 56ms 82.44% class MyStack { Queue<Integer> mQ; /** Initialize your data structure here. */ public MyStack() { mQ = new LinkedList<>(); } /** Push element x onto stack. */ public void push(int x) { mQ.add(x); for (int i = 0; i< mQ.size() - 1; i++) { //except the last one just added mQ.add(mQ.poll()); } } /** Removes the element on top of the stack and returns that element. */ public int pop() { return mQ.poll(); } /** Get the top element. */ public int top() { return mQ.peek(); } /** Returns whether the stack is empty. 
*/
    public boolean empty() {
        return mQ.isEmpty();
    }
}

# Using 2 queues
# 80ms 19.43%
class MyStack {
    Queue<Integer> q1;
    Queue<Integer> q2;
    /** Initialize your data structure here. */
    public MyStack() {
        q1 = new LinkedList<>();
        q2 = new LinkedList<>();
    }

    /** Push element x onto stack. */
    public void push(int x) {
        q1.add(x);
        if (q1.size() > 1) q2.add(q1.poll());
    }

    /** Removes the element on top of the stack and returns that element. */
    public int pop() {
        int n = q1.poll();
        while (q2.size() > 0) q1.add(q2.poll());
        while (q1.size() > 1) q2.add(q1.poll());
        return n;
    }

    /** Get the top element. */
    public int top() {
        return q1.peek();
    }

    /** Returns whether the stack is empty. */
    public boolean empty() {
        return q1.isEmpty() && q2.isEmpty();
    }
}

# Using 2 queues and swapping them
# 58ms 71.91%
class MyStack {
    /** Initialize your data structure here. */
    private Queue<Integer> q1, q2;
    int top;

    public MyStack() {
        q1 = new LinkedList<Integer>();
        q2 = new LinkedList<Integer>();
    }

    /** Push element x onto stack. */
    public void push(int x) {
        q2.offer(x);
        top = x;
    }

    /** Removes the element on top of the stack and returns that element. */
    public int pop() {
        while(q2.size() > 1) {
            top = q2.poll();
            q1.offer(top);
        }
        int temp = q2.poll();
        swapQ();
        return temp;
    }

    private void swapQ() {
        Queue<Integer> temp = q1;
        q1 = q2;
        q2 = temp;
    }

    /** Get the top element. */
    public int top() {
        return top;
    }

    /** Returns whether the stack is empty. */
    public boolean empty() {
        return q1.isEmpty() && q2.isEmpty();
    }
}

# A stack is usually implemented with a list
# FYI only, not valid for this question
# 55ms 88.09%
class MyStack {
    LinkedList<Integer> mList;
    /** Initialize your data structure here. */
    public MyStack() {
        mList = new LinkedList<Integer>();
    }

    /** Push element x onto stack. */
    public void push(int x) {
        mList.add(x);
    }

    /** Removes the element on top of the stack and returns that element. */
    public int pop() {
        //Reverse List
        if ( mList == null ) return -1;
        else {
            int temp = (mList.getLast());
            mList.removeLast();
            return temp;
        }
    }

    /** Get the top element. */
    public int top() {
        return mList.isEmpty() ? -1 : mList.getLast();
    }

    /** Returns whether the stack is empty. */
    public boolean empty() {
        return mList.isEmpty();
    }
}

/**
 * Your MyStack object will be instantiated and called as such:
 * MyStack obj = new MyStack();
 * obj.push(x);
 * int param_2 = obj.pop();
 * int param_3 = obj.top();
 * boolean param_4 = obj.empty();
 */
'''
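
# --- Usage sketch (added illustration, not part of the original solution) ---
# The one-queue trick: after each push, the older elements are rotated behind
# the new one, so the front of the queue is always the top of the stack and
# pop()/top() stay O(1). (Assumes Python 2, matching the xrange-based code.)
def _demo_stack():
    s = Stack()
    s.push(1)
    s.push(2)
    assert s.top() == 2       # newest element sits at the queue front
    s.pop()
    assert s.top() == 1
    assert not s.empty()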
{ "content_hash": "a46715acd51e1462a804e6ca90b0217d", "timestamp": "", "source": "github", "line_count": 317, "max_line_length": 89, "avg_line_length": 22.652996845425868, "alnum_prop": 0.5513159727057513, "repo_name": "JulyKikuAkita/PythonPrac", "id": "29727ba2bd817a5640200f317700f8c4815a7eb6", "size": "7181", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cs15211/Implement_Stack_Using_Queues.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "DIGITAL Command Language", "bytes": "191608" }, { "name": "HTML", "bytes": "647778" }, { "name": "Python", "bytes": "5429558" } ], "symlink_target": "" }
from manager_rest.security import ( MissingPremiumFeatureResource, ) try: from cloudify_premium.secrets_provider.secured_secrets_provider_resource \ import ( SecuredSecretsProviderResource, SecuredSecretsProviderKeyResource, ) except ImportError: SecuredSecretsProviderResource = MissingPremiumFeatureResource SecuredSecretsProviderKeyResource = MissingPremiumFeatureResource class SecretsProvider(SecuredSecretsProviderResource): pass class SecretsProviderKey(SecuredSecretsProviderKeyResource): pass
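
# Illustrative sketch (hypothetical helper, not part of cloudify-manager):
# the optional-dependency pattern used above, factored out -- return the
# premium implementation when its package is importable, otherwise fall
# back to a stub that reports the feature as missing.
def _import_or_fallback(module_name, attr, fallback):
    import importlib
    try:
        return getattr(importlib.import_module(module_name), attr)
    except ImportError:
        return fallback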
{ "content_hash": "5175e812b7c725647e4eae4cdc2af0da", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 78, "avg_line_length": 27.19047619047619, "alnum_prop": 0.7810858143607706, "repo_name": "cloudify-cosmo/cloudify-manager", "id": "13983559b3331494545697756a26a11a960f07aa", "size": "571", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rest-service/manager_rest/rest/resources_v3_1/secrets_provider.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Clojure", "bytes": "4067" }, { "name": "Dockerfile", "bytes": "3843" }, { "name": "HTML", "bytes": "320" }, { "name": "Mako", "bytes": "494" }, { "name": "PLpgSQL", "bytes": "119062" }, { "name": "Python", "bytes": "3825971" }, { "name": "Shell", "bytes": "49121" } ], "symlink_target": "" }
import pandas as pd
import sys
import numpy as np
import scipy as sp
import os
import itertools


def pickle2df(datadir, filePath = 'wifi_passenger_count_10.pkl'):
    '''
    Read data in pickle format and convert it to a dataframe
    '''
    data = pd.read_pickle(os.path.join(datadir,filePath))
    # data.info()
    print 'read pickle and transform to dataframe done ...'
    return data

def generate_submit_sample_without_lable(datadir, filePath = 'WIFI_AP_Passenger_Records_chusai_1stround.csv'):
    '''
    Generate a submission sample without the label column.
    Format: WIFIAPTag, slice10min (the passengerCount column is missing)
    Return type: dataframe
    @test passed
    '''
    data = pickle2df(datadir)
    # wifi tag
    data.info()
    wifi_ap = pd.Series(data['WIFIAPTag_'].unique(), name = 'WIFIAPTag').to_frame()
    print "length of wifi_ap: ",len(wifi_ap)
    # wifi_ap.rename(columns={'WIFIAPTag_': 'WIFIAPTag'}, inplace=True)
    wifi_ap.info()
    wifi_ap['key'] = pd.Series(['A'] * wifi_ap['WIFIAPTag'].size)
    time_line = generate_time_line()
    # time line
    time_line['key'] = pd.Series(['A'] * time_line['slice10min'].size)
    # Cartesian product
    sample = pd.merge(wifi_ap, time_line, on='key')
    sample = sample.drop('key',axis=1)
    sample.info()
    # print sample.size
    # print sample.head(10)
    return sample

def generate_time_line(hour_delta = 3, hour = 15, pre = '2016-09-14'):
    '''
    Generate the time line and return it as a dataframe
    @test passed
    '''
    minute = ['0','1','2','3','4','5']
    time_line = []
    for i in range(0, hour_delta):
        time_line.append([(pre + '-' + str(hour + i) + '-' + item) for item in minute])
    return pd.Series(list(itertools.chain.from_iterable(time_line)), name = 'slice10min').to_frame()

if __name__ == '__main__':
    pass
    datadir_test = '../data/test'
    datadir = '../data'
    wifi_ap = 'WIFI_AP_Passenger_Records_chusai_1stround.csv'
    # generate_submit_sample_without_lable(datadir)
    data = pickle2df(datadir)
    print data.head(5)
    data.info()
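
def cartesian_product(left, right, key='key'):
    '''
    Minimal sketch (helper name is illustrative, not from the original code)
    of the constant-key merge trick used in
    generate_submit_sample_without_lable: older pandas has no cross join, so
    both frames get the same dummy column and an inner merge on it yields
    their Cartesian product. Assumes neither frame already has a column named
    ``key``.
    '''
    left = left.copy()
    right = right.copy()
    left[key] = 'A'
    right[key] = 'A'
    return pd.merge(left, right, on=key).drop(key, axis=1)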
{ "content_hash": "1656bf85dae51bd48bfbdb29afc4a6ab", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 110, "avg_line_length": 24.283783783783782, "alnum_prop": 0.6733444629938787, "repo_name": "d2Code/Prediction-of-the-spatial-and-temporal-distribution-of-airport-passenger-flow", "id": "f19a032b35cbd6da940b4fac9934d924107d339e", "size": "1936", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "preprocess/utils.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "6348" } ], "symlink_target": "" }
from __future__ import (absolute_import, unicode_literals) """ =========================== Flux Balance Analysis Model =========================== :Authors: Moritz Emanuel Beber Alexandra Mirela Grigore Nils Kölling Nikolaus Sonnenschein :Date: 2011-03-28 :Copyright: Copyright(c) 2011 Jacobs University of Bremen. All rights reserved. :File: fba.py """ __all__ = ["FBAModel"] from future.utils import (python_2_unicode_compatible, with_metaclass) from .lpmodels import MetaLPModelFacade #iAF1260_minimal_medium = ["ca2_b", # "cl_b", # "co2_b", # "cobalt2_b", # "cu2_b", # "fe2_b", # "fe3_b", # "h_b", # "h2o_b", # "k_b", # "mg2_b", # "mn2_b", # "mobd_b", # "na1_b", # "nh4_b", # "pi_b", # "so4_b", # "tungs_b", # "zn2_b", # "cbl1_b"] @python_2_unicode_compatible class FBAModel(with_metaclass(MetaLPModelFacade, object)): """ """ _error = NotImplementedError("method not available for the chosen solver") def __copy__(self): raise self._error def __deepcopy__(self, memo=dict()): raise self._error def copy(self): raise self._error def add_compound(self, compound, coefficients=None): """ Add a new compound to the model. The method can also be used to add multiple compounds at the same time. In that case each argument must be an iterable of equal length and matching positions corresponding to the same compound. Parameters ---------- compound: `BasicCompound` An instance of `BasicCompound`. coefficients: iterable Pairs of `BasicReaction` instances and their stoichiometric coefficient. """ raise self._error def iter_compounds(self, reaction=None, coefficients=False): """ Returns ------- iterator: Iterator over all compounds. """ raise self._error def modify_compound_coefficients(self, compound, coefficients): """ Modify the coefficients of reactions that the compound participates in. This method can be used to introduce the compound into an existing reaction that it previously did not participate in. Parameters ---------- compound: `BasicCompound` or iterable An instance of `BasicCompound`. coefficients: iterable Pairs of `BasicReaction` instances and their stoichiometric coefficient. """ raise self._error def free_compound(self, compound): """ Completely removes any bounds on the compound(s) effectively removing it from the model. Parameters ---------- compound: iterable or `BasicCompound` A single `BasicCompound` instance or an iterable with multiple ones. """ raise self._error def knockout_compound(self, compound): """ Knocks out the reactions consuming or producing the given compound(s). Parameters ---------- compound: iterable or `BasicCompound` A single `BasicCompound` instance or an iterable with multiple ones. """ raise self._error def delete_compound(self, compound): """ Completely removes the compound(s) from the model. Parameters ---------- compound: iterable or `BasicCompound` A single `BasicCompound` instance or an iterable with multiple ones. """ raise self._error def add_reaction(self, reaction, coefficients, lb=None, ub=None): """ Add a new reaction to the model. The method can also be used to add multiple reactions at the same time. In that case each argument must be an iterable of equal length and matching positions corresponding to the same reaction. Parameters ---------- reaction: `BasicReaction` An instance of `BasicReaction`. coefficients: iterable Pairs of `BasicCompound` instances and their stoichiometric coefficient. lb: float (optional) A lower bound on the mass flux through this reaction. A default value can be set in the options. 
ub: float (optional) An upper bound on the mass flux through this reaction. A default value can be set in the options. """ raise self._error def make_binary(self, reaction): """ Make a reaction variable in the model binary. The method can also be used for multiple reactions at the same time. In that case the argument must be an iterable. Parameters ---------- reaction: `BasicReaction` or iterable An instance of `BasicReaction`. """ raise self._error def make_integer(self, reaction): """ Make a reaction variable in the model integer-only. The method can also be used for multiple reactions at the same time. In that case the argument must be an iterable. Parameters ---------- reaction: `BasicReaction` or iterable An instance of `BasicReaction`. """ raise self._error def iter_reactions(self, compound=None, coefficients=False): """ Parameters ---------- compound: `BasicCompound` (optional) A single `BasicCompound` instance. coefficients: bool (optional) Return also the stoichiometric coefficients of the given compound in the reactions it participates in. Returns ------- iterator: Iterator over all reactions (excluding sources and drains) that `compound` is involved in. """ raise self._error def modify_reaction_coefficients(self, reaction, coefficients): """ Modify the coefficients of compounds in the reaction. This can be used to introduce an existing compound into a reaction that it previously did not take part in. Parameters ---------- reaction: `BasicReaction` or iterable An instance of `BasicReaction`. coefficients: iterable Pairs of `BasicCompound` instances and their stoichiometric coefficient. """ raise self._error def modify_reaction_bounds(self, reaction, lb=None, ub=None): """ Modifies the allowed flux through the reaction(s). The method can also be used to modify multiple reactions at the same time. In that case each argument must be an iterable of equal length and matching positions corresponding to the same reaction. Parameters ---------- reaction: iterable or `BasicReaction` A single `BasicReaction` instance or an iterable with multiple ones. lb: float (optional) A lower bound on the mass flux through this reaction. A default value can be set in the options. ub: float (optional) An upper bound on the mass flux through this reaction. A default value can be set in the options. """ raise self._error def iter_reaction_bounds(self, reaction=None): """ Query the bounds on reactions currently in place. Parameters ---------- reaction: iterable or `BasicReaction` A single `BasicReaction` instance or an iterable with multiple ones. Returns ------- tuple or iterator: The lower and upper bound on a reaction or an iterator over pairs in the same order as the iterable provided. With no argument the returned iterator walks over triples of all present reactions with their respective lower and upper bounds. """ raise self._error def is_fixed(self, reaction): """ Tests whether a reaction's lower and upper bound are equal. Parameters ---------- reaction: iterable or `BasicReaction` A single `BasicReaction` instance or an iterable with multiple ones. """ raise self._error def free_reaction(self, reaction): """ Completely removes any bounds on the reaction(s). Parameters ---------- reaction: iterable or `BasicReaction` A single `BasicReaction` instance or an iterable with multiple ones. """ raise self._error def knockout_reaction(self, reaction): """ Constrains the allowed reaction flux to zero mimicking a dysfunctional reaction or reactions. 
Parameters ---------- reaction: iterable or `BasicReaction` A single `BasicReaction` instance or an iterable with multiple ones. """ self.modify_reaction_bounds(reaction, lb=0.0, ub=0.0) def delete_reaction(self, reaction): """ Completely removes the reaction(s) from the model. Parameters ---------- reaction: iterable or `BasicReaction` A single `BasicReaction` instance or an iterable with multiple ones. """ raise self._error def add_source(self, compound, lb=None, ub=None): """ Adds a source for a certain compound or compounds to the model. Parameters ---------- compound: iterable or `BasicCompound` A single `BasicCompound` instance or an iterable with multiple ones. lb: float (optional) A lower bound on the mass flux through this reaction. A default value can be set in the options. ub: float (optional) An upper bound on the mass flux through this reaction. A default value can be set in the options. """ raise self._error def iter_sources(self): """ Returns ------- iterator: Iterator over all compounds that have sources. """ raise self._error def delete_source(self, compound): """ Remove an existing source of a compound or compounds from the model. Parameters ---------- compound: iterable or `BasicCompound` A single `BasicCompound` instance or an iterable with multiple ones. """ raise self._error def add_drain(self, compound, lb=None, ub=None): """ Adds a drain for a certain compound or compounds to the model. Parameters ---------- compound: iterable or `BasicCompound` A single `BasicCompound` instance or an iterable with multiple ones. lb: float (optional) A lower bound on the mass flux through this reaction. A default value can be set in the options. ub: float (optional) An upper bound on the mass flux through this reaction. A default value can be set in the options. """ raise self._error def iter_drains(self): """ Returns ------- iterator: Iterator over all compounds that have drains. """ raise self._error def delete_drain(self, compound): """ Remove an existing drain of a compound or compounds from the model. Parameters ---------- compound: iterable or `BasicCompound` A single `BasicCompound` instance or an iterable with multiple ones. """ raise self._error def set_objective_reaction(self, reaction, factor): """ Sets a certain reaction as objective (for FBA). This replaces previous definitions. Parameters: ------- reaction: iterable or `BasicReaction` A single `BasicReaction` instance or an iterable with multiple ones. factor: iterable or float Weight of the reaction in the objective function. """ raise self._error def iter_objective_reaction(self, factor=False): """ Parameters ---------- factor: bool (optional) Causes the returned iterator to run over pairs of reaction and weight in the objective function. Returns ------- iterator: Current reaction(s) that are used as objectives in LP. """ raise self._error def set_medium(self, compound, lb=None, ub=None): """ Modifies the allowed flux of the compound sources thereby specifying the growth medium composition. Parameters ---------- compound: iterable or `BasicCompounds` Iterable of `BasicCompounds` that should be in the growth medium. lb: iterable (optional) Lower bounds on the mass flux of the sources. A default value can be set in the options. ub: iterable (optional) Upper bounds on the mass flux of the sources. A default value can be set in the options. """ raise self._error def fba(self, maximize=True): """ Performs an optimization of the current objective(s) in the model. 
""" raise self._error def parsimonious_fba(self): """ Performs an optimization of the current objective(s) in the model followed by a minimisation of all other unnecessary fluxes. """ raise self._error def get_objective_value(self, threshold=None): """ Parameters ---------- threshold: float (optional) Value below which the objective value is considered to be zero. By default the model precision is used. Returns ------- float: Flux of the set objective reaction(s). """ raise self._error def iter_flux(self, reaction=None, threshold=None): """ Parameters ---------- reaction: iterable or `BasicReaction` (optional) A single `BasicReaction` instance or an iterable with multiple ones. threshold: float (optional) Value below which a flux value is considered to be zero. By default the model precision is used. Returns ------- iterator: Iterator over pairs of `BasicReaction`s and their flux or just the flux value. """ raise self._error def iter_reduced_cost(self, reaction=None, threshold=None): """ Parameters ---------- reaction: iterable or `BasicReaction` (optional) A single `BasicReaction` instance or an iterable with multiple ones. threshold: float (optional) Value below which a flux value is considered to be zero. By default the model precision is used. Returns ------- iterator: Iterator over pairs of `BasicReaction`s and their reduced cost or just the reduced cost. """ raise self._error def iter_shadow_price(self, compound=None, threshold=None): """ Parameters ---------- compound: iterable or `BasicCompound`s (optional) Iterable of `BasicCompound`s that should be in the growth medium. threshold: float (optional) Value below which a flux value is considered to be zero. By default the model precision is used. Returns ------- iterator: Iterator over pairs of `BasicCompound`s and their shadow price or just shadow price. """ raise self._error def export2lp(self, filename): """ Export the current model to a flat text file in *.lp format. """ raise self._error #def generate_random_medium(transporters, percentage_range=(5, 100), # minimal=list(), transp="_Transp"): # """ # Generates a completely random medium based on a percentage of activated # transporters. # # Parameters: # ------- # transporters: # asdfd # percentage_range: tuple # A random percentage of transporters is considered for the random medium # according to this range. The first of the pair must be smaller than or # equal to the second. # minimal: iterable # Some always active transporters that form a minimal medium that is # extended with random other components. # transp: str # The suffix for transporters in the model. # """ # assert percentage_range[0] <= percentage_range[1] # # # only choose from non-minimal transporters # # -> ensures constant active percentage by preventing overlap # choices = [t for t in transporters if not t in minimal] # # # select a random percentage of active transporters # active = random.sample(choices, int(np.ceil(len(choices) * # random.uniform(*percentage_range) / 100.0))) # # # since bounds is a dictionary we do not care about duplicates # for trns in minimal: # active.append(trns) # # return active # #def set_random_medium(model, default_bound=(20.0, 20.0), # percentage_range=(5, 100), minimal=list(), transp="_Transp"): # """ # Generates and sets a completely random medium based on a percentage # of activated transporters. # # Parameters: # ------- # model: FBAModel # The model is modified directly. 
# default_bound: tuple # A default upper limit for all components of the medium is chosen from # this range of floating point numbers. The first of the pair must be # smaller than or equal to the second. # percentage_range: tuple # A random percentage of transporters is considered for the random medium # according to this range. The first of the pair must be smaller than or # equal to the second. # minimal: iterable # Some always active transporters that form a minimal medium that is # extended with random other components. # transp: str # The suffix for transporters in the model. # """ # assert default_bound[0] <= default_bound[1] # # medium = generate_random_medium(list(model.get_transporters()), # percentage_range, minimal, transp) # upper = random.uniform(*default_bound) # # return model.set_medium(medium, upper)
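
def _example_knockout_growth(model, reaction):
    """
    Usage sketch (illustrative only, not part of the module): simulate a
    reaction knockout and return the resulting objective flux. Note that
    with the plain facade above every call raises NotImplementedError until
    a solver backend provides the implementations.
    """
    model.knockout_reaction(reaction)
    model.fba(maximize=True)
    return model.get_objective_value()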
{ "content_hash": "a1c54f5ec12bc955ab62e715c27bd541", "timestamp": "", "source": "github", "line_count": 597, "max_line_length": 82, "avg_line_length": 31.2964824120603, "alnum_prop": 0.5905052451295226, "repo_name": "Midnighter/pyorganism", "id": "4118faf83528d6259075b9a838b83038c086a65d", "size": "18711", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pyorganism/metabolism/fba.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "3259" }, { "name": "Python", "bytes": "533487" }, { "name": "Shell", "bytes": "3679" } ], "symlink_target": "" }
import mysql.connector import sqlite3 import json target = sqlite3.connect('data.db') tc = target.cursor() #source = mysql.connector.connect(user = 'garba1', host = 'localhost', database = 'reactome_simple') #sc = source.cursor() source = mysql.connector.connect(user = 'garba1', host = 'localhost', database = 'reactome_complex') sc = source.cursor() def table_exists(table): tc.execute('SELECT name FROM sqlite_master WHERE type="table" AND name=?', (table,)) return tc.fetchone() def assign_id(reactome_id, _type=''): tc.execute('SELECT id FROM objects WHERE reactome_id=?', (reactome_id,)) result = tc.fetchone() if result: return int(result[0]) tc.execute('INSERT INTO objects(reactome_id, type) VALUES (?, ?)', (reactome_id, _type)) tc.execute('SELECT last_insert_rowid()') return int(tc.fetchone()[0]) # Creates or updates an object with the given reactome id. Returns # the canonical id. def ensure_object(data): def get(key): if key in data: return data[key] else: return None reactome_id = get('reactome_id') name = get('name') _type = get('type') subtype = get('subtype') tc.execute('SELECT id FROM objects WHERE reactome_id=?', (reactome_id,)) result = tc.fetchone() if result: return int(result[0]) tc.execute('INSERT INTO objects(reactome_id, name, type, subtype) ' 'VALUES (?, ?, ?, ?)', (reactome_id, name, _type, subtype)) tc.execute('SELECT last_insert_rowid()') return int(tc.fetchone()[0]) # Create table for gauging progress. if not table_exists('state'): tc.execute('CREATE TABLE state(name TEXT PRIMARY KEY, value INTEGER)') tc.execute('INSERT OR IGNORE INTO state VALUES (?, ?)', ('pathways', 0)) tc.execute('INSERT OR IGNORE INTO state VALUES (?, ?)', ('reactions', 0)) tc.execute('INSERT OR IGNORE INTO state VALUES (?, ?)', ('entities', 0)) tc.execute('INSERT OR IGNORE INTO state VALUES (?, ?)', ('locations', 0)) tc.execute('INSERT OR IGNORE INTO state VALUES (?, ?)', ('igep', 0)) def state(key): tc.execute('SELECT value FROM state WHERE name=?', (key,)) value = tc.fetchone() if value: value = int(value[0]) return value def state_set(key, value): tc.execute('UPDATE state SET value=? WHERE name=?', (value, key)) if not table_exists('objects'): tc.execute('CREATE TABLE objects(' ' id INTEGER PRIMARY KEY,' ' type TEXT,' ' subtype TEXT,' ' name TEXT,' ' reactome_id INTEGER UNIQUE)') if not table_exists('pathways'): tc.execute('CREATE TABLE pathways(' ' pathway_id INTEGER,' ' object_id INTEGER,' ' PRIMARY KEY(pathway_id, object_id))') if not table_exists('locations'): tc.execute('CREATE TABLE locations(' ' location_id INTEGER,' ' object_id INTEGER,' ' PRIMARY KEY(location_id, object_id))') if not table_exists('reactions'): tc.execute('CREATE TABLE reactions(' ' reaction_id INTEGER,' ' entity_id INTEGER,' ' direction TEXT,' ' PRIMARY KEY(reaction_id, entity_id))') if not table_exists('sets'): tc.execute('CREATE TABLE sets(' ' set_id INTEGER,' ' component_id INTEGER,' ' PRIMARY KEY(set_id, component_id))') if not table_exists('sources'): tc.execute('CREATE TABLE sources( ' ' object_id INTEGER, ' ' source TEXT)') if not table_exists('uniprot'): tc.execute('CREATE TABLE uniprot( ' ' id INTEGER, ' ' uniprot_id TEXT, ' ' PRIMARY KEY(id, uniprot_id))') if not table_exists('papers'): tc.execute('CREATE TABLE papers( ' ' paper_id INTEGER, ' ' object_id TEXT, ' ' PRIMARY KEY(paper_id, object_id))') ################################################################ # Pathways pathways = {} # id -> reactome_id pathways_r = {} # reactome_id -> id if 0 >= state('pathways'): # Delete all pathways. 
tc.execute('DELETE FROM objects WHERE type="pathway"') # First, read in the pathways json file. pathway_file = open('human_pathways.json', 'r') def add_pathway(node): pathway_reactome_id = int(node['dbId']) name = node['name'] tc.execute('INSERT INTO objects(type, name, reactome_id) ' 'VALUES ("pathway", ?, ?)', (name, pathway_reactome_id)) tc.execute('SELECT last_insert_rowid()') pathway_id = int(tc.fetchone()[0]) pathways[pathway_id] = pathway_reactome_id pathways_r[pathway_reactome_id] = pathway_id for child in node['children']: if int(child['dbId']) not in pathways_r: add_pathway(child) tc.execute('INSERT OR IGNORE INTO pathways(pathway_id, object_id) VALUES (?, ?)', (pathway_id, pathways_r[int(child['dbId'])])) add_pathway(json.load(pathway_file)) pathway_file.close() state_set('pathways', 1) target.commit() else: tc.execute('SELECT id, reactome_id FROM objects WHERE type="pathway"'); for (pathway_id, pathway_reactome_id) in tc: pathways[pathway_id] = pathway_reactome_id ################################################################ # Reactions reactions = {} # id -> reactome_id reaction_pathways = {} # reaction -> [pathway] if 0 >= state('reactions'): # Delete all reactions. tc.execute('DELETE FROM objects WHERE type="reaction"') tc.execute('DELETE FROM pathways ' 'WHERE pathway_id IN (' ' SELECT p.pathway_id ' ' FROM pathways p ' ' INNER JOIN objects o ' ' ON p.object_id=o.id ' ' WHERE o.type="reaction")') # Loop through pathways, grabbing their reactions. for pathway_id in pathways: pathway_reactome_id = pathways[pathway_id] # Get reactions in pathway. sc.execute('SELECT event.hasEvent, event.hasEvent_class, name.name ' 'FROM Pathway_2_hasEvent event ' ' INNER JOIN Event_2_name name ' ' ON event.hasEvent=name.DB_ID ' 'WHERE event.DB_ID=%s ' 'AND event.hasEvent_class <> "Pathway"', (pathway_reactome_id,)) for (reaction_reactome_id, subtype, name) in sc: # Create if needed. reaction_id = assign_id(reaction_reactome_id, 'reaction') if reaction_id not in reactions: tc.execute('UPDATE objects SET ' ' subtype=?,' ' name=? ' 'WHERE reactome_id=?', (subtype, name, reaction_reactome_id)) reactions[reaction_id] = reaction_reactome_id reaction_pathways[reaction_id] = [] # Put reaction in pathway. tc.execute('INSERT OR IGNORE INTO pathways VALUES (?, ?)', (pathway_id, reaction_id)) reaction_pathways[reaction_id].append(pathway_id) state_set('reactions', 1) target.commit() else: tc.execute('SELECT id, reactome_id FROM objects WHERE type="reaction"'); for (reaction_id, reaction_reactome_id) in tc: reactions[reaction_id] = reaction_reactome_id reaction_pathways[reaction_id] = [] tc.execute('SELECT p.pathway_id, p.object_id ' 'FROM pathways p ' ' INNER JOIN objects o ' ' ON p.object_id=o.id ' 'WHERE o.type="reaction"') for (pathway_id, reaction_id) in tc: reaction_pathways[reaction_id].append(pathway_id) ################################################################ # Entities entities = {} # id -> reactome_id entities_2 = {} # reactome_id -> id entity_reactions = {} # enttiy -> [reaction] accessioned_type = { 'ReferenceGeneProduct': 'protein', 'ReferenceIsoform': 'protein', 'ReferenceRNASequence': 'rna', 'ReferenceDNASequence': 'dna', 'ReferenceSequence': 'sequence', 'None': 'none'} if 0 >= state('entities'): # Delete all entities. tc.execute('DELETE FROM objects WHERE type="entity"') # Loop through reactions, grabbing their entities. 
for reaction_id in reactions: reaction_reactome_id = reactions[reaction_id] # input sc.execute('SELECT input ' 'FROM ReactionlikeEvent_2_input ' 'WHERE DB_ID=%s', (reaction_reactome_id,)) for (entity_reactome_id,) in sc: # Create if needed. entity_id = assign_id(entity_reactome_id, 'entity') if entity_id not in entities: entities[entity_id] = entity_reactome_id entity_reactions[entity_id] = [] # Add entity to reaction. tc.execute('INSERT OR IGNORE INTO reactions VALUES (?, ?, "input")', (reaction_id, entity_id)) entity_reactions[entity_id].append(reaction_id) # output sc.execute('SELECT output ' 'FROM ReactionlikeEvent_2_output ' 'WHERE DB_ID=%s', (reaction_reactome_id,)) for (entity_reactome_id,) in sc: # Create if needed. entity_id = assign_id(entity_reactome_id, 'entity') if entity_id not in entities: entities[entity_id] = entity_reactome_id entity_reactions[entity_id] = [] # Add entity to reaction. tc.execute('INSERT OR IGNORE INTO reactions VALUES (?, ?, "output")', (reaction_id, entity_id)) entity_reactions[entity_id].append(reaction_id) def add_set(entity_id): entity_reactome_id = entities[entity_id] sc.execute('SELECT DB_ID FROM EntitySet WHERE DB_ID=%s', (entity_reactome_id,)) if not sc.fetchone(): return sc.execute('SELECT hasMember, hasMember_class ' 'FROM EntitySet_2_hasMember ' 'WHERE DB_ID=%s ', (entity_reactome_id,)) added = [] for (sub_entity_reactome_id, sub_entity_type) in sc: sub_entity_id = assign_id(sub_entity_reactome_id, 'entity') if sub_entity_id not in entities: entities[sub_entity_id] = sub_entity_reactome_id entity_reactions[sub_entity_id] = [] added.append(sub_entity_id) tc.execute('INSERT OR IGNORE INTO sets VALUES (?, ?)', (entity_id, sub_entity_id)) for sub_entity_id in added: add_set(sub_entity_id) # Loop through entities, looking for sets. for entity_id in entities.copy(): add_set(entity_id) # Grab cross-reference ids. for entity_id in entities: entity_reactome_id = entities[entity_id] sc.execute('SELECT b.identifier ' 'FROM EntityWithAccessionedSequence a ' ' INNER JOIN ReferenceEntity b ON a.referenceEntity=b.DB_ID ' 'WHERE a.DB_ID=%s AND b.referenceDatabase=2', (entity_reactome_id,)) uniprot_id = sc.fetchone() if uniprot_id: tc.execute('INSERT OR IGNORE INTO uniprot VALUES (?, ?)', (entity_id, uniprot_id[0])) state_set('entities', 1) target.commit() else: tc.execute('SELECT id, reactome_id FROM objects WHERE type="entity"'); for (entity_id, entity_reactome_id) in tc: entities[entity_id] = entity_reactome_id ################################################################ # Locations locations = {} # id -> reactome_id if 0 >= state('locations'): # Loop through entities, grabbing their location for entity_id in entities: entity_reactome_id = entities[entity_id] sc.execute('SELECT p.compartment, n.name ' 'FROM PhysicalEntity_2_compartment p ' ' INNER JOIN GO_CellularComponent_2_name n ' ' ON p.compartment=n.DB_ID ' 'WHERE p.DB_ID=%s AND n.name_rank=0', (entity_reactome_id,)) for (location_reactome_id, location_name) in sc: # Create if needed. location_id = ensure_object({ 'reactome_id': location_reactome_id, 'name': location_name, 'type': 'location'}) if location_id not in locations: locations[location_id] = location_reactome_id # Register entity in location. tc.execute('INSERT OR IGNORE INTO locations VALUES (?, ?)', (location_id, entity_id)) state_set('locations', 1) target.commit() ################################################################ # Entities Again if 1 >= state('entities'): # Loop through entities, doing special processing for each type. 
    for entity_id in entities:
        entity_reactome_id = entities[entity_id]

        # Accessioned Sequences
        sc.execute('SELECT referenceEntity_class FROM EntityWithAccessionedSequence '
                   'WHERE DB_ID=%s', (entity_reactome_id,))
        x = sc.fetchone()
        if x is not None:
            subtype = accessioned_type[x[0]]
            tc.execute('UPDATE objects SET subtype=? WHERE id=?',
                       (subtype, entity_id))
            continue

        # Check for it being a genome encoded entity only, give it an "unknown" tag.
        sc.execute('SELECT DB_ID FROM GenomeEncodedEntity WHERE DB_ID=%s',
                   (entity_reactome_id,))
        if sc.fetchone():
            tc.execute('UPDATE objects SET subtype="unknown" WHERE id=?',
                       (entity_id,))
            continue

        # Simple Entity
        sc.execute('SELECT DB_ID FROM SimpleEntity WHERE DB_ID=%s',
                   (entity_reactome_id,))
        if sc.fetchone():
            tc.execute('UPDATE objects SET subtype="simple" WHERE id=?',
                       (entity_id,))
            continue

        # Complex Entities
        sc.execute('SELECT DB_ID FROM Complex WHERE DB_ID=%s',
                   (entity_reactome_id,))
        if sc.fetchone():
            tc.execute('UPDATE objects SET subtype="complex" WHERE id=?',
                       (entity_id,))
            continue

        # Entity Sets
        sc.execute('SELECT DB_ID FROM EntitySet WHERE DB_ID=%s',
                   (entity_reactome_id,))
        if sc.fetchone():
            # Mark as a set.
            tc.execute('UPDATE objects SET subtype="set" WHERE id=?',
                       (entity_id,))
            continue

        # Assign "other"
        tc.execute('UPDATE objects SET subtype="other" WHERE id=?',
                   (entity_id,))

    state_set('entities', 2)
    target.commit()

if 2 >= state('entities'):
    # Put entities in their reactions' pathways.
    tc.execute('INSERT OR IGNORE INTO pathways(pathway_id, object_id) '
               ' SELECT '
               '  p.pathway_id AS pathway_id, '
               '  o.id AS object_id '
               ' FROM objects o '
               '  INNER JOIN reactions r ON o.id=r.entity_id '
               '  INNER JOIN pathways p ON r.reaction_id=p.object_id '
               ' WHERE o.type="entity"')

    def add_set(entity_id):
        # Get pathways for entity.
        tc.execute('SELECT pathway_id FROM pathways WHERE object_id=?',
                   (entity_id,))
        pathways = []
        for (pathway,) in tc:
            pathways.append(pathway)

        # Get components for entity.
        components = []
        tc.execute('SELECT component_id FROM sets WHERE set_id=?',
                   (entity_id,))
        for (component,) in tc:
            components.append(component)

        for component in components:
            for pathway in pathways:
                tc.execute('INSERT OR IGNORE INTO pathways VALUES(?, ?)',
                           (pathway, component))
            add_set(component)

    # Loop through entities, assigning sets to their parent's pathways.
    for entity_id in entities:
        add_set(entity_id)

    # Loop through entities, assigning a name.
    for entity_id in entities:
        entity_reactome_id = entities[entity_id]
        sc.execute('SELECT name FROM PhysicalEntity_2_name '
                   'WHERE DB_ID=%s AND name_rank=0', (entity_reactome_id,))
        name = sc.fetchone()[0]
        tc.execute('UPDATE objects SET name=? WHERE id=?', (name, entity_id))

    state_set('entities', 3)
    target.commit()

# Register reactome as source for all objects.
tc.execute('INSERT INTO sources '
           ' SELECT id, "reactome" FROM objects ')
target.commit()
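
def run_phase(name, func):
    # Illustrative helper (not in the original script) capturing the pattern
    # used throughout: each phase checks its progress flag in the ``state``
    # table, runs at most once, then records completion and commits -- so the
    # script can be re-run after an interruption without redoing finished
    # phases. Treats a missing or zero flag as "not done yet".
    if not state(name):
        func()
        state_set(name, 1)
        target.commit()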
{ "content_hash": "9e4fadc8f4119be64a522cd90930dee2", "timestamp": "", "source": "github", "line_count": 475, "max_line_length": 100, "avg_line_length": 33.44421052631579, "alnum_prop": 0.5886944479415838, "repo_name": "garba1/GraphMirrors", "id": "3a36fef21c4549f11b2753c5d71daea4d5d60b2d", "size": "15909", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "scripts/database2/create.py", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "32213" }, { "name": "HTML", "bytes": "8434" }, { "name": "JavaScript", "bytes": "588708" }, { "name": "PHP", "bytes": "4090" }, { "name": "Python", "bytes": "71697" } ], "symlink_target": "" }
import numpy as np
from numpy.testing import (assert_array_equal, assert_array_almost_equal,
                           assert_equal)
import pytest

from mne.fixes import is_regressor, is_classifier
from mne.utils import requires_version
from mne.decoding.base import (_get_inverse_funcs, LinearModel, get_coef,
                               cross_val_multiscore)
from mne.decoding.search_light import SlidingEstimator
from mne.decoding import Scaler


def _make_data(n_samples=1000, n_features=5, n_targets=3):
    """Generate some testing data.

    Parameters
    ----------
    n_samples : int
        The number of samples.
    n_features : int
        The number of features.
    n_targets : int
        The number of targets.

    Returns
    -------
    X : ndarray, shape (n_samples, n_features)
        The measured data.
    Y : ndarray, shape (n_samples, n_targets)
        The latent variables generating the data.
    A : ndarray, shape (n_features, n_targets)
        The forward model, mapping the latent variables (=Y) to the measured
        data (=X).
    """
    # Define Y latent factors
    np.random.seed(0)
    cov_Y = np.eye(n_targets) * 10 + np.random.rand(n_targets, n_targets)
    cov_Y = (cov_Y + cov_Y.T) / 2.
    mean_Y = np.random.rand(n_targets)
    Y = np.random.multivariate_normal(mean_Y, cov_Y, size=n_samples)

    # The Forward model
    A = np.random.randn(n_features, n_targets)
    X = Y.dot(A.T)
    X += np.random.randn(n_samples, n_features)  # add noise
    X += np.random.rand(n_features)  # add an offset

    return X, Y, A


@requires_version('sklearn', '0.17')
def test_get_coef():
    """Test getting linear coefficients (filters/patterns) from estimators."""
    from sklearn.base import TransformerMixin, BaseEstimator
    from sklearn.pipeline import make_pipeline
    from sklearn.preprocessing import StandardScaler
    from sklearn.linear_model import Ridge, LinearRegression

    lm = LinearModel()
    assert (is_classifier(lm))

    lm = LinearModel(Ridge())
    assert (is_regressor(lm))

    # Define a classifier, an invertible transformer and a non-invertible one.
    class Clf(BaseEstimator):
        def fit(self, X, y):
            return self

    class NoInv(TransformerMixin):
        def fit(self, X, y):
            return self

        def transform(self, X):
            return X

    class Inv(NoInv):
        def inverse_transform(self, X):
            return X

    X, y, A = _make_data(n_samples=2000, n_features=3, n_targets=1)

    # I. Test inverse function
    # Check that we retrieve the right number of inverse functions even if
    # there are nested pipelines
    good_estimators = [
        (1, make_pipeline(Inv(), Clf())),
        (2, make_pipeline(Inv(), Inv(), Clf())),
        (3, make_pipeline(Inv(), make_pipeline(Inv(), Inv()), Clf())),
    ]

    for expected_n, est in good_estimators:
        est.fit(X, y)
        assert (expected_n == len(_get_inverse_funcs(est)))

    bad_estimators = [
        Clf(),  # no preprocessing
        Inv(),  # final estimator isn't classifier
        make_pipeline(NoInv(), Clf()),  # first step isn't invertible
        make_pipeline(Inv(), make_pipeline(
            Inv(), NoInv()), Clf()),  # nested step isn't invertible
    ]
    for est in bad_estimators:
        est.fit(X, y)
        invs = _get_inverse_funcs(est)
        assert_equal(invs, list())

    # II.
Test get coef for simple estimator and pipelines for clf in (lm, make_pipeline(StandardScaler(), lm)): clf.fit(X, y) # Retrieve final linear model filters = get_coef(clf, 'filters_', False) if hasattr(clf, 'steps'): coefs = clf.steps[-1][-1].model.coef_ else: coefs = clf.model.coef_ assert_array_equal(filters, coefs[0]) patterns = get_coef(clf, 'patterns_', False) assert (filters[0] != patterns[0]) n_chans = X.shape[1] assert_array_equal(filters.shape, patterns.shape, [n_chans, n_chans]) # Inverse transform linear model filters_inv = get_coef(clf, 'filters_', True) assert (filters[0] != filters_inv[0]) patterns_inv = get_coef(clf, 'patterns_', True) assert (patterns[0] != patterns_inv[0]) # Check with search_light and combination of preprocessing ending with sl: slider = SlidingEstimator(make_pipeline(StandardScaler(), lm)) X = np.transpose([X, -X], [1, 2, 0]) # invert X across 2 time samples clfs = (make_pipeline(Scaler(None, scalings='mean'), slider), slider) for clf in clfs: clf.fit(X, y) for inverse in (True, False): patterns = get_coef(clf, 'patterns_', inverse) filters = get_coef(clf, 'filters_', inverse) assert_array_equal(filters.shape, patterns.shape, X.shape[1:]) # the two time samples get inverted patterns assert_equal(patterns[0, 0], -patterns[0, 1]) for t in [0, 1]: assert_array_equal(get_coef(clf.estimators_[t], 'filters_', False), filters[:, t]) # Check patterns with more than 1 regressor for n_features in [1, 5]: for n_targets in [1, 3]: X, Y, A = _make_data(n_samples=5000, n_features=5, n_targets=3) lm = LinearModel(LinearRegression()).fit(X, Y) assert_array_equal(lm.filters_.shape, lm.patterns_.shape) assert_array_equal(lm.filters_.shape, [3, 5]) assert_array_almost_equal(A, lm.patterns_.T, decimal=2) lm = LinearModel(Ridge(alpha=1)).fit(X, Y) assert_array_almost_equal(A, lm.patterns_.T, decimal=2) # Check can pass fitting parameters lm.fit(X, Y, sample_weight=np.ones(len(Y))) @requires_version('sklearn', '0.15') def test_linearmodel(): """Test LinearModel class for computing filters and patterns.""" from sklearn.linear_model import LinearRegression np.random.seed(42) clf = LinearModel() n, n_features = 20, 3 X = np.random.rand(n, n_features) y = np.arange(n) % 2 clf.fit(X, y) assert_equal(clf.filters_.shape, (n_features,)) assert_equal(clf.patterns_.shape, (n_features,)) pytest.raises(ValueError, clf.fit, np.random.rand(n, n_features, 99), y) # check multi-target fit n_targets = 5 clf = LinearModel(LinearRegression()) Y = np.random.rand(n, n_targets) clf.fit(X, Y) assert_equal(clf.filters_.shape, (n_targets, n_features)) assert_equal(clf.patterns_.shape, (n_targets, n_features)) pytest.raises(ValueError, clf.fit, X, np.random.rand(n, n_features, 99)) @requires_version('sklearn', '0.18') def test_cross_val_multiscore(): """Test cross_val_multiscore for computing scores on decoding over time.""" from sklearn.model_selection import KFold, StratifiedKFold, cross_val_score from sklearn.linear_model import LogisticRegression, LinearRegression # compare to cross-val-score X = np.random.rand(20, 3) y = np.arange(20) % 2 clf = LogisticRegression() cv = KFold(2, random_state=0) assert_array_equal(cross_val_score(clf, X, y, cv=cv), cross_val_multiscore(clf, X, y, cv=cv)) # Test with search light X = np.random.rand(20, 4, 3) y = np.arange(20) % 2 clf = SlidingEstimator(LogisticRegression(), scoring='accuracy') scores_acc = cross_val_multiscore(clf, X, y, cv=cv) assert_array_equal(np.shape(scores_acc), [2, 3]) # check values scores_acc_manual = list() for train, test in cv.split(X, 
y):
        clf.fit(X[train], y[train])
        scores_acc_manual.append(clf.score(X[test], y[test]))
    assert_array_equal(scores_acc, scores_acc_manual)

    # check scoring metric
    # raise an error if scoring is defined at cross-val-score level and
    # search light, because search light does not return a 1-dimensional
    # prediction.
    pytest.raises(ValueError, cross_val_multiscore, clf, X, y, cv=cv,
                  scoring='roc_auc')
    clf = SlidingEstimator(LogisticRegression(), scoring='roc_auc')
    scores_auc = cross_val_multiscore(clf, X, y, cv=cv, n_jobs=1)
    scores_auc_manual = list()
    for train, test in cv.split(X, y):
        clf.fit(X[train], y[train])
        scores_auc_manual.append(clf.score(X[test], y[test]))
    assert_array_equal(scores_auc, scores_auc_manual)

    # indirectly test that cross_val_multiscore rightly detects the type of
    # estimator and generates a StratifiedKFold for classifiers and a KFold
    # otherwise
    X = np.random.randn(1000, 3)
    y = np.r_[np.zeros(500), np.ones(500)]
    clf = LogisticRegression(random_state=0)
    reg = LinearRegression()
    for cross_val in (cross_val_score, cross_val_multiscore):
        manual = cross_val(clf, X, y, cv=StratifiedKFold(2))
        auto = cross_val(clf, X, y, cv=2)
        assert_array_equal(manual, auto)

        pytest.raises(ValueError, cross_val, clf, X, y, cv=KFold(2))

        manual = cross_val(reg, X, y, cv=KFold(2))
        auto = cross_val(reg, X, y, cv=2)
        assert_array_equal(manual, auto)
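
def _haufe_patterns(X, w):
    """A sketch (added illustration, not part of the test suite) of the
    relation these tests rely on: for a single-target linear backward model
    with filter vector ``w`` of shape (n_features,), the activation pattern
    is recovered by multiplying with the data covariance (Haufe et al.,
    2014), up to a scaling by the variance of the estimated target."""
    Xc = X - X.mean(axis=0)
    return np.cov(Xc, rowvar=False).dot(w)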
{ "content_hash": "fb6ae173244ab7dd57730d94311df66d", "timestamp": "", "source": "github", "line_count": 246, "max_line_length": 79, "avg_line_length": 36.9349593495935, "alnum_prop": 0.6222760290556901, "repo_name": "teonlamont/mne-python", "id": "7f6263e1de344778ec73734661f9861fb33c4391", "size": "9220", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "mne/decoding/tests/test_base.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Makefile", "bytes": "3117" }, { "name": "PowerShell", "bytes": "2988" }, { "name": "Python", "bytes": "4354605" }, { "name": "Shell", "bytes": "936" } ], "symlink_target": "" }
"""Encode and decode BASE58, P2PKH and P2SH addresses.""" from .script import hash256, hash160, sha256, CScript, OP_0 from .util import bytes_to_hex_str, hex_str_to_bytes from . import segwit_addr chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' def byte_to_base58(b, version): result = '' str = bytes_to_hex_str(b) str = bytes_to_hex_str(chr(version).encode('latin-1')) + str checksum = bytes_to_hex_str(hash256(hex_str_to_bytes(str))) str += checksum[:8] value = int('0x'+str,0) while value > 0: result = chars[value % 58] + result value //= 58 while (str[:2] == '00'): result = chars[0] + result str = str[2:] return result # TODO: def base58_decode def keyhash_to_p2pkh(hash, main = False): assert (len(hash) == 20) version = 0 if main else 111 return byte_to_base58(hash, version) def scripthash_to_p2sh(hash, main = False): assert (len(hash) == 20) version = 5 if main else 58 return byte_to_base58(hash, version) def key_to_p2pkh(key, main = False): key = check_key(key) return keyhash_to_p2pkh(hash160(key), main) def script_to_p2sh(script, main = False): script = check_script(script) return scripthash_to_p2sh(hash160(script), main) def key_to_p2sh_p2wpkh(key, main = False): key = check_key(key) p2shscript = CScript([OP_0, hash160(key)]) return script_to_p2sh(p2shscript, main) def program_to_witness(version, program, main = False): if (type(program) is str): program = hex_str_to_bytes(program) assert 0 <= version <= 16 assert 2 <= len(program) <= 40 assert version > 0 or len(program) in [20, 32] return segwit_addr.encode("DRC" if main else "rDRC", version, program) def script_to_p2wsh(script, main = False): script = check_script(script) return program_to_witness(0, sha256(script), main) def key_to_p2wpkh(key, main = False): key = check_key(key) return program_to_witness(0, hash160(key), main) def script_to_p2sh_p2wsh(script, main = False): script = check_script(script) p2shscript = CScript([OP_0, sha256(script)]) return script_to_p2sh(p2shscript, main) def check_key(key): if (type(key) is str): key = hex_str_to_bytes(key) # Assuming this is hex string if (type(key) is bytes and (len(key) == 33 or len(key) == 65)): return key assert(False) def check_script(script): if (type(script) is str): script = hex_str_to_bytes(script) # Assuming this is hex string if (type(script) is bytes or type(script) is CScript): return script assert(False)
{ "content_hash": "9a7fce92eb44f644c250a1247bd43dfa", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 74, "avg_line_length": 31.759036144578314, "alnum_prop": 0.6498482549317147, "repo_name": "doriancoins/doriancoin", "id": "b24d5455f1b4e7e2278de82265452959c914dff4", "size": "2853", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/functional/test_framework/address.py", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "28453" }, { "name": "C", "bytes": "685584" }, { "name": "C++", "bytes": "5472199" }, { "name": "HTML", "bytes": "21860" }, { "name": "Java", "bytes": "30314" }, { "name": "M4", "bytes": "196107" }, { "name": "Makefile", "bytes": "114818" }, { "name": "Objective-C", "bytes": "2171" }, { "name": "Objective-C++", "bytes": "6765" }, { "name": "Python", "bytes": "1309467" }, { "name": "QMake", "bytes": "759" }, { "name": "Shell", "bytes": "66893" } ], "symlink_target": "" }
""" A module with implemented quaternion arithemtics. Author: Jernej Kovacic """ import math import exception from instance_checker import InstanceCheck class QuaternionException(exception.IException) : """Exception raised at illegal quaternion operations""" pass class Quaternion() : """ This class implements quaternion arithmetics, e.g. basic operations, norm, etc. """ # Internal instance members: # o - quaternion's scalar component # i - quaternion's 'i' component # j - quaternion's 'j' component # k - quaternion's 'k' component """Tolerance for determination whether a number is "close enough" to zero""" eps = 1e-12 def __init__(self, o=0.0, i=0.0, j=0.0, k=0.0) : """ A "constructor" that creates an instance of a quaternion and assigns values to its components. Input: o - scalar component of the quaternion (default: 0) i - component 'i' of the quaternion (default: 0) j - component 'j' of the quaternion (default: 0) k - component 'k' of the quaternion (default: 0) Alternatively 'o' may be a quaternion. In this case, its components are copied into self's ones and all other input arguments are ignored. A QuaternionExceptionxception is raised if any argument is not an instance of supported types (int, float, Quaternion). """ # all functionality is already implemented by setQ self.setQ(o, i, j, k) def setQ(self, o=0.0, i=0.0, j=0.0, k=0.0) : """ Assigns values of all quaternion's components. Input: o - scalar component of the quaternion (default: 0) i - component 'i' of the quaternion (default: 0) j - component 'j' of the quaternion (default: 0) k - component 'k' of the quaternion (default: 0) Alternatively 'o' may be a quaternion. In this case, its components are copied into self's ones and all other input arguments are ignored. A QuaternionExceptionxception is raised if any argument is not an instance of supported types (int, float, Quaternion). """ if Quaternion.isQuaternion(o) : # If o is a quaternion, simply copy its components... self.o = o.o self.i = o.i self.j = o.j self.k = o.k else: # otherwise check if all inputs are floats or integers... if not ( InstanceCheck.isFloat(o) and InstanceCheck.isFloat(i) and InstanceCheck.isFloat(j) and InstanceCheck.isFloat(k) ) : raise QuaternionException("Invalid input arguments") # if they are, just assign their values to quaternion's components self.o = o; self.i = i; self.j = j; self.k = k; return self def getScalar(self) : """Returns the scalar component of the quaternion""" return self.o def getI(self) : """Returns the component 'i' of the quaternion""" return self.i def getJ(self) : """Returns the component 'j' of the quaternion""" return self.j def getK(self) : """Returns the component 'k' of the quaternion""" return self.k def setScalar(self, o=0.0) : """ Assigns the scalar component of the quaternion. Input: o - value of the scalar component (default: 0) Raises a QuaternionException if 'o' is not an instance of a supported type (float or int). """ if not InstanceCheck.isFloat(o) : raise QuaternionException("Invalid input argument") self.o = o return self def setI(self, i=0.0) : """ Assigns the component 'i' of the quaternion. Input: i - value of the component 'i' (default: 0) Raises a QuaternionException if 'i' is not an instance of a supported type (float or int). """ if not InstanceCheck.isFloat(i) : raise QuaternionException("Invalid input argument") self.i = i; return self def setJ(self, j=0.0) : """ Assigns the component 'j' of the quaternion. 
Input: j - value of the component 'j' (default: 0) Raises a QuaternionException if 'j' is not an instance of a supported type (float or int). """ if not InstanceCheck.isFloat(j) : raise QuaternionException("Invalid input argument") self.j = j return self def setK(self, k=0.0) : """ Assigns the component 'k' of the quaternion. Input: k - value of the component 'k' (default: 0) Raises a QuaternionException if 'k' is not an instance of a supported type (float or int). """ if not InstanceCheck.isFloat(k) : raise QuaternionException("Invalid input argument") self.k = k return self def __add__(self, q) : """ Implementation of the addition operator '+' of two quaternions. Input: q - quaternion or a float value to be added to this one Return: a new instance of Quaternion A QuaternionException is raised if 'q' is not an instance of Quaternion, float or int. """ # Addition of quaternions is trivial: # # (a1 + b1*i + c1*j + d1*k) + (a2 + b2*i + c2*j + d2*k) = # = ( (a1+a2) + (b1+b2)*i + (c1+c2)*j + (d1+d2)*k ) if Quaternion.isQuaternion(q) : return Quaternion( self.o + q.o, self.i + q.i, self.j + q.j, self.k + q.k ) elif InstanceCheck.isFloat(q) : return Quaternion( self.o + q, self.i, self.j, self.k ) else: raise QuaternionException("Input must be a quaternion or a float") def __sub__(self, q) : """ Implementation of the subtraction operator '-' of two quaternions. Input: q - quaternion or a float value to be subtracted from this one Return: a new instance of Quaternion A QuaternionException is raised if 'q' is not an instance of Quaternion, float or int. """ # Subtraction of quaternions is trivial: # # (a1 + b1*i + c1*j + d1*k) - (a2 + b2*i + c2*j + d2*k) = # = ( (a1-a2) + (b1-b2)*i + (c1-c2)*j + (d1-d2)*k ) if Quaternion.isQuaternion(q) : return Quaternion( self.o - q.o, self.i - q.i, self.j - q.j, self.k - q.k ) elif InstanceCheck.isFloat(q) : return Quaternion( self.o - q, self.i, self.j, self.k ) else: raise QuaternionException("Input must be a quaternion or a float") def __mul__(self, q) : """ Implementation of the multiplication operator '*' of two quaternions. Note that multiplication of quaternions is not commutative: (p*q != q*p) Input: q - quaternion or a float value to be multiplied by this one Return: a new instance of Quaternion A QuaternionException is raised if 'q' is not an instance of Quaternion, float or int. 
""" # From the following definitions: # i*i = j*j = k*k = -1, # i*j = k, j*i = -k, j*k = i, k*j = -i, k*i = j and i*k = -j, # the following formula can be quickly derived: # # (a1 + b1*i + c1*j + d1*k) * (a2 + b2*i + c2*j + d2*k) = # = (a1*a2 - b1*b2 - c1*c2 - d1*d2) + # + (a1*b2 + b1*a2 + c1*d2 - d1*c2) * i + # + (a1*c2 - b1*d2 + c1*a2 + d1*b2) * j + # + (a1*d2 + b1*c2 - c1*b2 + d1*a2) * k # # Note: The following script for GNU Octave or Matlab can be used # for a quick unit test of the function: # http://mind.cog.jhu.edu/courses/680/octave/Installers/Octave/Octave.OSX10.6/Applications/MATLAB_R2009b.app/toolbox/aero/aero/quatmultiply.m if Quaternion.isQuaternion(q) : return Quaternion( self.o * q.o - self.i * q.i - self.j * q.j - self.k * q.k, self.o * q.i + self.i * q.o + self.j * q.k - self.k * q.j, self.o * q.j - self.i * q.k + self.j * q.o + self.k * q.i, self.o * q.k + self.i * q.j - self.j * q.i + self.k * q.o ) elif InstanceCheck.isFloat(q) : return Quaternion( self.o * q, self.i * q, self.j * q, self.k * q ) else: raise QuaternionException("Input must be a quaternion or a float") def __iadd__(self, q) : """ Addition operator (+=) that adds a quaternion to this one and assigns the sum to itself. Input: q - quaternion or a float value to be added to this one Return: a reference to itself A QuaternionException is raised if 'q' is not an instance of Quaternion, float or int. """ # For a definition of quaternion addition, see __add__ if Quaternion.isQuaternion(q) : self.o += q.o self.i += q.i self.j += q.j self.k += q.k elif InstanceCheck.isFloat(q) : self.o += q else: raise QuaternionException("Input must be a quaternion or a float") return self def __isub__(self, q) : """ Subtraction operator (-=) that subtracts a quaternion from this one and assigns the difference to itself. Input: q - quaternion or a float value to be subtracted from this one Return: a reference to itself A QuaternionException is raised if 'q' is not an instance of Quaternion, float or int. """ # For a definition of quaternion subtraction, see __sub__ if Quaternion.isQuaternion(q) : self.o -= q.o self.i -= q.i self.j -= q.j self.k -= q.k elif InstanceCheck.isFloat(q) : self.o -= q else: raise QuaternionException("Input must be a quaternion or a float") return self def __imul__(self, q) : """ Multiplication operator (*=) that multiplies this by a quaternion and assigns the product to itself. Input: q - quaternion or a float value to be multiplied to this one Return: a reference to itself A QuaternionException is raised if 'q' is not an instance of Quaternion, float or int. """ # For a definition of quaternion multiplication, see __mul__ if Quaternion.isQuaternion(q) : # From maintanance poit of view, this would # a more elegant solution: #qaux = self * q; #self.o = qaux.o; #self.i = qaux.i #self.j = qaux.j #self.k = qaux.k # However, this one slightly reduces overhead with # instantiation and destruction of another instance of Quaternion: self.o, self.i, self.j, self. k = \ self.o * q.o - self.i * q.i - self.j * q.j - self.k * q.k, \ self.o * q.i + self.i * q.o + self.j * q.k - self.k * q.j, \ self.o * q.j - self.i * q.k + self.j * q.o + self.k * q.i, \ self.o * q.k + self.i * q.j - self.j * q.i + self.k * q.o elif InstanceCheck.isFloat(q) : self.o *= q self.i *= q self.j *= q self.k *= q else: raise QuaternionException("Input must be a quaternion or a float") return self def __neg__(self) : """ Unary negation operator (-). 
        Return:
        negated -self (all components are negated)
        """

        return Quaternion(
            -self.o,
            -self.i,
            -self.j,
            -self.k )

    def conj(self) :
        """
        Conjugation of a quaternion, i.e. the components 'i', 'j' and 'k'
        are negated.

        Return:
        conjugation of self
        """

        return Quaternion(
            self.o,
            -self.i,
            -self.j,
            -self.k )

    def __sqsum(self) :
        # An auxiliary method that calculates the sum of all components' squares
        return self.o*self.o + self.i*self.i + self.j*self.j + self.k*self.k

    def norm(self) :
        """
        Norm of a quaternion, i.e. a square root of the sum of all
        components' squares
        """

        return math.sqrt(self.__sqsum())

    def reciprocal(self) :
        """
        Reciprocal of a quaternion (q^(-1)), satisfying the condition:
        q*q^(-1) = q^(-1)*q = 1.

        A QuaternionException is raised if quaternion's norm equals 0.
        """

        # Reciprocal of q is defined as:
        #
        #   q^(-1) = q* / ||q||^2
        #
        # The following formula can be derived from it:
        #
        #                           a - b*i - c*j - d*k
        #  (a+b*i+c*j+d*k)^(-1) = -------------------------
        #                          a^2 + b^2 + c^2 + d^2

        nsq = self.__sqsum()
        if nsq < Quaternion.eps :
            raise QuaternionException("Reciprocal of a zero-quaternion does not exist")

        return Quaternion(
            self.o / nsq,
            -self.i / nsq,
            -self.j / nsq,
            -self.k / nsq )

    def unit(self) :
        """
        A unit quaternion of 'self', i.e. its norm is equal to 1.

        A QuaternionException is raised if quaternion's norm equals 0.
        """

        n = self.norm()
        if n < Quaternion.eps :
            raise QuaternionException("Cannot normalize a zero-quaternion")

        return Quaternion(
            self.o / n,
            self.i / n,
            self.j / n,
            self.k / n )

    def __str__(self) :
        """
        "Nicely" formatted output of the quaternion (e.g. 4-5i+7j-3k).

        The method is called by print().
        """

        # Primarily the method was introduced for brief unit testing purposes
        # and not much effort was invested into a visually "nice" output

        outstr = str(self.o)

        if self.i >= 0 :
            outstr += '+'
        outstr += str(self.i) + 'i'

        if self.j >= 0 :
            outstr += '+'
        outstr += str(self.j) + 'j'

        if self.k >= 0 :
            outstr += '+'
        outstr += str(self.k) + 'k'

        return outstr

    @staticmethod
    def isQuaternion(q) :
        """Is 'q' an instance of Quaternion"""
        return isinstance(q, Quaternion)
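
# A minimal usage sketch (assuming the constructor takes the four components
# positionally, as the operator implementations above suggest; values are
# illustrative):
#
#   p = Quaternion(1.0, 0.0, 1.0, 0.0)
#   q = Quaternion(1.0, 0.5, 0.5, 0.75)
#   print(p * q)               # quaternion product
#   print(q * p)               # differs from p*q: multiplication is not commutative
#   print(q.conj())            # 1.0-0.5i-0.5j-0.75k
#   print(q * q.reciprocal())  # approximately 1+0i+0j+0k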
{ "content_hash": "40001028fd7cafb700289f641ddf752b", "timestamp": "", "source": "github", "line_count": 504, "max_line_length": 149, "avg_line_length": 30.628968253968253, "alnum_prop": 0.5085184945261385, "repo_name": "jkovacic/py-quat-rotation", "id": "0e7c8f4ca46da77b3bff5c9e1e77429187a6d9e5", "size": "16013", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "quaternion.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "33683" } ], "symlink_target": "" }
import numpy, bpy, sys

## Checkalive Function
## ---------------------------------------------------------
## Desc:    Given a 3D matrix and a point in said matrix,
##          run a modified game of life simulation, inspecting
##          all the points around point x,y,z
## ---------------------------------------------------------
## Input:   matrix: A 3D Numpy array of booleans
##          size:   Int defining size (x,y,z) of matrix
##          x,y,z:  Pretty self explanatory
## ---------------------------------------------------------
## Output:  A value for the next iteration of point x,y,z
## ---------------------------------------------------------
def checkalive(matrix, size, x, y, z):
    count = 0
    for i in range(x-1, x+2, 1):
        for j in range(y-1, y+2, 1):
            for k in range(z-1, z+2, 1):
                if i == x and j == y and k == z:
                    pass
                else:
                    ## Wrap around
                    if i == size:
                        i = 0
                    if j == size:
                        j = 0
                    if k == size:
                        k = 0
                    count += int(matrix[i,j,k])

    ### Rules for Dead Points
    if matrix[x,y,z] == False:
        ## Come to life if 2 to 4 neighbours are alive
        #if count == 4:
        if count >=2 and count <=4:
            return True
        else:
            return False
    ### Rules for Live Points
    else:
        if count >= 3 and count <= 8:
            return True
        else:
            return False

## Iterate Function
## ---------------------------------------------------------
## Desc:    Given a 3D matrix and a number of iterations to
##          run through, produce the final iteration of the
##          game of life simulation
## ---------------------------------------------------------
## Input:   matrix:  A 3D Numpy array of booleans
##          size:    Int defining size (x,y,z) of matrix
##          numiter: Number of iterations to run
## ---------------------------------------------------------
## Output:  The resulting boolean matrix
## ---------------------------------------------------------
def iterate(matrix, size, numiter):
    for iter in range(numiter):
        transfer = numpy.zeros((size,size,size), dtype=bool)
        for i in range(size):
            for j in range(size):
                for k in range(size):
                    transfer[i,j,k] = checkalive(matrix, size, i,j,k)
        matrix = transfer
    return matrix

## Populate Function
## ---------------------------------------------------------
## Desc:    Create the points and faces needed to generate a
##          3D Object
## ---------------------------------------------------------
## Input:   matrix: A 3D Numpy array of booleans
##          size:   Int defining size (x,y,z) of matrix
## ---------------------------------------------------------
## Output:  A list of (x,y,z) points and a list of points to
##          connect via their indices in the first list
## ---------------------------------------------------------
def populate(matrix, size):
    points = []
    faces = []
    const = numpy.sqrt(1/4.0)
    for i in range(size):
        for j in range(size):
            for k in range(size):
                if matrix[i,j,k] == True:
                    x,y,z = i-size/2, j-size/2, k-size/2
                    ## Make all the points in the cube
                    newpoints = [
                        (x+const, y+const, z+const),
                        (x+const, y+const, z-const),
                        (x-const, y+const, z-const),
                        (x-const, y+const, z+const),
                        (x+const, y-const, z+const),
                        (x+const, y-const, z-const),
                        (x-const, y-const, z-const),
                        (x-const, y-const, z+const),
                    ]
                    points.extend(newpoints)
                    ## Grab the last 8 point indices
                    p = range(len(points)-8, len(points))
                    ## Create all the necessary faces
                    newfaces = [
                        (p[0],p[1],p[2],p[3]),
                        (p[4],p[5],p[6],p[7]),
                        (p[0],p[4],p[5],p[1]),
                        (p[1],p[5],p[6],p[2]),
                        (p[2],p[6],p[7],p[3]),
                        (p[3],p[0],p[4],p[7]),
                    ]
                    faces.extend(newfaces)
    return points, faces

## Make Materials Function
def makeMaterial(diffuse):
    mat = bpy.data.materials.new("Material")
    mat.diffuse_color = diffuse
    mat.diffuse_shader = 'LAMBERT'
    mat.diffuse_intensity = 1.0
mat.specular_color = (0,0,0) mat.specular_shader = 'COOKTORR' mat.specular_intensity = 0 mat.alpha = 1 mat.ambient = 1 return mat ## Set Materials Function def setMaterial(ob, mat): me = ob.data me.materials.append(mat) size = 8 matrix = numpy.zeros((size,size,size), dtype=bool) ## Take input from commandline ip = (sys.argv[6]) print(ip) val = [] ## Get bits from IP address [val.extend(list(bin(int(x)+256)[3:])) for x in "{0}".format(ip).split('.')] for i in range(2): for j in range(4): for k in range(4): matrix[1+j,1+k,3+i] = bool(int(val[i*16+j*4+k])) points, faces = populate(iterate(matrix,size,3),size) ## BLENDER STUFF FROM BLENDER TUTORIAL me = bpy.data.meshes.new("conway") # create a new mesh ob = bpy.data.objects.new("conway", me) # create an object with that mesh ob.location = bpy.context.scene.cursor_location # position object at 3d-cursor bpy.context.scene.objects.link(ob) # Link object to scene # Fill the mesh with verts, edges, faces me.from_pydata(points,[],faces) # edges or faces should be [], or you ask for problems me.update(calc_edges=True) # Update mesh with new data ## Make material ipbits = [int(i)/255.0 for i in ip.split(".")] blue = makeMaterial((ipbits[0],ipbits[1],ipbits[2])) setMaterial(ob,blue) ## Render Scene bpy.data.scenes["Scene"].render.filepath = '//%s' % (ip) bpy.ops.render.render( write_still=True )
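
## Aside: the bin(int(x)+256)[3:] trick above always yields a fixed-width
## 8-bit string per octet (adding 256 forces a 9-bit value whose "0b1"
## prefix is then sliced off).  A quick sanity check outside Blender,
## no bpy needed:
##
##   >>> [bin(int(x) + 256)[3:] for x in "192.168.0.1".split('.')]
##   ['11000000', '10101000', '00000000', '00000001']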
{ "content_hash": "8f3ceb4b7031cd09d98468093107d2c9", "timestamp": "", "source": "github", "line_count": 167, "max_line_length": 88, "avg_line_length": 31.832335329341316, "alnum_prop": 0.5349887133182845, "repo_name": "databard/Life-3D", "id": "0bc29e37b001cfc90d97446229b1fc72c378a1e0", "size": "5316", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "5316" } ], "symlink_target": "" }
from collections import OrderedDict import tornado.web from .exceptions import ForecastError, SettingsError from .middlewares import Middlewares from .utils import import_object from .environment import Environment from .settings import Settings from .manager import Manager from .routes import Routes class Project(object): def __init__(self, environment=None, settings=None, manager=None, routes=None, middlewares=None): self.environment = self._initialize(environment, Environment) settings_list = self.environment.get_settings() self.settings = self._initialize(settings, Settings, settings_list) self.manager = self._initialize(manager, Manager, self.settings) self.routes = self._initialize(routes, Routes, self.settings) self.middlewares = self._initialize(middlewares, Middlewares, self.settings) self.applications = OrderedDict() def _initialize(self, attribute, cls, *args, **kwargs): if attribute is None: return cls(*args, **kwargs) return attribute def install_application(self, application_package, base_url=""): if application_package in self.applications: return try: _, application_class = import_object("%s.Application" % (application_package,)) except ImportError: raise ForecastError("%s.Application not found." % (application_package,)) application = application_class(application_package, self.settings) self.routes.register(application.routes, base_url) self.manager.register(application.commands) self.middlewares.register(application.middlewares) self.applications[application_package] = application def install_applications(self): if "forecast.applications.core" not in self.applications: self.install_application("forecast.applications.core") for application in self.settings.get('applications', []): if isinstance(application, basestring): application_path = application base_url = "" elif isinstance(application, (tuple, list)): application_path, base_url = application else: raise SettingsError("Invalid application spec: %r" % (application,)) self.install_application(application_path, base_url) def run(self): if not self.applications: self.install_applications() return self.manager.run(self) def get_tornado_application(self): handlers = self.routes.get_urlspecs() settings = self.settings['tornado'] return TornadoApplication(self, handlers=handlers, **settings) class TornadoApplication(tornado.web.Application): def __init__(self, project, *args, **kwargs): self.project = project super(TornadoApplication, self).__init__(*args, **kwargs)
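
# Example (hypothetical settings entry): each application may be given either
# as a bare package path or as a (package_path, base_url) pair, matching the
# two branches handled in install_applications() above:
#
#   settings['applications'] = [
#       "myproject.apps.blog",           # mounted at the root
#       ("myproject.apps.api", "/api"),  # mounted under /api
#   ]
#
# Each listed package is expected to expose an Application class, which
# install_application() imports as "<package>.Application".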
{ "content_hash": "0fbfbdb680a00a98304639f30174ccf8", "timestamp": "", "source": "github", "line_count": 79, "max_line_length": 101, "avg_line_length": 36.924050632911396, "alnum_prop": 0.6739801165581076, "repo_name": "osantana/forecast", "id": "72129ca5e98f33b04cc054fea2209b79fc7c39e6", "size": "2935", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "forecast/project.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "64062" } ], "symlink_target": "" }
import click import globus_sdk from globus_cli.login_manager import LoginManager from globus_cli.parsing import ( ENDPOINT_PLUS_REQPATH, command, delete_and_rm_options, synchronous_task_wait_options, task_submission_options, ) from globus_cli.termio import Field, display, err_is_terminal, term_is_interactive from ._common import transfer_task_wait_with_io @command( "rm", short_help="Delete a single path; wait for it to complete", adoc_examples="""Delete a single file. [source,bash] ---- $ ep_id=ddb59af0-6d04-11e5-ba46-22000b92c6ec $ globus rm $ep_id:~/myfile.txt ---- Delete a directory recursively. [source,bash] ---- $ ep_id=ddb59af0-6d04-11e5-ba46-22000b92c6ec $ globus rm $ep_id:~/mydir --recursive ---- """, ) @task_submission_options @delete_and_rm_options(supports_batch=False, default_enable_globs=True) @synchronous_task_wait_options @click.argument("endpoint_plus_path", type=ENDPOINT_PLUS_REQPATH) @LoginManager.requires_login(LoginManager.TRANSFER_RS) def rm_command( *, login_manager: LoginManager, ignore_missing, star_silent, recursive, enable_globs, endpoint_plus_path, label, submission_id, dry_run, deadline, skip_activation_check, notify, meow, heartbeat, polling_interval, timeout, timeout_exit_code, ): """ Submit a Delete Task to delete a single path, and then block and wait for it to complete. Output is similar to *globus task wait*, and it is safe to *globus task wait* on a *globus rm* which timed out. Symbolic links are never followed - only unlinked (deleted). {AUTOMATIC_ACTIVATION} """ from globus_cli.services.transfer import autoactivate endpoint_id, path = endpoint_plus_path transfer_client = login_manager.get_transfer_client() # attempt to activate unless --skip-activation-check is given if not skip_activation_check: autoactivate(transfer_client, endpoint_id, if_expires_in=60) delete_data = globus_sdk.DeleteData( transfer_client, endpoint_id, label=label, recursive=recursive, submission_id=submission_id, deadline=deadline, additional_fields={ "ignore_missing": ignore_missing, "skip_activation_check": skip_activation_check, "interpret_globs": enable_globs, **notify, }, ) if not star_silent and enable_globs and path.endswith("*"): # not intuitive, but `click.confirm(abort=True)` prints to stdout # unnecessarily, which we don't really want... # only do this check if stderr is a pty if ( err_is_terminal() and term_is_interactive() and not click.confirm( f'Are you sure you want to delete all files matching "{path}"?', err=True, ) ): click.echo("Aborted.", err=True) click.get_current_context().exit(1) delete_data.add_item(path) if dry_run: display(delete_data, response_key="DATA", fields=[Field("Path", "path")]) # exit safely return # Print task submission to stderr so that `-Fjson` is still correctly # respected, as it will be by `task wait` res = transfer_client.submit_delete(delete_data) task_id = res["task_id"] click.echo(f'Delete task submitted under ID "{task_id}"', err=True) # do a `task wait` equivalent, including printing and correct exit status transfer_task_wait_with_io( transfer_client, meow, heartbeat, polling_interval, timeout, task_id, timeout_exit_code, )
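
# For reference, the --dry-run branch above renders the DeleteData document's
# "DATA" list with a single "Path" field, so the output is one row per
# submitted path, e.g. (illustrative):
#
#   Path
#   ------------
#   ~/myfile.txt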
{ "content_hash": "dbe027901ee7ea6239723bc764f2fb99", "timestamp": "", "source": "github", "line_count": 134, "max_line_length": 83, "avg_line_length": 27.671641791044777, "alnum_prop": 0.6426645091693636, "repo_name": "globus/globus-cli", "id": "d940b45fe3634ec1114d417183796999bae4c0c0", "size": "3708", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "src/globus_cli/commands/rm.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Jinja", "bytes": "425" }, { "name": "Makefile", "bytes": "764" }, { "name": "Python", "bytes": "746729" }, { "name": "Shell", "bytes": "776" } ], "symlink_target": "" }
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('registration', '0001_initial'), ] operations = [ migrations.AlterField( model_name='hello', name='identifier', field=models.UUIDField(help_text='Unique identifier (uuid4)', primary_key=True, serialize=False, unique=True), ), migrations.AlterField( model_name='hello', name='message', field=models.TextField(help_text='Any hello message?', max_length=1000), ), migrations.AlterField( model_name='hello', name='name', field=models.TextField(help_text='Please specify a name', max_length=100), ), ]
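
# For reference, the Hello model fields after this migration (reconstructed
# from the AlterField operations above):
#
#   identifier = models.UUIDField(primary_key=True, unique=True,
#                                 help_text='Unique identifier (uuid4)')
#   message    = models.TextField(max_length=1000,
#                                 help_text='Any hello message?')
#   name       = models.TextField(max_length=100,
#                                 help_text='Please specify a name')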
{ "content_hash": "d9279acfd3104ff8e620747ff57ab4be", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 122, "avg_line_length": 29.285714285714285, "alnum_prop": 0.5841463414634146, "repo_name": "acreations/rockit", "id": "f8bc59b09b4ee81fa135e5b28c7a59a016633d65", "size": "891", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rockit/features/registration/migrations/0002_auto_20160904_0750.py", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "1467" }, { "name": "Python", "bytes": "30409" } ], "symlink_target": "" }
import os
import sys

import sphinx_rtd_theme

src_paths = [
    os.path.abspath('src/mercury-common'),
    os.path.abspath('src/mercury-inventory'),
    os.path.abspath('src/mercury-log'),
    os.path.abspath('src/mercury-rpc')
]

sys.path += src_paths

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.todo',
              'sphinx.ext.coverage',
              'sphinx.ext.mathjax',
              'sphinx.ext.githubpages']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'Mercury'
copyright = '2017, Jared Rodriguez'
author = 'Jared Rodriguez'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.7'
# The full version, including alpha/beta/rc tags.
release = '0.0.7'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True


# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']


# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'Mercurydoc'


# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [ (master_doc, 'Mercury.tex', 'Mercury Documentation', 'Jared Rodriguez', 'manual'), ] # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'mercury', 'Mercury Documentation', [author], 1) ] # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'Mercury', 'Mercury Documentation', author, 'Mercury', 'One line description of project.', 'Miscellaneous'), ]
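
# With this configuration living in docs/conf.py, the HTML docs are typically
# built from the repository root with (assuming sphinx and sphinx_rtd_theme
# are installed):
#
#   sphinx-build -b html docs docs/_build/html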
{ "content_hash": "7b56b1fea229dcb8cea9f314d0f31259", "timestamp": "", "source": "github", "line_count": 152, "max_line_length": 78, "avg_line_length": 29.092105263157894, "alnum_prop": 0.6612392582541836, "repo_name": "jr0d/mercury", "id": "3e2a8e9b72985803e28c092ed0bfcef99d4df3b7", "size": "5106", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/conf.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "420" }, { "name": "Gherkin", "bytes": "62330" }, { "name": "Makefile", "bytes": "1627" }, { "name": "Python", "bytes": "323696" }, { "name": "Shell", "bytes": "1694" } ], "symlink_target": "" }
"""Host a class that controls the way we interact with quick pannel.""" import logging import sublime from ..error_vis.popup_error_vis import MIN_ERROR_SEVERITY log = logging.getLogger("ECC") class ErrorQuickPanelHandler(): """Handle the quick panel.""" ENTRY_TEMPLATE = "{type}: {error}" def __init__(self, view, errors): """Initialize the object. Args: view (sublime.View): Current view. errors (list(dict)): A list of error dicts. """ self.view = view self.errors = errors def items_to_show(self): """Present errors as list of lists.""" contents = [] for error_dict in self.errors: error_type = 'ERROR' if error_dict['severity'] < MIN_ERROR_SEVERITY: error_type = 'WARNING' contents.append( [ ErrorQuickPanelHandler.ENTRY_TEMPLATE.format( type=error_type, error=error_dict['error']), error_dict['file'] ]) return contents def on_done(self, idx): """Pick this error to navigate to a file.""" log.debug("Picked idx: %s", idx) if idx < 0 or idx >= len(self.errors): return None return self.view.window().open_file(self.__get_formatted_location(idx), sublime.ENCODED_POSITION) def __get_formatted_location(self, idx): picked_entry = self.errors[idx] return "{file}:{row}:{col}".format(file=picked_entry['file'], row=picked_entry['row'], col=picked_entry['col']) def show(self, window): """Show the quick panel.""" start_idx = 0 window.show_quick_panel( self.items_to_show(), self.on_done, sublime.MONOSPACE_FONT, start_idx)
{ "content_hash": "1790b0d55bf8517a5e4e4d686726f708", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 79, "avg_line_length": 31.634920634920636, "alnum_prop": 0.5132965378825891, "repo_name": "niosus/EasyClangComplete", "id": "0d488ec192244e3666b083ee268ddf0ca873fa4a", "size": "1993", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "plugin/utils/quick_panel_handler.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "469" }, { "name": "C++", "bytes": "4969" }, { "name": "CMake", "bytes": "1160" }, { "name": "CSS", "bytes": "136" }, { "name": "Makefile", "bytes": "444" }, { "name": "Objective-C", "bytes": "4185" }, { "name": "Objective-C++", "bytes": "87" }, { "name": "Python", "bytes": "1615297" }, { "name": "Starlark", "bytes": "105" } ], "symlink_target": "" }
""" esky.bdist_esky.f_py2exe: bdist_esky support for py2exe """ from __future__ import with_statement import os import sys import imp import time import zipfile import marshal import struct import shutil import inspect import zipfile import ctypes from py2exe.distutils_build_exe import py2exe import esky from esky.util import is_core_dependency, ESKY_CONTROL_DIR from esky import winres try: import py2exe.mf as modulefinder except ImportError: modulefinder = None # Hack to make win32com work seamlessly with py2exe if modulefinder is not None: try: import win32com for p in win32com.__path__[1:]: modulefinder.AddPackagePath("win32com", p) for extra in ["win32com.shell"]: #,"win32com.mapi" __import__(extra) m = sys.modules[extra] for p in m.__path__[1:]: modulefinder.AddPackagePath(extra, p) except ImportError: pass class custom_py2exe(py2exe): """Custom py2exe command subclass. This py2exe command subclass incorporates some well-known py2exe "hacks" to make common third-party packages work better. """ def create_modulefinder(self): mf = py2exe.create_modulefinder(self) self.__mf = mf return mf def build_manifest(self,target,template): (mfest,mid) = py2exe.build_manifest(self,target,template) # Hack to get proper UI theme when freezing wxPython if mfest is not None: if "wx" in self.__mf.modules: mfest = mfest.replace("</assembly>",""" <dependency> <dependentAssembly> <assemblyIdentity type="win32" name="Microsoft.Windows.Common-Controls" version="6.0.0.0" processorArchitecture="*" publicKeyToken="6595b64144ccf1df" language="*" /> </dependentAssembly> </dependency> </assembly>""") return (mfest,mid) def freeze(dist): """Freeze the given distribution data using py2exe.""" includes = dist.includes excludes = dist.excludes options = dist.freezer_options # Merge in any encludes/excludes given in freezer_options includes.append("esky") for inc in options.pop("includes",()): includes.append(inc) for exc in options.pop("excludes",()): excludes.append(exc) if "pypy" not in includes and "pypy" not in excludes: excludes.append("pypy") # py2exe expects some arguments on the main distribution object. # We handle data_files ourselves, so fake it out for py2exe. if getattr(dist.distribution,"console",None): msg = "don't call setup(console=[...]) with esky;" msg += " use setup(scripts=[...]) instead" raise RuntimeError(msg) if getattr(dist.distribution,"windows",None): msg = "don't call setup(windows=[...]) with esky;" msg += " use setup(scripts=[...]) instead" raise RuntimeError(msg) dist.distribution.console = [] dist.distribution.windows = [] my_data_files = dist.distribution.data_files dist.distribution.data_files = [] for exe in dist.get_executables(): # Pass any executable kwds through to py2exe. # We handle "icon" and "gui_only" ourselves. 
        s = exe._kwds.copy()
        s["script"] = exe.script
        s["dest_base"] = exe.name[:-4]
        if exe.icon is not None and "icon_resources" not in s:
            s["icon_resources"] = [(1,exe.icon)]
        if exe.gui_only:
            dist.distribution.windows.append(s)
        else:
            dist.distribution.console.append(s)
    if "zipfile" in options:
        dist.distribution.zipfile = options.pop("zipfile")
    # Create the py2exe cmd and adjust its options
    cmd = custom_py2exe(dist.distribution)
    cmd.includes = includes
    cmd.excludes = excludes
    if "bundle_files" in options:
        if options["bundle_files"] < 3 and dist.compile_bootstrap_exes:
            err = "can't compile bootstrap exes when bundle_files < 3"
            raise RuntimeError(err)
    for (nm,val) in options.iteritems():
        setattr(cmd,nm,val)
    cmd.dist_dir = dist.freeze_dir
    cmd.finalize_options()
    # Actually run the freeze process
    cmd.run()
    # Copy data files into the freeze dir
    dist.distribution.data_files = my_data_files
    for (src,dst) in dist.get_data_files():
        dst = os.path.join(dist.freeze_dir,dst)
        dstdir = os.path.dirname(dst)
        if not os.path.isdir(dstdir):
            dist.mkpath(dstdir)
        dist.copy_file(src,dst)
    # Place a marker file so we know how it was frozen
    os.mkdir(os.path.join(dist.freeze_dir,ESKY_CONTROL_DIR))
    marker_file = os.path.join(ESKY_CONTROL_DIR,"f-py2exe-%d%d.txt")%sys.version_info[:2]
    open(os.path.join(dist.freeze_dir,marker_file),"w").close()
    # Copy package data into the library.zip
    # For now, we don't try to put package data into a bundled zipfile.
    dist_zipfile = dist.distribution.zipfile
    if dist_zipfile is None:
        for (src,arcnm) in dist.get_package_data():
            err = "zipfile=None can't be used with package_data (yet...)"
            raise RuntimeError(err)
    elif not cmd.skip_archive:
        lib = zipfile.ZipFile(os.path.join(dist.freeze_dir,dist_zipfile),"a")
        for (src,arcnm) in dist.get_package_data():
            lib.write(src,arcnm)
        lib.close()
    else:
        for (src,arcnm) in dist.get_package_data():
            lib = os.path.join(dist.freeze_dir,os.path.dirname(dist_zipfile))
            dest = os.path.join(lib, os.path.dirname(src))
            f = os.path.basename(src)
            if not os.path.isdir(dest):
                dist.mkpath(dest)
            dist.copy_file(src,os.path.join(dest, f))
    # There's no need to copy library.zip into the bootstrap env, as the
    # chainloader will run before py2exe goes looking for it.
    pass
    # Create the bootstrapping code, using custom code if specified.
    # It gets stored as a marshalled list of code objects directly in the exe.
    esky_name = dist.distribution.get_name()
    code_source = ["__esky_name__ = %r" % (esky_name,)]
    code_source.append(inspect.getsource(esky.bootstrap))
    if dist.compile_bootstrap_exes:
        from esky.bdist_esky import pypy_libpython
        from esky.bdist_esky import pypy_winres
        code_source.append(inspect.getsource(pypy_libpython))
        code_source.append(inspect.getsource(pypy_winres))
        code_source.append(_CUSTOM_PYPY_CHAINLOADER)
        code_source.append(dist.get_bootstrap_code())
        code_source = "\n".join(code_source)
        for exe in dist.get_executables(normalise=False):
            if not exe.include_in_bootstrap_env:
                continue
            fexe = os.path.join(dist.freeze_dir,exe.name)
            bsexe = dist.compile_to_bootstrap_exe(exe,code_source)
            winres.copy_safe_resources(fexe,bsexe)
        # We may also need the bundled MSVCRT libs
        for nm in os.listdir(dist.freeze_dir):
            if is_core_dependency(nm) and nm.startswith("Microsoft"):
                dist.copy_to_bootstrap_env(nm)
    else:
        code_source.append(_FAKE_ESKY_BOOTSTRAP_MODULE)
        code_source.append(_CUSTOM_WIN32_CHAINLOADER)
        code_source.append(dist.get_bootstrap_code())
        code_source.append("bootstrap()")
        code_source = "\n".join(code_source)
        code = marshal.dumps([compile(code_source,"__main__.py","exec")])
        # Copy any core dependencies into the bootstrap env.
        for nm in os.listdir(dist.freeze_dir):
            if is_core_dependency(nm):
                dist.copy_to_bootstrap_env(nm)
        # Copy the loader program for each script into the bootstrap env.
        for exe in dist.get_executables(normalise=False):
            if not exe.include_in_bootstrap_env:
                continue
            exepath = dist.copy_to_bootstrap_env(exe.name)
            # Read the py2exe metadata from the frozen exe.  We will
            # need to duplicate some of these fields when we rewrite it.
            coderes = winres.load_resource(exepath,u"PYTHONSCRIPT",1,0)
            headsz = struct.calcsize("iiii")
            (magic,optmz,unbfrd,codesz) = struct.unpack("iiii",coderes[:headsz])
            assert magic == 0x78563412
            # Insert the bootstrap code into the exe as a resource.
            # This appears to have the happy side-effect of stripping any
            # extra data from the end of the exe, which is exactly what we
            # want when zipfile=None is specified; otherwise each bootstrap
            # exe would also contain the whole bundled zipfile.
            coderes = struct.pack("iiii",
                         magic,   # magic value used for integrity checking
                         optmz,   # optimization level to enable
                         unbfrd,  # whether to use unbuffered output
                         len(code),
                      ) + b"\x00" + code + b"\x00\x00"
            winres.add_resource(exepath,coderes,u"PYTHONSCRIPT",1,0)
        # If the python dll hasn't been copied into the bootstrap env,
        # make sure it's stored in each bootstrap exe as a resource.
        pydll = u"python%d%d.dll" % sys.version_info[:2]
        if not os.path.exists(os.path.join(dist.bootstrap_dir,pydll)):
            buf = ctypes.create_string_buffer(3000)
            GetModuleFileNameA = ctypes.windll.kernel32.GetModuleFileNameA
            if not GetModuleFileNameA(sys.dllhandle,ctypes.byref(buf),3000):
                raise ctypes.WinError()
            with open(buf.value,"rb") as f:
                pydll_bytes = f.read()
            for exe in dist.get_executables(normalise=False):
                if not exe.include_in_bootstrap_env:
                    continue
                exepath = os.path.join(dist.bootstrap_dir,exe.name)
                try:
                    winres.load_resource(exepath,pydll.upper(),1,0)
                except EnvironmentError:
                    winres.add_resource(exepath,pydll_bytes,pydll.upper(),1,0)


# Code to fake out any bootstrappers that try to import from esky.
_FAKE_ESKY_BOOTSTRAP_MODULE = """
class __fake:
    __all__ = ()
sys.modules["esky"] = __fake()
sys.modules["esky.bootstrap"] = __fake()
"""


# On Windows, execv is flaky and expensive.  If the chainloader is the same
# python version as the target exe, we can munge sys.path to bootstrap it
# into the existing process.
#
# We need to read the script to execute as a resource from the exe, so this
# only works if we can bootstrap a working ctypes module.  We then insert
# the source code from esky.winres.load_resource directly into this function.
#
_CUSTOM_WIN32_CHAINLOADER = """

_orig_chainload = _chainload

def _chainload(target_dir):
    # Be careful to escape percent-sign, this gets interpolated below
    marker_file = pathjoin(ESKY_CONTROL_DIR,"f-py2exe-%%d%%d.txt")%%sys.version_info[:2]
    pydll = "python%%s%%s.dll" %% sys.version_info[:2]
    mydir = dirname(sys.executable)
    # Check that the target directory is the same version of python as this
    # bootstrapping script.  If not, we can't chainload it in-process.
    if not exists(pathjoin(target_dir,marker_file)):
        return _orig_chainload(target_dir)
    # Check whether the target directory contains unbundled C extensions.
    # These require a physical python dll on disk next to the running
    # executable, so we must have such a dll in order to chainload.
    for nm in listdir(target_dir):
        if nm == pydll:
            continue
        if nm.lower().startswith("msvcr"):
            continue
        if nm.lower().endswith(".pyd") or nm.lower().endswith(".dll"):
            # The freeze dir contains unbundled C extensions.
            if not exists(pathjoin(mydir,pydll)):
                return _orig_chainload(target_dir)
            else:
                break
    # Munge the environment to pretend we're in the target dir.
    # This will let us load modules from inside it.
    # If we fail for whatever reason, we can't chainload in-process.
    try:
        import nt
    except ImportError:
        return _orig_chainload(target_dir)
    sys.bootstrap_executable = sys.executable
    sys.executable = pathjoin(target_dir,basename(sys.executable))
    verify(sys.executable)
    sys.prefix = sys.prefix.replace(mydir,target_dir)
    sys.argv[0] = sys.executable
    for i in xrange(len(sys.path)):
        sys.path[i] = sys.path[i].replace(mydir,target_dir)
    # If we're in the bootstrap dir, try to chdir into the version dir.
    # This is sometimes necessary for loading of DLLs by relative path.
    curdir = getcwd()
    if curdir == mydir:
        nt.chdir(target_dir)
    # Use the library.zip from the version dir.
    # It should already be in sys.path from the above env mangling,
    # but you never know...
    libfile = pathjoin(target_dir,"library.zip")
    if libfile not in sys.path:
        if exists(libfile):
            sys.path.append(libfile)
        else:
            sys.path.append(target_dir)
    # Try to import the modules we need for bootstrapping.
    # If we fail for whatever reason, we can't chainload in-process.
    try:
        import zipextimporter; zipextimporter.install()
    except ImportError:
        pass
    try:
        import ctypes
        import struct
        import marshal
        import msvcrt
    except ImportError:
        return _orig_chainload(target_dir)
    # The source for esky.winres.load_resource gets inserted below.
    # This allows us to grab the code out of the frozen version exe.
    from ctypes import c_char, POINTER
    k32 = ctypes.windll.kernel32
    LOAD_LIBRARY_AS_DATAFILE = 0x00000002
    _DEFAULT_RESLANG = 1033
    %s
    # Great, now we magically have the load_resource function :-)
    try:
        data = load_resource(sys.executable,u"PYTHONSCRIPT",1,0)
    except EnvironmentError:
        # This will trigger if sys.executable doesn't exist.
        # Falling back to the original chainloader will account for
        # the unlikely case where sys.executable is a backup file.
return _orig_chainload(target_dir) else: sys.modules.pop("esky",None) sys.modules.pop("esky.bootstrap",None) headsz = struct.calcsize("iiii") (magic,optmz,unbfrd,codesz) = struct.unpack("iiii",data[:headsz]) assert magic == 0x78563412 # Set up the environment requested by "optimized" flag. # Currently "unbuffered" is not supported at run-time since I # haven't figured out the necessary incantations. try: opt_var = ctypes.c_int.in_dll(ctypes.pythonapi,"Py_OptimizeFlag") opt_var.value = optmz except ValueError: pass # Skip over the archive name to find start of code codestart = headsz while data[codestart] != "\\0": codestart += 1 codestart += 1 codeend = codestart + codesz codelist = marshal.loads(data[codestart:codeend]) # Execute all code in the context of __main__ module. d_locals = d_globals = sys.modules["__main__"].__dict__ d_locals["__name__"] = "__main__" for code in codelist: exec(code, d_globals, d_locals) raise SystemExit(0) """ % (inspect.getsource(winres.load_resource).replace("\n","\n"+" "*4),) # On Windows, execv is flaky and expensive. Since the pypy-compiled bootstrap # exe doesn't have a python runtime, it needs to chainload the one from the # target version dir before trying to bootstrap in-process. _CUSTOM_PYPY_CHAINLOADER = """ import nt from pypy.rlib.rstruct.runpack import runpack import time; _orig_chainload = _chainload def _chainload(target_dir): mydir = dirname(sys.executable) pydll = pathjoin(target_dir,"python%s%s.dll" % sys.version_info[:2]) if not exists(pydll): return _orig_chainload(target_dir) else: # Munge the environment for DLL loading purposes try: environ["PATH"] = environ["PATH"] + ";" + target_dir except KeyError: environ["PATH"] = target_dir # Get the target python env up and running verify(pydll) py = libpython(pydll) py.Set_NoSiteFlag(1) py.Set_FrozenFlag(1) py.Set_IgnoreEnvironmentFlag(1) py.SetPythonHome("") py.Initialize() # Extract the marshalled code data from the target executable, # store it into a python string object. target_exe = pathjoin(target_dir,basename(sys.executable)) verify(target_exe) try: py_data = load_resource_pystr(py,target_exe,"PYTHONSCRIPT",1,0) except EnvironmentError: return _orig_chainload(target_dir) data = py.String_AsString(py_data) headsz = 16 # <-- struct.calcsize("iiii") headdata = rffi.charpsize2str(rffi.cast(rffi.CCHARP,data),headsz) (magic,optmz,unbfrd,codesz) = runpack("iiii",headdata) assert magic == 0x78563412 # skip over the archive name to find start of code codestart = headsz while data[codestart] != "\\0": codestart += 1 codestart += 1 codeend = codestart + codesz assert codeend > 0 # Tweak the python env according to the py2exe frozen metadata py.Set_OptimizeFlag(optmz) # TODO: set up buffering # If you can decide on buffered/unbuffered before loading up # the python runtime, this can be done by just setting the # PYTHONUNBUFFERED environment variable. 
If not, we have to
    # do it ourselves like this:
    #if unbfrd:
    #    setmode(0,nt.O_BINARY)
    #    setmode(1,nt.O_BINARY)
    #    setvbuf(stdin,NULL,4,512)
    #    setvbuf(stdout,NULL,4,512)
    #    setvbuf(stderr,NULL,4,512)
    # Pretend the python env is running from within the frozen executable
    syspath = "%s;%s\\library.zip;%s" % (target_exe,target_dir,target_dir,)
    py.Sys_SetPath(syspath);
    sysmod = py.Import_ImportModule("sys")
    sysargv = py.List_New(len(sys.argv))
    for i in xrange(len(sys.argv)):
        py.List_SetItem(sysargv,i,py.String_FromString(sys.argv[i]))
    py.Object_SetAttrString(sysmod,"argv",sysargv)
    py.Object_SetAttrString(sysmod,"frozen",py.String_FromString("py2exe"))
    py.Object_SetAttrString(sysmod,"executable",py.String_FromString(target_exe))
    py.Object_SetAttrString(sysmod,"bootstrap_executable",py.String_FromString(sys.executable))
    py.Object_SetAttrString(sysmod,"prefix",py.String_FromString(dirname(target_exe)))
    curdir = getcwd()
    if curdir == mydir:
        nt.chdir(target_dir)
    # Execute the marshalled list of code objects
    globals = py.Dict_New()
    py.Dict_SetItemString(globals,"__builtins__",py.Eval_GetBuiltins())
    py.Dict_SetItemString(globals,"FROZEN_DATA",py_data)
    runcode = "FROZEN_DATA = FROZEN_DATA[%d:%d]\\n" % (codestart,codeend,)
    runcode += "import sys\\n"
    runcode += "import marshal\\n"
    runcode += "d_locals = d_globals = sys.modules['__main__'].__dict__\\n"
    runcode += "d_locals['__name__'] = '__main__'\\n"
    runcode += "for code in marshal.loads(FROZEN_DATA):\\n"
    runcode += "    exec code in d_globals, d_locals\\n"
    py.Run_String(runcode,py.file_input,globals)
    # Clean up after execution.
    py.Finalize()
    sys.exit(0)

"""
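
# For reference, the layout of the PYTHONSCRIPT resource that both
# chainloaders above parse (reconstructed from the struct.pack/unpack calls
# in this module; little-endian ints):
#
#   int32  magic        -- always 0x78563412, used as an integrity check
#   int32  optimize     -- Py_OptimizeFlag value to apply at runtime
#   int32  unbuffered   -- whether to use unbuffered output
#   int32  codesz       -- length of the marshalled code that follows
#   char[] archive_name -- NUL-terminated (written empty by freeze() above)
#   char[] code         -- marshal.dumps() of a list of code objects
#   followed by two NUL bytes of trailing padding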
{ "content_hash": "a32ccc204c1a5709f1e27b7f3f8bef19", "timestamp": "", "source": "github", "line_count": 484, "max_line_length": 97, "avg_line_length": 40.31198347107438, "alnum_prop": 0.6340013325816206, "repo_name": "kinnarr/esky", "id": "3b52a3eaee97fbfbc5d607008484a1d67bd58af4", "size": "19632", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "esky/bdist_esky/f_py2exe.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "393439" } ], "symlink_target": "" }
from __future__ import unicode_literals # Third-party imports from mock import patch # Local imports from .. import topic from ...tests import TestGCP @patch('cloudify_gcp.utils.assure_resource_id_correct', return_value=True) @patch('cloudify_gcp.gcp.ServiceAccountCredentials.from_json_keyfile_dict') @patch('cloudify_gcp.utils.get_gcp_resource_name', return_value='valid_name') @patch('cloudify_gcp.gcp.build') class TestGCPTopic(TestGCP): def test_create(self, mock_build, *args): topic.create('valid_name', ) mock_build().projects().topics( ).create.assert_called_once_with( body={}, name='projects/not really a project/topics/valid_name') def test_delete(self, mock_build, *args): self.ctxmock.instance.runtime_properties['name'] = 'valid_name' topic.delete() mock_build.assert_called_once()
{ "content_hash": "896533cb8eaf561e6960e5a3529f11c3", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 77, "avg_line_length": 31.571428571428573, "alnum_prop": 0.6866515837104072, "repo_name": "cloudify-cosmo/cloudify-gcp-plugin", "id": "4ec498f773e0ccd6f8892d6292ee75ac41ac2d92", "size": "1552", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cloudify_gcp/pubsub/tests/test_topic.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "497449" } ], "symlink_target": "" }
from gluon import current #from gluon.html import * from gluon.storage import Storage from s3 import S3CustomController THEME = "historic.CRMT" # ============================================================================= class index(S3CustomController): """ Custom Home Page """ def __call__(self): output = {} # Latest Activities db = current.db s3db = current.s3db atable = s3db.project_activity query = (atable.deleted == False) output["total_activities"] = db(query).count() #gtable = s3db.gis_location #query &= (atable.location_id == gtable.id) ogtable = s3db.org_group ltable = s3db.project_activity_group query &= (atable.id == ltable.activity_id) & \ (ogtable.id == ltable.group_id) rows = db(query).select(atable.id, atable.name, atable.date, #gtable.L3, ogtable.name, limitby = (0, 3), orderby = ~atable.date ) latest_activities = [] current.deployment_settings.L10n.date_format = "%d %b %y" drepresent = atable.date.represent for row in rows: date = row["project_activity.date"] if date: nice_date = drepresent(date) else: nice_date = "" latest_activities.append(Storage(id = row["project_activity.id"], name = row["project_activity.name"], date = nice_date, date_iso = date or "", org_group = row["org_group.name"], #location = row["gis_location.L3"], )) output["latest_activities"] = latest_activities # Which Map should we link to in "Know your community"? auth = current.auth table = s3db.gis_config if auth.is_logged_in() and auth.user.org_group_id: # Coalition Map ogtable = s3db.org_group og = db(ogtable.id == auth.user.org_group_id).select(ogtable.pe_id, limitby=(0, 1) ).first() query = (table.pe_id == og.pe_id) else: # Default Map query = (table.uuid == "SITE_DEFAULT") config = db(query).select(table.id, limitby=(0, 1) ).first() try: output["config_id"] = config.id except: output["config_id"] = None self._view(THEME, "index.html") return output # END =========================================================================
{ "content_hash": "154771b2a9bfdfbba31acdfad3842efb", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 81, "avg_line_length": 37.18072289156626, "alnum_prop": 0.4147764095917045, "repo_name": "flavour/eden", "id": "ce6ca4b6a0703c8994daf4685f615b4ef37a372b", "size": "3111", "binary": false, "copies": "13", "ref": "refs/heads/master", "path": "modules/templates/historic/CRMT/controllers.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "727" }, { "name": "CSS", "bytes": "3351335" }, { "name": "HTML", "bytes": "1367727" }, { "name": "JavaScript", "bytes": "20109418" }, { "name": "NSIS", "bytes": "3934" }, { "name": "PHP", "bytes": "15220" }, { "name": "Python", "bytes": "31407527" }, { "name": "Ruby", "bytes": "8291" }, { "name": "Shell", "bytes": "5059" }, { "name": "XSLT", "bytes": "3274119" } ], "symlink_target": "" }
import _plotly_utils.basevalidators class HovertemplatesrcValidator(_plotly_utils.basevalidators.SrcValidator): def __init__( self, plotly_name="hovertemplatesrc", parent_name="scattergl", **kwargs ): super(HovertemplatesrcValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, edit_type=kwargs.pop("edit_type", "none"), **kwargs, )
{ "content_hash": "4381bf619a824c645d832aa8e32d877c", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 79, "avg_line_length": 33.61538461538461, "alnum_prop": 0.6247139588100686, "repo_name": "plotly/plotly.py", "id": "23f214e92356ba6738a79c11db0558d8d756041e", "size": "437", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packages/python/plotly/plotly/validators/scattergl/_hovertemplatesrc.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "545" }, { "name": "JavaScript", "bytes": "2074" }, { "name": "PostScript", "bytes": "565328" }, { "name": "Python", "bytes": "31506317" }, { "name": "TypeScript", "bytes": "71337" } ], "symlink_target": "" }
"""Apache Beam SDK version information and utilities.""" import re __version__ = None def get_version(): global __version__ if not __version__: __version__ = get_version_from_pom() return __version__ # Read the version from pom.xml file def get_version_from_pom(): with open('pom.xml', 'r') as f: pom = f.read() regex = (r'.*<parent>\s*' r'<groupId>[a-z\.]+</groupId>\s*' r'<artifactId>[a-z\-]+</artifactId>\s*' r'<version>([0-9a-zA-Z\.\-]+)</version>.*') pattern = re.compile(str(regex)) search = pattern.search(pom) version = search.group(1) version = version.replace("-SNAPSHOT", ".dev") return version if __name__ == '__main__': __version__ = get_version_from_pom()
{ "content_hash": "1a440d64325347bf449bc061736e3b14", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 56, "avg_line_length": 23.060606060606062, "alnum_prop": 0.5663600525624178, "repo_name": "chamikaramj/incubator-beam", "id": "ef07dbf99f99c49a467f1de8c6e64b51b5311894", "size": "1546", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sdks/python/apache_beam/version.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Groovy", "bytes": "22626" }, { "name": "Java", "bytes": "9932061" }, { "name": "Protocol Buffer", "bytes": "1407" }, { "name": "Shell", "bytes": "10104" } ], "symlink_target": "" }
import abc
import re

import unidecode

from learning_text_transformer import spoken_word_to_number

# expandk should work on lowercase k e.g. 30k too?
# TransformExtractNumber should extract multiple numbers!
# TransformRemoveWords needs to be hardcoded with a list of terms, should learn
# terms from input text
# spoken words to numbers must allow empty strings
# get_transforms needs cleanup
# Do I need 'configure' still?


class Transform(abc.ABC):
    @abc.abstractmethod
    def apply(self, s):
        pass

    def configure(self, **kwargs):
        pass

    def __str__(self):
        return self.__class__.__name__ + "()"

    def __repr__(self):
        return self.__str__()

    def serialise(self):
        return self.__class__.__name__, dict()

    @classmethod
    def factory(cls, input_strings, output_strings):
        cs = [cls()]
        return cs

    @classmethod
    def deserialise(cls, parameters):
        c = cls()
        return c


class TransformExtractNumber(Transform):
    nbrs = set([str(n) for n in range(10)])

    def apply(self, s):
        check = False
        for c in s:
            if c in self.nbrs:
                check = True
        if check:
            # look for numbers and/or comma, replace the commas
            result = re.findall('\d+', re.sub('[\d,]+', lambda x: x.group(0).replace(',', ''), s))
            return " ".join(result)
        else:
            return ""  # no numbers to extract
        return s  # don't transform as a default


class TransformExpandK(Transform):
    """Expand a k or K into 000 if preceded by an integer"""
    def apply(self, s):
        match_char = None
        if 'K' in s:
            match_char = "K"
        elif 'k' in s:
            match_char = "k"
        if match_char:
            s = re.sub('(\d+{})'.format(match_char), lambda x: x.group(0).replace(match_char, '000'), s)
        return s


class TransformRemoveDot00(Transform):
    def apply(self, s):
        return s.replace('.00', '')


class TransformSpokenWordsToNumbers(Transform):
    def apply(self, s):
        try:
            s = str(spoken_word_to_number.spoken_word_to_number(s))
        except (KeyError, AssertionError):
            pass  # lots of things cause this converter to break
        return s


class TransformLowercase(Transform):
    def apply(self, s):
        return s.lower()


class TransformStrip(Transform):
    def apply(self, s):
        # strip multiple spaces
        s1 = re.sub("[\s]+", " ", s)
        return s1.strip()


class TransformRemoveWords(Transform):
    def configure(self, **kwargs):
        terms = kwargs['terms']
        assert len(terms) == 1
        self.terms = terms[0]

    @classmethod
    # this version makes Transforms based on lots of input possibilities
    # but this explodes the current possibility-space!
def factory(cls, input_strings, output_strings): cs = [] tokens = set() for input_string in input_strings: tokens.update([tok.strip() for tok in input_string.split()]) for token in tokens: c = cls() c.terms = token cs.append(c) return cs @classmethod def deserialise(cls, parameters): c = cls() c.terms = parameters['terms'] return c def serialise(self): return self.__class__.__name__, {"terms": self.terms} def apply(self, s): s = s.replace(self.terms, "") return s def __str__(self): return self.__class__.__name__ + "(" + repr(self.terms) + ")" class TransformUnidecode(Transform): def apply(self, s): return unidecode.unidecode(s) class Serialisation(object): def _deserialise_transform(self, transform_name, parameters): """""" for transform_cls in Transform.__subclasses__(): if transform_cls.__name__ == transform_name: return transform_cls.deserialise(parameters) else: raise ValueError() def serialise(self, transforms): """JSON Serialise the sequence of transforms""" serialised_raw = [] for transform in transforms: serialised_raw.append(transform.serialise()) return serialised_raw def deserialise(self, serialised_raw): """Deserialise from JSON and return instantiated transforms""" transforms = [] for name, parameters in serialised_raw: t = self._deserialise_transform(name, parameters) transforms.append(t) return transforms def get_transforms(input_strings, output_strings): # note that the ordering is non-deterministic all_transforms = [] for transform in Transform.__subclasses__(): ts = transform.factory(input_strings, output_strings) all_transforms += ts all_transforms.sort(key=lambda x: x.__class__.__name__) return all_transforms #def get_transforms_OLD(): #all_transforms = [] #for transform in Transform.__subclasses__(): #t = [transform()] #if t[0].__class__.__name__ == "TransformRemoveWords": #t = [] #for term in ["Ltd", "Limited"]: #t_new = transform() #t_new.configure(terms=[term]) #t.append(t_new) #all_transforms += t #return all_transforms #def get_transformsX(mod): #transforms = [] #for c in dir(mod): #print(c) #c = getattr(mod, c) #try: #if c is Transform: #continue #if issubclass(c, Transform): #transforms.append(c) #except TypeError: #pass #return transforms
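
# A minimal round-trip sketch (assuming the sibling modules are importable;
# values are illustrative):
#
#   t = TransformExpandK()
#   assert t.apply("30K") == "30000"
#
#   ser = Serialisation()
#   raw = ser.serialise([TransformLowercase(), t])
#   # raw == [("TransformLowercase", {}), ("TransformExpandK", {})]
#   restored = ser.deserialise(raw)  # fresh, equivalent Transform instances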
{ "content_hash": "4bee8c4ed2881fc8371a93b730ae2167", "timestamp": "", "source": "github", "line_count": 206, "max_line_length": 104, "avg_line_length": 27.54368932038835, "alnum_prop": 0.5786041593232287, "repo_name": "ianozsvald/learning_text_transformer", "id": "fe045f6e739305eb350f61aeacf5acdd1222896a", "size": "5674", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "learning_text_transformer/transforms.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "44266" } ], "symlink_target": "" }
from sys import version_info
if version_info >= (2, 6, 0):
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_SimCartesianTransformOperator_3D_Uniform', [dirname(__file__)])
        except ImportError:
            import _SimCartesianTransformOperator_3D_Uniform
            return _SimCartesianTransformOperator_3D_Uniform
        if fp is not None:
            try:
                _mod = imp.load_module('_SimCartesianTransformOperator_3D_Uniform', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _SimCartesianTransformOperator_3D_Uniform = swig_import_helper()
    del swig_import_helper
else:
    import _SimCartesianTransformOperator_3D_Uniform
del version_info
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.


def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    if (name == "thisown"):
        return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)


def _swig_setattr(self, class_type, name, value):
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)


def _swig_getattr_nondynamic(self, class_type, name, static=1):
    if (name == "thisown"):
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    if (not static):
        return object.__getattr__(self, name)
    else:
        raise AttributeError(name)


def _swig_getattr(self, class_type, name):
    return _swig_getattr_nondynamic(self, class_type, name, 0)


def _swig_repr(self):
    try:
        strthis = "proxy of " + self.this.__repr__()
    except:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)

try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object:
        pass
    _newclass = 0

try:
    import weakref
    weakref_proxy = weakref.proxy
except:
    weakref_proxy = lambda x: x


import base


class SimCartesianTransformOperator(base.SimRepresentationItem):
    __swig_setmethods__ = {}
    for _s in [base.SimRepresentationItem]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimCartesianTransformOperator, name, value)
    __swig_getmethods__ = {}
    for _s in [base.SimRepresentationItem]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimCartesianTransformOperator, name)
    __repr__ = _swig_repr

    def Axis1(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_Axis1(self, *args)

    def Axis2(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_Axis2(self, *args)

    def LocalOrigin(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_LocalOrigin(self, *args)

    def Scale(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_Scale(self, *args)

    def Coordinates(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_Coordinates(self, *args)

    def __init__(self, *args):
        this = _SimCartesianTransformOperator_3D_Uniform.new_SimCartesianTransformOperator(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def _clone(self, f=0, c=None):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator__clone(self, f, c)
    __swig_destroy__ = _SimCartesianTransformOperator_3D_Uniform.delete_SimCartesianTransformOperator
    __del__ = lambda self: None
SimCartesianTransformOperator_swigregister = _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_swigregister
SimCartesianTransformOperator_swigregister(SimCartesianTransformOperator)


class SimCartesianTransformOperator_3D(SimCartesianTransformOperator):
    __swig_setmethods__ = {}
    for _s in [SimCartesianTransformOperator]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimCartesianTransformOperator_3D, name, value)
    __swig_getmethods__ = {}
    for _s in [SimCartesianTransformOperator]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimCartesianTransformOperator_3D, name)
    __repr__ = _swig_repr

    def Axis3(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Axis3(self, *args)

    def __init__(self, *args):
        this = _SimCartesianTransformOperator_3D_Uniform.new_SimCartesianTransformOperator_3D(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def _clone(self, f=0, c=None):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D__clone(self, f, c)
    __swig_destroy__ = _SimCartesianTransformOperator_3D_Uniform.delete_SimCartesianTransformOperator_3D
    __del__ = lambda self: None
SimCartesianTransformOperator_3D_swigregister = _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_swigregister
SimCartesianTransformOperator_3D_swigregister(SimCartesianTransformOperator_3D)


class SimCartesianTransformOperator_3D_Uniform(SimCartesianTransformOperator_3D):
    __swig_setmethods__ = {}
    for _s in [SimCartesianTransformOperator_3D]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimCartesianTransformOperator_3D_Uniform, name, value)
    __swig_getmethods__ = {}
    for _s in [SimCartesianTransformOperator_3D]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimCartesianTransformOperator_3D_Uniform, name)
    __repr__ = _swig_repr

    def SimCartesianTransformOperator_PlaneTransform(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_SimCartesianTransformOperator_PlaneTransform(self, *args)

    def SimCartesianTransformOperator_CurrentAspectRatio(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_SimCartesianTransformOperator_CurrentAspectRatio(self, *args)

    def SimCartesianTransformOperator_NewAspectRatio(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_SimCartesianTransformOperator_NewAspectRatio(self, *args)

    def __init__(self, *args):
        this = _SimCartesianTransformOperator_3D_Uniform.new_SimCartesianTransformOperator_3D_Uniform(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def _clone(self, f=0, c=None):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform__clone(self, f, c)
    __swig_destroy__ = _SimCartesianTransformOperator_3D_Uniform.delete_SimCartesianTransformOperator_3D_Uniform
    __del__ = lambda self: None
SimCartesianTransformOperator_3D_Uniform_swigregister = _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_swigregister
SimCartesianTransformOperator_3D_Uniform_swigregister(SimCartesianTransformOperator_3D_Uniform)


class SimCartesianTransformOperator_3D_Uniform_sequence(base.sequence_common):
    __swig_setmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimCartesianTransformOperator_3D_Uniform_sequence, name, value)
    __swig_getmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimCartesianTransformOperator_3D_Uniform_sequence, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        this = _SimCartesianTransformOperator_3D_Uniform.new_SimCartesianTransformOperator_3D_Uniform_sequence(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def assign(self, n, x):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_assign(self, n, x)

    def begin(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_begin(self, *args)

    def end(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_end(self, *args)

    def rbegin(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_rbegin(self, *args)

    def rend(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_rend(self, *args)

    def at(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_at(self, *args)

    def front(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_front(self, *args)

    def back(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_back(self, *args)

    def push_back(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_push_back(self, *args)

    def pop_back(self):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_pop_back(self)

    def detach_back(self, pop=True):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_detach_back(self, pop)

    def insert(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_insert(self, *args)

    def erase(self, *args):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_erase(self, *args)

    def detach(self, position, r, erase=True):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_detach(self, position, r, erase)

    def swap(self, x):
        return _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_swap(self, x)
    __swig_destroy__ = _SimCartesianTransformOperator_3D_Uniform.delete_SimCartesianTransformOperator_3D_Uniform_sequence
    __del__ = lambda self: None
SimCartesianTransformOperator_3D_Uniform_sequence_swigregister = _SimCartesianTransformOperator_3D_Uniform.SimCartesianTransformOperator_3D_Uniform_sequence_swigregister
SimCartesianTransformOperator_3D_Uniform_sequence_swigregister(SimCartesianTransformOperator_3D_Uniform_sequence)
# This file is compatible with both classic and new-style classes.
{ "content_hash": "a5cc5d1ae7168c214aa3bd83c67781f9", "timestamp": "", "source": "github", "line_count": 262, "max_line_length": 169, "avg_line_length": 45.29007633587786, "alnum_prop": 0.7134670487106017, "repo_name": "EnEff-BIM/EnEffBIM-Framework", "id": "67648e33f544d66b95b2ffb8503fecf7738f3824", "size": "12073", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SimModel_Python_API/simmodel_swig/Release/SimCartesianTransformOperator_3D_Uniform.py", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "397980994" }, { "name": "HTML", "bytes": "124134" }, { "name": "Python", "bytes": "2480972" } ], "symlink_target": "" }
from datetime import datetime
import itertools
import math
import time


def shift(seq, n):
    n = n % len(seq)
    return seq[n:] + seq[:n]


starttime = datetime.now()

letters = []
for i in range(42, 127):
    letters.append(chr(i))

key = "asklx"

total = 0
for char in key:
    total += ord(char)
# shift() returns a new sequence rather than mutating its argument,
# so the result has to be assigned back
key = shift(key, ((total * len(key)) % 26) % len(key))

regex = []
for char in key:
    regex.append((ord(char) - 41) % len(key))

matches = 0
pw_count = 0
for string in itertools.permutations(letters, len(key)):
    pw_count += 1
    if pw_count % 1000000 == 0:
        print("progress: " + str(math.floor(((pw_count / math.pow(84, len(key))) * 100) + 0.5)) + "% time: " + str(datetime.now() - starttime) + " time remaining: " + str(time.strftime('%H:%M:%S', time.gmtime(int(math.pow(84, len(key)) / (pw_count / (datetime.now() - starttime).total_seconds())) - int((datetime.now() - starttime).total_seconds())))) + " speed: " + str(int(pw_count / (datetime.now() - starttime).total_seconds())) + "pw/s")
    total = 0
    for char in string:
        total += ord(char)
    # same here: keep the rotated tuple that shift() returns
    string = shift(string, ((total * len(string)) % 26))
    tmp_buffer = []
    for char in string:
        tmp_buffer.append(((ord(char) - 41) / len(string)) % 10)
    if tmp_buffer == regex:
        matches += 1

print("matches: " + str(matches) + " execution took: " + str(datetime.now() - starttime) + " seconds")
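# Illustrative check (added; not part of the original script): shift() rotates
# a sequence to the left and always returns a new object, never mutating in
# place -- which is why its result is assigned above.
assert shift("abcde", 2) == "cdeab"
assert shift([1, 2, 3, 4], 5) == [2, 3, 4, 1]  # n wraps around via n % len(seq)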
{ "content_hash": "167d67f752a98f9a89390fa84619f380", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 436, "avg_line_length": 27.040816326530614, "alnum_prop": 0.6203773584905661, "repo_name": "theXbit/PyEncrypt", "id": "069d3d1dfe8c62c77d43c706410eb3b0423bbb2a", "size": "1325", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "passwordmatches.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "36186" } ], "symlink_target": "" }
import sys
from abc import ABC, abstractmethod
from enum import Enum
from io import BytesIO
from typing import TypeVar, Generic, Iterable, Collection

from pyflink.common.serializer import TypeSerializer
from pyflink.datastream.functions import RuntimeContext, InternalWindowFunction
from pyflink.datastream.state import StateDescriptor, State
from pyflink.metrics import MetricGroup

__all__ = ['Window',
           'TimeWindow',
           'CountWindow',
           'WindowAssigner',
           'MergingWindowAssigner',
           'TriggerResult',
           'Trigger',
           'TimeWindowSerializer',
           'CountWindowSerializer']


MAX_LONG_VALUE = sys.maxsize


def long_to_int_with_bit_mixing(x: int) -> int:
    x = (x ^ (x >> 30)) * 0xbf58476d1ce4e5b9
    x = (x ^ (x >> 27)) * 0x94d049bb133111eb
    x = x ^ (x >> 31)
    return x


def mod_inverse(x: int) -> int:
    # Newton iteration for the multiplicative inverse modulo 2**k
    inverse = x * x * x
    inverse *= 2 - x * inverse
    inverse *= 2 - x * inverse
    inverse *= 2 - x * inverse
    return inverse


class Window(ABC):
    """
    Window is a grouping of elements into finite buckets. Windows have a maximum timestamp
    which means that, at some point, all elements that go into one window will have arrived.
    """

    @abstractmethod
    def max_timestamp(self) -> int:
        pass


class TimeWindow(Window):
    """
    Window that represents a time interval from start (inclusive) to end (exclusive).
    """

    def __init__(self, start: int, end: int):
        super(TimeWindow, self).__init__()
        self.start = start
        self.end = end

    def max_timestamp(self) -> int:
        return self.end - 1

    def intersects(self, other: 'TimeWindow') -> bool:
        """
        Returns True if this window intersects the given window.
        """
        return self.start <= other.end and self.end >= other.start

    def cover(self, other: 'TimeWindow') -> 'TimeWindow':
        """
        Returns the minimal window that covers both this window and the given window.
        """
        return TimeWindow(min(self.start, other.start), max(self.end, other.end))

    @staticmethod
    def get_window_start_with_offset(timestamp: int, offset: int, window_size: int):
        """
        Method to get the window start for a timestamp.

        :param timestamp: epoch millisecond to get the window start.
        :param offset: The offset which window start would be shifted by.
        :param window_size: The size of the generated windows.
        :return: window start
        """
        return timestamp - (timestamp - offset + window_size) % window_size

    def __hash__(self):
        return self.start + mod_inverse((self.end << 1) + 1)

    def __eq__(self, other):
        return self.__class__ == other.__class__ and self.end == other.end \
            and self.start == other.start

    def __lt__(self, other: 'TimeWindow'):
        if not isinstance(other, TimeWindow):
            raise Exception("Does not support comparison with non-TimeWindow %s" % other)

        return self.start == other.start and self.end < other.end or self.start < other.start

    def __le__(self, other: 'TimeWindow'):
        # "<=" holds when the windows are equal or this one orders first
        # (with "and" instead of "or" this could never be true)
        return self.__eq__(other) or self.__lt__(other)

    def __repr__(self):
        return "TimeWindow(start={}, end={})".format(self.start, self.end)
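# Illustrative sketch (added; not part of the upstream module): the modular
# arithmetic in TimeWindow.get_window_start_with_offset maps a timestamp to the
# start of its tumbling window. With offset=2 and window_size=10 the windows
# are [2, 12), [12, 22), [22, 32), ..., so a timestamp of 23 falls into the
# window that starts at 22, since 23 - (23 - 2 + 10) % 10 == 23 - 1 == 22.
assert TimeWindow.get_window_start_with_offset(23, 2, 10) == 22
assert TimeWindow.get_window_start_with_offset(5, 0, 10) == 0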
""" def __init__(self, id: int): super(CountWindow, self).__init__() self.id = id def max_timestamp(self) -> int: return MAX_LONG_VALUE def __hash__(self): return long_to_int_with_bit_mixing(self.id) def __eq__(self, other): return self.__class__ == other.__class__ and self.id == other.id def __repr__(self): return "CountWindow(id={})".format(self.id) class TimeWindowSerializer(TypeSerializer[TimeWindow]): def __init__(self): self._underlying_coder = None def serialize(self, element: TimeWindow, stream: BytesIO) -> None: if self._underlying_coder is None: self._underlying_coder = self._get_coder() bytes_data = self._underlying_coder.encode(element) stream.write(bytes_data) def deserialize(self, stream: BytesIO) -> TimeWindow: if self._underlying_coder is None: self._underlying_coder = self._get_coder() bytes_data = stream.read(16) return self._underlying_coder.decode(bytes_data) def _get_coder(self): try: from pyflink.fn_execution import coder_impl_fast as coder_impl except: from pyflink.fn_execution import coder_impl_slow as coder_impl return coder_impl.TimeWindowCoderImpl() class CountWindowSerializer(TypeSerializer[CountWindow]): def __init__(self): self._underlying_coder = None def serialize(self, element: CountWindow, stream: BytesIO) -> None: if self._underlying_coder is None: self._underlying_coder = self._get_coder() bytes_data = self._underlying_coder.encode(element) stream.write(bytes_data) def deserialize(self, stream: BytesIO) -> CountWindow: if self._underlying_coder is None: self._underlying_coder = self._get_coder() bytes_data = stream.read(8) return self._underlying_coder.decode(bytes_data) def _get_coder(self): try: from pyflink.fn_execution import coder_impl_fast as coder_impl except: from pyflink.fn_execution import coder_impl_slow as coder_impl return coder_impl.CountWindowCoderImpl() T = TypeVar('T') W = TypeVar('W') W2 = TypeVar('W2') IN = TypeVar('IN') OUT = TypeVar('OUT') KEY = TypeVar('KEY') class TriggerResult(Enum): """ Result type for trigger methods. This determines what happens with the window, for example whether the window function should be called, or the window should be discarded. If a :class:`Trigger` returns TriggerResult.FIRE or TriggerResult.FIRE_AND_PURGE but the window does not contain any data the window function will not be invoked, i.e. no data will be produced for the window. - CONTINUE: No action is taken on the window. - FIRE_AND_PURGE: Evaluates the window function and emits the 'window result'. - FIRE: On FIRE, the window is evaluated and results are emitted. The window is not purged though, all elements are retained. - PURGE: All elements in the window are cleared and the window is discarded, without evaluating the window function or emitting any elements. """ CONTINUE = (False, False) FIRE_AND_PURGE = (True, True) FIRE = (True, False) PURGE = (False, True) def is_fire(self) -> bool: return self.value[0] def is_purge(self) -> bool: return self.value[1] class Trigger(ABC, Generic[T, W]): """ A Trigger determines when a pane of a window should be evaluated to emit the results for that part of the window. A pane is the bucket of elements that have the same key (assigned by the KeySelector) and same Window. An element can be in multiple panes if it was assigned to multiple windows by the WindowAssigner. These panes all have their own instance of the Trigger. Triggers must not maintain state internally since they can be re-created or reused for different keys. 
class Trigger(ABC, Generic[T, W]):
    """
    A Trigger determines when a pane of a window should be evaluated to emit the results for
    that part of the window.

    A pane is the bucket of elements that have the same key (assigned by the KeySelector) and
    same Window. An element can be in multiple panes if it was assigned to multiple windows by
    the WindowAssigner. These panes all have their own instance of the Trigger.

    Triggers must not maintain state internally since they can be re-created or reused for
    different keys. All necessary state should be persisted using the state abstraction
    available on the TriggerContext.

    When used with a MergingWindowAssigner the Trigger must return true from :func:`can_merge`
    and :func:`on_merge` must be properly implemented.
    """

    class TriggerContext(ABC):
        """
        A context object that is given to :class:`Trigger` methods to allow them to register
        timer callbacks and deal with state.
        """

        @abstractmethod
        def get_current_processing_time(self) -> int:
            """
            :return: The current processing time.
            """
            pass

        @abstractmethod
        def get_metric_group(self) -> MetricGroup:
            """
            Returns the metric group for this :class:`Trigger`. This is the same metric group
            that would be returned from
            :func:`~pyflink.datastream.functions.RuntimeContext.get_metric_group` in a user
            function.

            :return: The metric group.
            """
            pass

        @abstractmethod
        def get_current_watermark(self) -> int:
            """
            :return: The current watermark time.
            """
            pass

        @abstractmethod
        def register_processing_time_timer(self, time: int) -> None:
            """
            Register a system time callback. When the current system time passes the specified
            time :func:`~Trigger.on_processing_time` is called with the time specified here.

            :param time: The time at which to invoke :func:`~Trigger.on_processing_time`.
            """
            pass

        @abstractmethod
        def register_event_time_timer(self, time: int) -> None:
            """
            Register an event-time callback. When the current watermark passes the specified
            time :func:`~Trigger.on_event_time` is called with the time specified here.

            :param time: The watermark at which to invoke :func:`~Trigger.on_event_time`.
            """
            pass

        @abstractmethod
        def delete_processing_time_timer(self, time: int) -> None:
            """
            Delete the processing time trigger for the given time.
            """
            pass

        @abstractmethod
        def delete_event_time_timer(self, time: int) -> None:
            """
            Delete the event-time trigger for the given time.
            """
            pass

        @abstractmethod
        def get_partitioned_state(self, state_descriptor: StateDescriptor) -> State:
            """
            Retrieves a :class:`State` object that can be used to interact with fault-tolerant
            state that is scoped to the window and key of the current trigger invocation.

            :param state_descriptor: The StateDescriptor that contains the name and type of the
                                     state that is being accessed.
            :return: The partitioned state object.
            """
            pass

    class OnMergeContext(TriggerContext):
        """
        Extension of :class:`TriggerContext` that is given to :func:`~Trigger.on_merge`.
        """

        @abstractmethod
        def merge_partitioned_state(self, state_descriptor: StateDescriptor) -> None:
            pass

    @abstractmethod
    def on_element(self,
                   element: T,
                   timestamp: int,
                   window: W,
                   ctx: 'Trigger.TriggerContext') -> TriggerResult:
        """
        Called for every element that gets added to a pane. The result of this will determine
        whether the pane is evaluated to emit results.

        :param element: The element that arrived.
        :param timestamp: The timestamp of the element that arrived.
        :param window: The window to which the element is being added.
        :param ctx: A context object that can be used to register timer callbacks.
        """
        pass
""" pass @abstractmethod def on_event_time(self, time: int, window: W, ctx: 'Trigger.TriggerContext') -> TriggerResult: """ Called when an event-time timer that was set using the trigger context fires. :param time: The timestamp at which the timer fired. :param window: The window for which the timer fired. :param ctx: A context object that can be used to register timer callbacks. """ pass def can_merge(self) -> bool: """ .. note:: If this returns true you must properly implement :func:`~Trigger.on_merge` :return: True if this trigger supports merging of trigger state and can therefore be used with a MergingWindowAssigner. """ return False @abstractmethod def on_merge(self, window: W, ctx: 'Trigger.OnMergeContext') -> None: """ Called when several windows have been merged into one window by the :class:`WindowAssigner`. :param window: The new window that results from the merge. :param ctx: A context object that can be used to register timer callbacks and access state. """ pass @abstractmethod def clear(self, window: W, ctx: 'Trigger.TriggerContext') -> None: """ Clears any state that the trigger might still hold for the given window. This is called when a window is purged. Timers set using :func:`~TriggerContext.register_event_time_timer` and :func:`~TriggerContext.register_processing_time_timer` should be deleted here as well as state acquired using :func:`~TriggerContext.get_partitioned_state`. """ pass class WindowAssigner(ABC, Generic[T, W]): """ A :class:`WindowAssigner` assigns zero or more :class:`Window` to an element. In a window operation, elements are grouped by their key (if available) and by the windows to which it was assigned. The set of elements with the same key and window is called a pane. When a :class:`Trigger` decides that a certain pane should fire the WindowFunction is applied to produce output elements for that pane. """ class WindowAssignerContext(ABC): """ A context provided to the :class:`WindowAssigner` that allows it to query the current processing time. """ @abstractmethod def get_current_processing_time(self) -> int: """ :return: The current processing time. """ pass @abstractmethod def get_runtime_context(self) -> RuntimeContext: """ :return: The current runtime context. """ pass @abstractmethod def assign_windows(self, element: T, timestamp: int, context: 'WindowAssigner.WindowAssignerContext') -> Collection[W]: """ :param element: The element to which windows should be assigned. :param timestamp: The timestamp of the element. :param context: The :class:`WindowAssignerContext` in which the assigner operates. :return: A collection of windows that should be assigned to the element. """ pass @abstractmethod def get_default_trigger(self, env) -> Trigger[T, W]: """ :param env: The StreamExecutionEnvironment used to compile the DataStream job. :return: The default trigger associated with this :class:`WindowAssigner`. """ pass @abstractmethod def get_window_serializer(self) -> TypeSerializer[W]: """ :return: A :class:`TypeSerializer` for serializing windows that are assigned by this :class:`WindowAssigner`. """ pass @abstractmethod def is_event_time(self) -> bool: """ :return: True if elements are assigned to windows based on event time, false otherwise. """ pass class MergingWindowAssigner(WindowAssigner[T, W]): """ A `WindowAssigner` that can merge windows. """ class MergeCallback(ABC, Generic[W2]): """ Callback to be used in :func:`~MergingWindowAssigner.merge_windows` for specifying which windows should be merged. 
""" @abstractmethod def merge(self, to_be_merged: Iterable[W2], merge_result: W2) -> None: """ Specifies that the given windows should be merged into the result window. :param to_be_merged: The list of windows that should be merged into one window. :param merge_result: The resulting merged window. """ pass @abstractmethod def merge_windows(self, windows: Iterable[W], callback: 'MergingWindowAssigner.MergeCallback[W]') -> None: """ Determines which windows (if any) should be merged. :param windows: The window candidates. :param callback: A callback that can be invoked to signal which windows should be merged. """ pass class WindowOperationDescriptor(object): def __init__(self, assigner: WindowAssigner, trigger: Trigger, allowed_lateness: int, window_state_descriptor: StateDescriptor, window_serializer: TypeSerializer, internal_window_function: InternalWindowFunction): self.assigner = assigner self.trigger = trigger self.allowed_lateness = allowed_lateness self.window_state_descriptor = window_state_descriptor self.internal_window_function = internal_window_function self.window_serializer = window_serializer
{ "content_hash": "82a4c9c9251a51ba0e4d7b7c9b32e30b", "timestamp": "", "source": "github", "line_count": 508, "max_line_length": 100, "avg_line_length": 35.076771653543304, "alnum_prop": 0.6189460688029631, "repo_name": "tillrohrmann/flink", "id": "058311bf6e75882c34e9270cd0417196b692f17d", "size": "18777", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "flink-python/pyflink/datastream/window.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "20448" }, { "name": "Batchfile", "bytes": "1863" }, { "name": "C", "bytes": "847" }, { "name": "Clojure", "bytes": "84400" }, { "name": "Dockerfile", "bytes": "5563" }, { "name": "FreeMarker", "bytes": "86639" }, { "name": "GAP", "bytes": "139514" }, { "name": "HTML", "bytes": "135625" }, { "name": "HiveQL", "bytes": "78611" }, { "name": "Java", "bytes": "83158201" }, { "name": "JavaScript", "bytes": "1829" }, { "name": "Less", "bytes": "65918" }, { "name": "Makefile", "bytes": "5134" }, { "name": "Python", "bytes": "2433935" }, { "name": "Scala", "bytes": "10501870" }, { "name": "Shell", "bytes": "525933" }, { "name": "TypeScript", "bytes": "288472" }, { "name": "q", "bytes": "7406" } ], "symlink_target": "" }
from Util import Util
import mailbox
import json
from errors import *

DEFAULT_MAIL_VIEW_COUNT = 20
MAIL_SIZE_UNLIMITED = -1

class Mail:
    @staticmethod
    def GET(svc, session, params, action):
        if (session is None):
            raise Unauthorized('login first')
        if not session.CheckScope('bbs'):
            raise NoPerm("out of scope")
        if (action == 'list'):
            folder = svc.get_str(params, 'folder', 'inbox')
            start = svc.get_int(params, 'start', 0)
            end = svc.get_int(params, 'end', 0)
            count = svc.get_int(params, 'count', 0)
            svc.writedata(Mail.List(session.GetUser(), folder, start, count, end))
        elif (action == 'view'):
            folder = svc.get_str(params, 'folder', 'inbox')
            index = svc.get_int(params, 'index')
            start = svc.get_int(params, 'start', 0)
            count = svc.get_int(params, 'count', 0)
            if start < 0 or count < 0:
                raise WrongArgs('start or count < 0')
            svc.writedata(Mail.View(session.GetUser(), folder, index, start, count))
        elif (action == 'check_unread'):
            folder = svc.get_str(params, 'folder', 'inbox')
            index = svc.get_int(params, 'index', 0)
            result = {'unread': Mail.CheckUnread(session.GetUser(), folder, index)}
            svc.writedata(json.dumps(result))
        elif action == 'quote':
            folder = svc.get_str(params, 'folder', 'inbox')
            mode = svc.get_str(params, 'mode', 'S')
            index = svc.get_int(params, 'index')
            (title, content) = session.GetUser().mbox.quote_mail(folder, mode, index)
            result = {'title': title, 'content': content}
            svc.writedata(json.dumps(result))
        else:
            raise WrongArgs('unknown action')

    @staticmethod
    def POST(svc, session, params, action):
        if session is None:
            raise Unauthorized('login first')
        if not session.CheckScope('bbs'):
            raise NoPerm('out of scope')
        if action == 'send':
            title = svc.get_str(params, 'title')
            content = svc.get_str(params, 'content')
            receiver_id = svc.get_str(params, 'to')
            signature_id = svc.get_int(params, 'signature_id', 0)
            save_in_sent = svc.get_bool(params, 'save_in_sent', True)
            session.GetUser().SendMailTo(receiver_id, title, content,
                    signature_id, session, save_in_sent)
            result = {'result': 'ok'}
            svc.writedata(json.dumps(result))
        else:
            raise WrongArgs('unknown action')

    @staticmethod
    def List(user, folder, start, count, end):
        mbox = mailbox.MailBox(user.GetName())
        folder = mbox.get_folder(folder)
        total = folder.count()
        start, end = Util.CheckRange(start, end, count, DEFAULT_MAIL_VIEW_COUNT, total)
        if (start <= end and start >= 1 and end <= total):
            result = '{ "start": %d, "end": %d, "mails": [\n' % (start, end)
            first = True
            for i in range(start - 1, end):
                entry = folder.get_entry(i)
                if entry is None:
                    continue
                if not first:
                    result += ',\n'
                post = entry.GetInfo('mail')
                post['id'] = i+1
                result += json.dumps(post)
                first = False
            result += '\n]}'
            return result
        else:
            raise OutOfRange('out of range')

    @staticmethod
    def View(user, folder, index, start, count):
        mbox = mailbox.MailBox(user.GetName())
        folder = mbox.get_folder(folder)
        entry = folder.get_entry(index - 1)
        if (entry is None):
            raise OutOfRange('out of range')
        post = folder.get_content(index - 1)
        if (post is None):
            raise OutOfRange('out of range')
        info = dict(entry.GetInfo().items() + post.GetInfo(start, count).items())
        info['id'] = index
        if not entry.IsRead():
            entry.SetRead(True)
            folder.set_entry(index - 1, entry)
        # session.GetUserInfo().SetMailCheck()
        return json.dumps(info)

    @staticmethod
    def CheckUnread(user, folder = 'inbox', index = 0):
        mbox = mailbox.MailBox(user.GetName())
        folder = mbox.get_folder(folder)
        if (index == 0):
            index = folder.count()
        entry = folder.get_entry(index - 1)
        if (entry is None):
            raise OutOfRange('index out of range')
        return not entry.IsRead()
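# Illustrative note (added; not part of the original handler): Mail.List builds
# its JSON body by hand, so a successful 'list' response has the shape
#
#     { "start": 1, "end": 20, "mails": [
#     {"...entry fields...": "...", "id": 1},
#     {"...entry fields...": "...", "id": 2}
#     ]}
#
# with one object per mailbox entry in the requested range.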
{ "content_hash": "b17147746dd71c8efbdf9eb37dae6648", "timestamp": "", "source": "github", "line_count": 120, "max_line_length": 87, "avg_line_length": 38.425, "alnum_prop": 0.5415311212318369, "repo_name": "HenryHu/pybbs", "id": "319de5549d4ea66993202e36cff7fcd3c1f10a58", "size": "4633", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "mail.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "HTML", "bytes": "18422" }, { "name": "Python", "bytes": "446796" } ], "symlink_target": "" }
import gal_uvw, cv_coord, euler
import numpy as np


def get_uvw_sun(vlsr=220):
    usun, vsun, wsun = -8.5, 13.38 + vlsr, 6.49
    # the signs are in the coord system of gal_uvw
    # e.g. U points toward anticenter
    return usun, vsun, wsun


def correct_pm(ra, dec, pmra, pmdec, dist, vlsr=220):
    """Corrects the proper motion for the speed of the Sun
    Arguments:
        ra - RA in deg
        dec -- Declination in deg
        pmra -- pm in RA in mas/yr
        pmdec -- pm in declination in mas/yr
        dist -- distance in kpc
    Returns:
        (pmra, pmdec) the tuple with the proper motions corrected for the Sun's motion
    """
    one = ra * 0 + 1
    zero = ra * 0
    usun, vsun, wsun = get_uvw_sun(vlsr=vlsr)

    dist_pc = dist * 1000.
    ur, vr, wr = gal_uvw.gal_uvw(distance=dist_pc, ra=ra, dec=dec, pmra=one,
                                 pmdec=zero, vrad=zero)
    ud, vd, wd = gal_uvw.gal_uvw(distance=dist_pc, ra=ra, dec=dec, pmra=zero,
                                 pmdec=one, vrad=zero)

    d_pmra = -(ur * usun + vr * vsun + wr * wsun) / (ur**2 + vr**2 + wr**2)
    d_pmdec = -(ud * usun + vd * vsun + wd * wsun) / (ud**2 + vd**2 + wd**2)
    # d_pmra d_pmdec -- these should be the pm's of the non-moving object
    # as seen from the moving Sun
    return (pmra - d_pmra, pmdec - d_pmdec)


def correct_vel(ra, dec, vel, vlsr=220):
    """Corrects the radial velocity for the speed of the Sun
    Arguments:
        ra - RA in deg
        dec -- Declination in deg
        vel -- radial velocity in km/s
    Returns:
        The radial velocity corrected for the Sun's motion
    """
    l, b = euler.euler(ra, dec)
    l = np.deg2rad(l)
    b = np.deg2rad(b)
    usun, vsun, wsun = get_uvw_sun(vlsr=vlsr)

    delta = -usun * np.cos(l) * np.cos(b) + vsun * np.sin(l) * np.cos(b) + wsun * np.sin(b)
    # projection of the sun's velocity to the line of sight vector
    # notice the first minus -- it is because the usun is in the coord system where
    # X points towards anticenter

    # return the corrected velocity
    return vel + delta
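# Illustrative usage (added; not part of the original module). The inputs below
# are made-up numbers: a star at (ra, dec) = (150, -20) deg, 8 kpc away, with
# an observed proper motion of (2.0, -1.5) mas/yr and a heliocentric radial
# velocity of 30 km/s.
#
#     import numpy as np
#     pmra0, pmdec0 = correct_pm(np.array([150.0]), np.array([-20.0]),
#                                np.array([2.0]), np.array([-1.5]),
#                                np.array([8.0]))
#     vel0 = correct_vel(np.array([150.0]), np.array([-20.0]), np.array([30.0]))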
{ "content_hash": "ee68044f5afff93374494cf81c6c4dc1", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 98, "avg_line_length": 31.047619047619047, "alnum_prop": 0.656441717791411, "repo_name": "plasidu/phoenix4iraf", "id": "6481bcbf7ca21a5f7663e1bc5b9c70fb57ed210d", "size": "1956", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "astrolibpy/my_utils/correct_pm.py", "mode": "33188", "license": "mit", "language": [ { "name": "Gnuplot", "bytes": "155" }, { "name": "Python", "bytes": "258947" }, { "name": "Shell", "bytes": "139" } ], "symlink_target": "" }
from __future__ import absolute_import, division, print_function

import operator
from operator import add, getitem
import inspect
from numbers import Number
from collections import Iterable
from bisect import bisect
from itertools import product, count
from collections import Iterator
from functools import partial, wraps

from toolz.curried import (pipe, partition, concat, unique, pluck, join, first,
                           memoize, map, groupby, valmap, accumulate, merge,
                           curry, reduce, interleave, sliding_window)
import numpy as np
from threading import Lock

from . import chunk
from .slicing import slice_array
from . import numpy_compat
from ..utils import deepmap, ignoring, repr_long_list
from ..compatibility import unicode
from .. import threaded, core
from ..context import _globals


names = ('x_%d' % i for i in count(1))
tokens = ('-%d' % i for i in count(1))


def getarray(a, b, lock=None):
    """ Mimics getitem but includes call to np.asarray

    >>> getarray([1, 2, 3, 4, 5], slice(1, 4))
    array([2, 3, 4])
    """
    if lock:
        lock.acquire()
    try:
        c = a[b]
        if type(c) != np.ndarray:
            c = np.asarray(c)
    finally:
        if lock:
            lock.release()
    return c


from .optimization import optimize


def slices_from_chunks(chunks):
    """ Translate chunks tuple to a set of slices in product order

    >>> slices_from_chunks(((2, 2), (3, 3, 3)))  # doctest: +NORMALIZE_WHITESPACE
    [(slice(0, 2, None), slice(0, 3, None)),
     (slice(0, 2, None), slice(3, 6, None)),
     (slice(0, 2, None), slice(6, 9, None)),
     (slice(2, 4, None), slice(0, 3, None)),
     (slice(2, 4, None), slice(3, 6, None)),
     (slice(2, 4, None), slice(6, 9, None))]
    """
    cumdims = [list(accumulate(add, (0,) + bds[:-1])) for bds in chunks]
    shapes = product(*chunks)
    starts = product(*cumdims)
    return [tuple(slice(s, s+dim) for s, dim in zip(start, shape))
            for start, shape in zip(starts, shapes)]


def getem(arr, chunks, shape=None):
    """ Dask getting various chunks from an array-like

    >>> getem('X', chunks=(2, 3), shape=(4, 6))  # doctest: +SKIP
    {('X', 0, 0): (getarray, 'X', (slice(0, 2), slice(0, 3))),
     ('X', 1, 0): (getarray, 'X', (slice(2, 4), slice(0, 3))),
     ('X', 1, 1): (getarray, 'X', (slice(2, 4), slice(3, 6))),
     ('X', 0, 1): (getarray, 'X', (slice(0, 2), slice(3, 6)))}

    >>> getem('X', chunks=((2, 2), (3, 3)))  # doctest: +SKIP
    {('X', 0, 0): (getarray, 'X', (slice(0, 2), slice(0, 3))),
     ('X', 1, 0): (getarray, 'X', (slice(2, 4), slice(0, 3))),
     ('X', 1, 1): (getarray, 'X', (slice(2, 4), slice(3, 6))),
     ('X', 0, 1): (getarray, 'X', (slice(0, 2), slice(3, 6)))}
    """
    chunks = normalize_chunks(chunks, shape)

    keys = list(product([arr], *[range(len(bds)) for bds in chunks]))

    values = [(getarray, arr, x) for x in slices_from_chunks(chunks)]

    return dict(zip(keys, values))


def dotmany(A, B, leftfunc=None, rightfunc=None, **kwargs):
    """ Dot product of many aligned chunks

    >>> x = np.array([[1, 2], [1, 2]])
    >>> y = np.array([[10, 20], [10, 20]])
    >>> dotmany([x, x, x], [y, y, y])
    array([[ 90, 180],
           [ 90, 180]])

    Optionally pass in functions to apply to the left and right chunks

    >>> dotmany([x, x, x], [y, y, y], rightfunc=np.transpose)
    array([[150, 150],
           [150, 150]])
    """
    if leftfunc:
        A = map(leftfunc, A)
    if rightfunc:
        B = map(rightfunc, B)
    return sum(map(partial(np.dot, **kwargs), A, B))


def lol_tuples(head, ind, values, dummies):
    """ List of list of tuple keys

    Parameters
    ----------
    head : tuple
        The known tuple so far
    ind : Iterable
        An iterable of indices not yet covered
    values : dict
        Known values for non-dummy indices
    dummies : dict
        Ranges of values for dummy indices

    Examples
    --------
    >>> lol_tuples(('x',), 'ij', {'i': 1, 'j': 0}, {})
    ('x', 1, 0)

    >>> lol_tuples(('x',), 'ij', {'i': 1}, {'j': range(3)})
    [('x', 1, 0), ('x', 1, 1), ('x', 1, 2)]

    >>> lol_tuples(('x',), 'ijk', {'i': 1}, {'j': [0, 1, 2], 'k': [0, 1]})  # doctest: +NORMALIZE_WHITESPACE
    [[('x', 1, 0, 0), ('x', 1, 0, 1)],
     [('x', 1, 1, 0), ('x', 1, 1, 1)],
     [('x', 1, 2, 0), ('x', 1, 2, 1)]]
    """
    if not ind:
        return head
    if ind[0] not in dummies:
        return lol_tuples(head + (values[ind[0]],), ind[1:], values, dummies)
    else:
        return [lol_tuples(head + (v,), ind[1:], values, dummies)
                for v in dummies[ind[0]]]


def zero_broadcast_dimensions(lol, nblocks):
    """

    >>> lol = [('x', 1, 0), ('x', 1, 1), ('x', 1, 2)]
    >>> nblocks = (4, 1, 2)  # note singleton dimension in second place
    >>> lol = [[('x', 1, 0, 0), ('x', 1, 0, 1)],
    ...        [('x', 1, 1, 0), ('x', 1, 1, 1)],
    ...        [('x', 1, 2, 0), ('x', 1, 2, 1)]]
    >>> zero_broadcast_dimensions(lol, nblocks)  # doctest: +NORMALIZE_WHITESPACE
    [[('x', 1, 0, 0), ('x', 1, 0, 1)],
     [('x', 1, 0, 0), ('x', 1, 0, 1)],
     [('x', 1, 0, 0), ('x', 1, 0, 1)]]

    See Also
    --------
    lol_tuples
    """
    f = lambda t: (t[0],) + tuple(0 if d == 1 else i
                                  for i, d in zip(t[1:], nblocks))
    return deepmap(f, lol)


def broadcast_dimensions(argpairs, numblocks, sentinels=(1, (1,))):
    """ Find block dimensions from arguments

    Parameters
    ----------
    argpairs: iterable
        name, ijk index pairs
    numblocks: dict
        maps {name: number of blocks}
    sentinels: iterable (optional)
        values for singleton dimensions

    Examples
    --------
    >>> argpairs = [('x', 'ij'), ('y', 'ji')]
    >>> numblocks = {'x': (2, 3), 'y': (3, 2)}
    >>> broadcast_dimensions(argpairs, numblocks)
    {'i': 2, 'j': 3}

    Supports numpy broadcasting rules

    >>> argpairs = [('x', 'ij'), ('y', 'ij')]
    >>> numblocks = {'x': (2, 1), 'y': (1, 3)}
    >>> broadcast_dimensions(argpairs, numblocks)
    {'i': 2, 'j': 3}

    Works in other contexts too

    >>> argpairs = [('x', 'ij'), ('y', 'ij')]
    >>> d = {'x': ('Hello', 1), 'y': (1, (2, 3))}
    >>> broadcast_dimensions(argpairs, d)
    {'i': 'Hello', 'j': (2, 3)}
    """
    # List like [('i', 2), ('j', 1), ('i', 1), ('j', 2)]
    L = concat([zip(inds, dims)
                for (x, inds), (x, dims)
                in join(first, argpairs, first, numblocks.items())])
    g = groupby(0, L)
    g = dict((k, set([d for i, d in v])) for k, v in g.items())

    g2 = dict((k, v - set(sentinels) if len(v) > 1 else v) for k, v in g.items())

    if g2 and not set(map(len, g2.values())) == set([1]):
        raise ValueError("Shapes do not align %s" % g)

    return valmap(first, g2)


def top(func, output, out_indices, *arrind_pairs, **kwargs):
    """ Tensor operation

    Applies a function, ``func``, across blocks from many different input
    dasks.  We arrange the pattern with which those blocks interact with sets
    of matching indices.  E.g.

        top(func, 'z', 'i', 'x', 'i', 'y', 'i')

    yields an embarrassingly parallel communication pattern and is read as

        z_i = func(x_i, y_i)

    More complex patterns may emerge, including multiple indices

        top(func, 'z', 'ij', 'x', 'ij', 'y', 'ji')

        $$ z_{ij} = func(x_{ij}, y_{ji}) $$

    Indices missing in the output but present in the inputs result in many
    inputs being sent to one function (see examples).

    Examples
    --------
    Simple embarrassing map operation

    >>> inc = lambda x: x + 1
    >>> top(inc, 'z', 'ij', 'x', 'ij', numblocks={'x': (2, 2)})  # doctest: +SKIP
    {('z', 0, 0): (inc, ('x', 0, 0)),
     ('z', 0, 1): (inc, ('x', 0, 1)),
     ('z', 1, 0): (inc, ('x', 1, 0)),
     ('z', 1, 1): (inc, ('x', 1, 1))}

    Simple operation on two datasets

    >>> add = lambda x, y: x + y
    >>> top(add, 'z', 'ij', 'x', 'ij', 'y', 'ij', numblocks={'x': (2, 2),
    ...                                                      'y': (2, 2)})  # doctest: +SKIP
    {('z', 0, 0): (add, ('x', 0, 0), ('y', 0, 0)),
     ('z', 0, 1): (add, ('x', 0, 1), ('y', 0, 1)),
     ('z', 1, 0): (add, ('x', 1, 0), ('y', 1, 0)),
     ('z', 1, 1): (add, ('x', 1, 1), ('y', 1, 1))}

    Operation that flips one of the datasets

    >>> addT = lambda x, y: x + y.T  # Transpose each chunk
    >>> # z_ij ~ x_ij y_ji
    >>> # ..         .. notice swap
    >>> top(addT, 'z', 'ij', 'x', 'ij', 'y', 'ji', numblocks={'x': (2, 2),
    ...                                                       'y': (2, 2)})  # doctest: +SKIP
    {('z', 0, 0): (add, ('x', 0, 0), ('y', 0, 0)),
     ('z', 0, 1): (add, ('x', 0, 1), ('y', 1, 0)),
     ('z', 1, 0): (add, ('x', 1, 0), ('y', 0, 1)),
     ('z', 1, 1): (add, ('x', 1, 1), ('y', 1, 1))}

    Dot product with contraction over ``j`` index.  Yields list arguments

    >>> top(dotmany, 'z', 'ik', 'x', 'ij', 'y', 'jk', numblocks={'x': (2, 2),
    ...                                                          'y': (2, 2)})  # doctest: +SKIP
    {('z', 0, 0): (dotmany, [('x', 0, 0), ('x', 0, 1)],
                            [('y', 0, 0), ('y', 1, 0)]),
     ('z', 0, 1): (dotmany, [('x', 0, 0), ('x', 0, 1)],
                            [('y', 0, 1), ('y', 1, 1)]),
     ('z', 1, 0): (dotmany, [('x', 1, 0), ('x', 1, 1)],
                            [('y', 0, 0), ('y', 1, 0)]),
     ('z', 1, 1): (dotmany, [('x', 1, 0), ('x', 1, 1)],
                            [('y', 0, 1), ('y', 1, 1)])}

    Supports Broadcasting rules

    >>> top(add, 'z', 'ij', 'x', 'ij', 'y', 'ij', numblocks={'x': (1, 2),
    ...                                                      'y': (2, 2)})  # doctest: +SKIP
    {('z', 0, 0): (add, ('x', 0, 0), ('y', 0, 0)),
     ('z', 0, 1): (add, ('x', 0, 1), ('y', 0, 1)),
     ('z', 1, 0): (add, ('x', 0, 0), ('y', 1, 0)),
     ('z', 1, 1): (add, ('x', 0, 1), ('y', 1, 1))}
    """
    numblocks = kwargs['numblocks']
    argpairs = list(partition(2, arrind_pairs))

    assert set(numblocks) == set(pluck(0, argpairs))

    all_indices = pipe(argpairs, pluck(1), concat, set)
    dummy_indices = all_indices - set(out_indices)

    # Dictionary mapping {i: 3, j: 4, ...} for i, j, ... the dimensions
    dims = broadcast_dimensions(argpairs, numblocks)

    # (0, 0), (0, 1), (0, 2), (1, 0), ...
    keytups = list(product(*[range(dims[i]) for i in out_indices]))
    # {i: 0, j: 0}, {i: 0, j: 1}, ...
    keydicts = [dict(zip(out_indices, tup)) for tup in keytups]

    # {j: [1, 2, 3], ...}  For j a dummy index of dimension 3
    dummies = dict((i, list(range(dims[i]))) for i in dummy_indices)

    # Create argument lists
    valtups = []
    for kd in keydicts:
        args = []
        for arg, ind in argpairs:
            tups = lol_tuples((arg,), ind, kd, dummies)
            tups2 = zero_broadcast_dimensions(tups, numblocks[arg])
            args.append(tups2)
        valtups.append(tuple(args))

    # Add heads to tuples
    keys = [(output,) + kt for kt in keytups]
    vals = [(func,) + vt for vt in valtups]

    return dict(zip(keys, vals))


def _concatenate2(arrays, axes=[]):
    """ Recursively Concatenate nested lists of arrays along axes

    Each entry in axes corresponds to each level of the nested list.  The
    length of axes should correspond to the level of nesting of arrays.
    >>> x = np.array([[1, 2], [3, 4]])
    >>> _concatenate2([x, x], axes=[0])
    array([[1, 2],
           [3, 4],
           [1, 2],
           [3, 4]])

    >>> _concatenate2([x, x], axes=[1])
    array([[1, 2, 1, 2],
           [3, 4, 3, 4]])

    >>> _concatenate2([[x, x], [x, x]], axes=[0, 1])
    array([[1, 2, 1, 2],
           [3, 4, 3, 4],
           [1, 2, 1, 2],
           [3, 4, 3, 4]])

    Supports Iterators
    >>> _concatenate2(iter([x, x]), axes=[1])
    array([[1, 2, 1, 2],
           [3, 4, 3, 4]])
    """
    if isinstance(arrays, Iterator):
        arrays = list(arrays)
    if len(axes) > 1:
        arrays = [_concatenate2(a, axes=axes[1:]) for a in arrays]
    return np.concatenate(arrays, axis=axes[0])


def rec_concatenate(arrays, axis=0):
    """ Recursive np.concatenate

    >>> x = np.array([1, 2])
    >>> rec_concatenate([[x, x], [x, x], [x, x]])
    array([[1, 2, 1, 2],
           [1, 2, 1, 2],
           [1, 2, 1, 2]])
    """
    if not arrays:
        return np.array([])
    if isinstance(arrays, Iterator):
        arrays = list(arrays)
    if isinstance(arrays[0], Iterator):
        arrays = list(map(list, arrays))
    if not isinstance(arrays[0], np.ndarray) and not hasattr(arrays[0], '__array__'):
        arrays = [rec_concatenate(a, axis=axis + 1) for a in arrays]
    if arrays[0].ndim <= axis:
        arrays = [a[None, ...] for a in arrays]
    if len(arrays) == 1:
        return arrays[0]
    return np.concatenate(arrays, axis=axis)


def map_blocks(x, func, chunks=None, dtype=None):
    """ Map a function across all blocks of a dask array

    You must also specify the chunks of the resulting array.  If you don't
    then we assume that the resulting array has the same block structure as
    the input.

    >>> import dask.array as da
    >>> x = da.ones((8,), chunks=(4,))

    >>> np.array(x.map_blocks(lambda x: x + 1))
    array([ 2.,  2.,  2.,  2.,  2.,  2.,  2.,  2.])

    If function changes shape of the blocks provide a chunks

    >>> y = x.map_blocks(lambda x: x[::2], chunks=(2,))

    Or, if the result is ragged, provide a chunks

    >>> y = x.map_blocks(lambda x: x[::2], chunks=((2, 2),))

    Your block function can learn where in the array it is if it supports a
    block_id keyword argument.  This will receive entries like (2, 0, 1), the
    position of the block in the dask array.

    >>> def func(block, block_id=None):
    ...     pass
    """
    if not chunks:
        chunks = x.chunks
    elif not isinstance(chunks[0], tuple):
        chunks = tuple([nb * (bs,) for nb, bs in zip(x.numblocks, chunks)])

    name = next(names)

    try:
        spec = inspect.getargspec(func)
    except:
        spec = None

    if spec and 'block_id' in spec.args:
        dsk = dict(((name,) + k[1:], (partial(func, block_id=k[1:]), k))
                   for k in core.flatten(x._keys()))
    else:
        dsk = dict(((name,) + k[1:], (func, k)) for k in core.flatten(x._keys()))

    return Array(merge(dsk, x.dask), name, chunks, dtype=dtype)


@wraps(np.squeeze)
def squeeze(a, axis=None):
    if axis is None:
        axis = tuple(i for i, d in enumerate(a.shape) if d == 1)
    b = a.map_blocks(partial(np.squeeze, axis=axis), dtype=a.dtype)
    chunks = tuple(bd for bd in b.chunks if bd != (1,))
    old_keys = list(product([b.name], *[range(len(bd)) for bd in b.chunks]))
    new_keys = list(product([b.name], *[range(len(bd)) for bd in chunks]))
    dsk = b.dask.copy()
    for o, n in zip(old_keys, new_keys):
        dsk[n] = dsk[o]
        del dsk[o]
    return Array(dsk, b.name, chunks, dtype=a.dtype)


def topk(k, x):
    """ The top k elements of an array

    Returns the k greatest elements of the array in sorted order.  Only works
    on arrays of a single dimension.

    >>> x = np.array([5, 1, 3, 6])
    >>> d = from_array(x, chunks=2)
    >>> d.topk(2).compute()
    array([6, 5])

    Runs in near linear time, returns all results in a single chunk so
    all k elements must fit in memory.
""" if x.ndim != 1: raise ValueError("Topk only works on arrays of one dimension") name = next(names) dsk = dict(((name, i), (chunk.topk, k, key)) for i, key in enumerate(x._keys())) name2 = next(names) dsk[(name2, 0)] = (getitem, (np.sort, (np.concatenate, (list, list(dsk)))), slice(-1, -k - 1, -1)) chunks = ((k,),) return Array(merge(dsk, x.dask), name2, chunks, dtype=x.dtype) def compute(*args, **kwargs): """ Evaluate several dask arrays at once The result of this function is always a tuple of numpy arrays. To evaluate a single dask array into a numpy array, use ``myarray.compute()`` or simply ``np.array(myarray)``. Examples -------- >>> import dask.array as da >>> d = da.ones((4, 4), chunks=(2, 2)) >>> a = d + 1 # two different dask arrays >>> b = d + 2 >>> A, B = da.compute(a, b) # Compute both simultaneously """ dsk = merge(*[arg.dask for arg in args]) keys = [arg._keys() for arg in args] results = get(dsk, keys, **kwargs) results2 = tuple(rec_concatenate(x) if arg.shape else unpack_singleton(x) for x, arg in zip(results, args)) return results2 def store(sources, targets, **kwargs): """ Store dask arrays in array-like objects, overwrite data in target This stores dask arrays into object that supports numpy-style setitem indexing. It stores values chunk by chunk so that it does not have to fill up memory. For best performance you can align the block size of the storage target with the block size of your array. If your data fits in memory then you may prefer calling ``np.array(myarray)`` instead. Parameters ---------- sources: Array or iterable of Arrays targets: array-like or iterable of array-likes These should support setitem syntax ``target[10:20] = ...`` Examples -------- >>> x = ... # doctest: +SKIP >>> import h5py # doctest: +SKIP >>> f = h5py.File('myfile.hdf5') # doctest: +SKIP >>> dset = f.create_dataset('/data', shape=x.shape, ... chunks=x.chunks, ... 
    ...                         dtype='f8')  # doctest: +SKIP

    >>> store(x, dset)  # doctest: +SKIP

    Alternatively store many arrays at the same time

    >>> store([x, y, z], [dset1, dset2, dset3])  # doctest: +SKIP
    """
    if isinstance(sources, Array):
        sources = [sources]
        targets = [targets]

    if any(not isinstance(s, Array) for s in sources):
        raise ValueError("All sources must be dask array objects")

    if len(sources) != len(targets):
        raise ValueError("Different number of sources [%d] and targets [%d]"
                         % (len(sources), len(targets)))

    updates = [insert_to_ooc(tgt, src) for tgt, src in zip(targets, sources)]
    dsk = merge([src.dask for src in sources] + updates)
    keys = [key for u in updates for key in u]
    get(dsk, keys, **kwargs)


def blockdims_from_blockshape(shape, blockshape):
    """
    >>> blockdims_from_blockshape((10, 10), (4, 3))
    ((4, 4, 2), (3, 3, 3, 1))
    """
    if blockshape is None:
        raise TypeError("Must supply chunks= keyword argument")
    if shape is None:
        raise TypeError("Must supply shape= keyword argument")
    return tuple((bd,) * (d // bd) + ((d % bd,) if d % bd else ())
                 for d, bd in zip(shape, blockshape))


class Array(object):
    """ Parallel Array

    Parameters
    ----------
    dask : dict
        Task dependency graph
    name : string
        Name of array in dask
    shape : tuple of ints
        Shape of the entire array
    chunks: iterable of tuples
        block sizes along each dimension
    """

    __slots__ = 'dask', 'name', 'chunks', '_dtype'

    def __init__(self, dask, name, chunks, dtype=None, shape=None):
        self.dask = dask
        self.name = name
        self.chunks = normalize_chunks(chunks, shape)
        if dtype is not None:
            dtype = np.dtype(dtype)
        self._dtype = dtype

    @property
    def _args(self):
        return (self.dask, self.name, self.chunks, self.dtype)

    @property
    def numblocks(self):
        return tuple(map(len, self.chunks))

    @property
    def shape(self):
        return tuple(map(sum, self.chunks))

    def __len__(self):
        return sum(self.chunks[0])

    def _visualize(self, optimize_graph=False):
        from dask.dot import dot_graph
        if optimize_graph:
            dot_graph(optimize(self.dask, self._keys()))
        else:
            dot_graph(self.dask)

    @property
    @memoize(key=lambda args, kwargs: (id(args[0]), args[0].name, args[0].chunks))
    def dtype(self):
        if self._dtype is not None:
            return self._dtype
        if self.shape:
            return self[(0,) * self.ndim].compute().dtype
        else:
            return self.compute().dtype

    def __repr__(self):
        chunks = '(' + ', '.join(map(repr_long_list, self.chunks)) + ')'
        return ("dask.array<%s, shape=%s, chunks=%s, dtype=%s>" %
                (self.name, self.shape, chunks, self._dtype))

    @property
    def ndim(self):
        return len(self.shape)

    @property
    def size(self):
        """ Number of elements in array """
        return np.prod(self.shape)

    @property
    def nbytes(self):
        """ Number of bytes in array """
        return self.size * self.dtype.itemsize

    def _keys(self, *args):
        if self.ndim == 0:
            return [(self.name,)]
        ind = len(args)
        if ind + 1 == self.ndim:
            return [(self.name,) + args + (i,)
                    for i in range(self.numblocks[ind])]
        else:
            return [self._keys(*(args + (i,)))
                    for i in range(self.numblocks[ind])]

    def __array__(self, dtype=None, **kwargs):
        x = self.compute()
        if dtype and x.dtype != dtype:
            x = x.astype(dtype)
        if not isinstance(x, np.ndarray):
            x = np.array(x)
        return x

    @wraps(store)
    def store(self, target, **kwargs):
        return store([self], [target], **kwargs)

    def to_hdf5(self, filename, datapath, **kwargs):
        """ Store array in HDF5 file

        >>> x.to_hdf5('myfile.hdf5', '/x')  # doctest: +SKIP

        Optionally provide arguments as though to ``h5py.File.create_dataset``

        >>> x.to_hdf5('myfile.hdf5', '/x', compression='lzf', shuffle=True)  # doctest: +SKIP

        See also
        --------
        da.store
        h5py.File.create_dataset
        """
        import h5py
        with h5py.File(filename) as f:
            if 'chunks' not in kwargs:
                kwargs['chunks'] = tuple([c[0] for c in self.chunks])
            d = f.require_dataset(datapath, shape=self.shape,
                                  dtype=self.dtype, **kwargs)

        slices = slices_from_chunks(self.chunks)

        name = next(names)
        dsk = dict(((name,) + t[1:],
                    (write_hdf5_chunk, filename, datapath, slc, t))
                   for t, slc in zip(core.flatten(self._keys()), slices))

        myget = kwargs.get('get', get)
        myget(merge(dsk, self.dask), list(dsk.keys()))

    @wraps(compute)
    def compute(self, **kwargs):
        result, = compute(self, **kwargs)
        return result

    def __int__(self):
        return int(self.compute())

    def __bool__(self):
        return bool(self.compute())
    __nonzero__ = __bool__  # python 2

    def __float__(self):
        return float(self.compute())

    def __complex__(self):
        return complex(self.compute())

    def __getitem__(self, index):
        # Field access, e.g. x['a'] or x[['a', 'b']]
        if (isinstance(index, (str, unicode)) or
            (isinstance(index, list) and
             all(isinstance(i, (str, unicode)) for i in index))):
            if self._dtype is not None and isinstance(index, (str, unicode)):
                dt = self._dtype[index]
            elif self._dtype is not None and isinstance(index, list):
                dt = np.dtype([(name, self._dtype[name]) for name in index])
            else:
                dt = None
            return elemwise(getarray, self, index, dtype=dt)

        # Slicing
        out = next(names)
        if not isinstance(index, tuple):
            index = (index,)

        if all(isinstance(i, slice) and i == slice(None) for i in index):
            return self

        dsk, chunks = slice_array(out, self.name, self.chunks, index)

        return Array(merge(self.dask, dsk), out, chunks, dtype=self._dtype)

    @wraps(np.dot)
    def dot(self, other):
        return tensordot(self, other, axes=((self.ndim-1,), (other.ndim-2,)))

    @property
    def T(self):
        return transpose(self)

    @wraps(np.transpose)
    def transpose(self, axes=None):
        return transpose(self, axes)

    @wraps(topk)
    def topk(self, k):
        return topk(k, self)

    def astype(self, dtype, **kwargs):
        """ Copy of the array, cast to a specified type """
        return elemwise(partial(np.ndarray.astype, dtype=dtype, **kwargs),
                        self, dtype=dtype)

    def __abs__(self):
        return elemwise(operator.abs, self)

    def __add__(self, other):
        return elemwise(operator.add, self, other)

    def __radd__(self, other):
        return elemwise(operator.add, other, self)

    def __and__(self, other):
        return elemwise(operator.and_, self, other)

    def __rand__(self, other):
        return elemwise(operator.and_, other, self)

    def __div__(self, other):
        return elemwise(operator.div, self, other)

    def __rdiv__(self, other):
        return elemwise(operator.div, other, self)

    def __eq__(self, other):
        return elemwise(operator.eq, self, other)

    def __gt__(self, other):
        return elemwise(operator.gt, self, other)

    def __ge__(self, other):
        return elemwise(operator.ge, self, other)

    def __invert__(self):
        return elemwise(operator.invert, self)

    def __lshift__(self, other):
        return elemwise(operator.lshift, self, other)

    def __rlshift__(self, other):
        return elemwise(operator.lshift, other, self)

    def __lt__(self, other):
        return elemwise(operator.lt, self, other)

    def __le__(self, other):
        return elemwise(operator.le, self, other)

    def __mod__(self, other):
        return elemwise(operator.mod, self, other)

    def __rmod__(self, other):
        return elemwise(operator.mod, other, self)

    def __mul__(self, other):
        return elemwise(operator.mul, self, other)

    def __rmul__(self, other):
        return elemwise(operator.mul, other, self)

    def __ne__(self, other):
        return elemwise(operator.ne, self, other)

    def __neg__(self):
        return elemwise(operator.neg, self)

    def __or__(self, other):
        return elemwise(operator.or_, self, other)

    def __pos__(self):
        return self

    def __ror__(self, other):
        return elemwise(operator.or_, other, self)

    def __pow__(self, other):
        return elemwise(operator.pow, self, other)

    def __rpow__(self, other):
        return elemwise(operator.pow, other, self)

    def __rshift__(self, other):
        return elemwise(operator.rshift, self, other)

    def __rrshift__(self, other):
        return elemwise(operator.rshift, other, self)

    def __sub__(self, other):
        return elemwise(operator.sub, self, other)

    def __rsub__(self, other):
        return elemwise(operator.sub, other, self)

    def __truediv__(self, other):
        return elemwise(operator.truediv, self, other)

    def __rtruediv__(self, other):
        return elemwise(operator.truediv, other, self)

    def __floordiv__(self, other):
        return elemwise(operator.floordiv, self, other)

    def __rfloordiv__(self, other):
        return elemwise(operator.floordiv, other, self)

    def __xor__(self, other):
        return elemwise(operator.xor, self, other)

    def __rxor__(self, other):
        return elemwise(operator.xor, other, self)

    @wraps(np.any)
    def any(self, axis=None, keepdims=False):
        from .reductions import any
        return any(self, axis=axis, keepdims=keepdims)

    @wraps(np.all)
    def all(self, axis=None, keepdims=False):
        from .reductions import all
        return all(self, axis=axis, keepdims=keepdims)

    @wraps(np.min)
    def min(self, axis=None, keepdims=False):
        from .reductions import min
        return min(self, axis=axis, keepdims=keepdims)

    @wraps(np.max)
    def max(self, axis=None, keepdims=False):
        from .reductions import max
        return max(self, axis=axis, keepdims=keepdims)

    @wraps(np.argmin)
    def argmin(self, axis=None):
        from .reductions import argmin
        return argmin(self, axis=axis)

    @wraps(np.argmax)
    def argmax(self, axis=None):
        from .reductions import argmax
        return argmax(self, axis=axis)

    @wraps(np.sum)
    def sum(self, axis=None, keepdims=False):
        from .reductions import sum
        return sum(self, axis=axis, keepdims=keepdims)

    @wraps(np.prod)
    def prod(self, axis=None, keepdims=False):
        from .reductions import prod
        return prod(self, axis=axis, keepdims=keepdims)

    @wraps(np.mean)
    def mean(self, axis=None, keepdims=False):
        from .reductions import mean
        return mean(self, axis=axis, keepdims=keepdims)

    @wraps(np.std)
    def std(self, axis=None, keepdims=False, ddof=0):
        from .reductions import std
        return std(self, axis=axis, keepdims=keepdims, ddof=ddof)

    @wraps(np.var)
    def var(self, axis=None, keepdims=False, ddof=0):
        from .reductions import var
        return var(self, axis=axis, keepdims=keepdims, ddof=ddof)

    def vnorm(self, ord=None, axis=None, keepdims=False):
        """ Vector norm """
        from .reductions import vnorm
        return vnorm(self, ord=ord, axis=axis, keepdims=keepdims)

    @wraps(map_blocks)
    def map_blocks(self, func, chunks=None, dtype=None):
        return map_blocks(self, func, chunks, dtype=dtype)

    def map_overlap(self, func, depth, boundary=None, trim=True, **kwargs):
        """ Map a function over blocks of the array with some overlap

        We share neighboring zones between blocks of the array, then map a
        function, then trim away the neighboring strips.

        Parameters
        ----------
        func: function
            The function to apply to each extended block
        depth: int, tuple, or dict
            The number of cells that each block should share with its neighbors
            If a tuple or dict this can be different per axis
        boundary: str
            how to handle the boundaries.  Values include 'reflect',
            'periodic' or any constant value like 0 or np.nan
        trim: bool
            Whether or not to trim the excess after the map function.  Set
            this to false if your mapping function does this for you.
        **kwargs:
            Other keyword arguments valid in ``map_blocks``

        Examples
        --------
        >>> x = np.array([1, 1, 2, 3, 3, 3, 2, 1, 1])
        >>> x = from_array(x, chunks=5)

        >>> def derivative(x):
        ...     return x - np.roll(x, 1)

        >>> y = x.map_overlap(derivative, depth=1, boundary=0)
        >>> y.compute()
        array([ 1,  0,  1,  1,  0,  0, -1, -1,  0])
        """
        from .ghost import map_overlap
        return map_overlap(self, func, depth, boundary, trim, **kwargs)

    @wraps(squeeze)
    def squeeze(self):
        return squeeze(self)

    def rechunk(self, chunks):
        from .rechunk import rechunk
        return rechunk(self, chunks)


def normalize_chunks(chunks, shape=None):
    """ Normalize chunks to tuple of tuples

    >>> normalize_chunks((2, 2), shape=(5, 6))
    ((2, 2, 1), (2, 2, 2))

    >>> normalize_chunks(((2, 2, 1), (2, 2, 2)), shape=(4, 6))  # Idempotent
    ((2, 2, 1), (2, 2, 2))

    >>> normalize_chunks([[2, 2], [3, 3]])  # Cleans up lists to tuples
    ((2, 2), (3, 3))

    >>> normalize_chunks(10, shape=(30, 5))  # Supports integer inputs
    ((10, 10, 10), (5,))

    >>> normalize_chunks((), shape=(0, 0))  # respects null dimensions
    ((), ())
    """
    if isinstance(chunks, list):
        chunks = tuple(chunks)
    if isinstance(chunks, Number):
        chunks = (chunks,) * len(shape)
    if not chunks:
        if shape is None:
            chunks = ()
        else:
            chunks = ((),) * len(shape)

    if chunks and not isinstance(chunks[0], (tuple, list)):
        chunks = blockdims_from_blockshape(shape, chunks)

    chunks = tuple(map(tuple, chunks))

    return chunks


def from_array(x, chunks, name=None, lock=False, **kwargs):
    """ Create dask array from something that looks like an array

    Input must have a ``.shape`` and support numpy-style slicing.

    The ``chunks`` argument must be one of the following forms:

    -   a blocksize like 1000
    -   a blockshape like (1000, 1000)
    -   explicit sizes of all blocks along all dimensions
        like ((1000, 1000, 500), (400, 400)).

    Examples
    --------
    >>> x = h5py.File('...')['/data/path']  # doctest: +SKIP
    >>> a = da.from_array(x, chunks=(1000, 1000))  # doctest: +SKIP

    If your underlying datastore does not support concurrent reads then
    include the ``lock=True`` keyword argument or ``lock=mylock`` if you want
    multiple arrays to coordinate around the same lock.

    >>> a = da.from_array(x, chunks=(1000, 1000), lock=True)  # doctest: +SKIP
    """
    chunks = normalize_chunks(chunks, x.shape)
    name = name or next(names)
    dsk = getem(name, chunks)
    if lock is True:
        lock = Lock()
    if lock:
        dsk = dict((k, v + (lock,)) for k, v in dsk.items())
    return Array(merge({name: x}, dsk), name, chunks, dtype=x.dtype)


def atop(func, out, out_ind, *args, **kwargs):
    """ Array object version of dask.array.top """
    dtype = kwargs.get('dtype', None)
    arginds = list(partition(2, args))  # [x, ij, y, jk] -> [(x, ij), (y, jk)]

    numblocks = dict([(a.name, a.numblocks) for a, ind in arginds])
    argindsstr = list(concat([(a.name, ind) for a, ind in arginds]))

    dsk = top(func, out, out_ind, *argindsstr, numblocks=numblocks)

    # Dictionary mapping {i: 3, j: 4, ...} for i, j, ... the dimensions
    shapes = dict((a.name, a.shape) for a, _ in arginds)
    nameinds = [(a.name, i) for a, i in arginds]
    dims = broadcast_dimensions(nameinds, shapes)
    shape = tuple(dims[i] for i in out_ind)

    blockdim_dict = dict((a.name, a.chunks) for a, _ in arginds)
    chunkss = broadcast_dimensions(nameinds, blockdim_dict)
    chunks = tuple(chunkss[i] for i in out_ind)

    dsks = [a.dask for a, _ in arginds]
    return Array(merge(dsk, *dsks), out, chunks, dtype=dtype)
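# Illustrative sketch (added; not part of the original module): atop() is the
# Array-level counterpart of top().  An embarrassingly parallel increment over
# a 2-d array could be written as
#
#     x = from_array(np.ones((4, 4)), chunks=(2, 2))
#     y = atop(lambda b: b + 1, next(names), 'ij', x, 'ij', dtype=x.dtype)
#
# where using the index pattern 'ij' for both output and input requests a
# block-wise, index-aligned application.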
Use custom score function """ get = get or _globals['get'] or threaded.get dsk2 = optimize(dsk, keys, **kwargs) return get(dsk2, keys, **kwargs) def unpack_singleton(x): """ >>> unpack_singleton([[[[1]]]]) 1 >>> unpack_singleton(np.array(np.datetime64('2000-01-01'))) array(datetime.date(2000, 1, 1), dtype='datetime64[D]') """ while True: try: x = x[0] except (IndexError, TypeError, KeyError): break return x stacked_names = ('stack-%d' % i for i in count(1)) def stack(seq, axis=0): """ Stack arrays along a new axis Given a sequence of dask Arrays form a new dask Array by stacking them along a new dimension (axis=0 by default) Examples -------- Create slices >>> import dask.array as da >>> import numpy as np >>> data = [from_array(np.ones((4, 4)), chunks=(2, 2)) ... for i in range(3)] >>> x = da.stack(data, axis=0) >>> x.shape (3, 4, 4) >>> da.stack(data, axis=1).shape (4, 3, 4) >>> da.stack(data, axis=-1).shape (4, 4, 3) Result is a new dask Array See Also -------- concatenate """ n = len(seq) ndim = len(seq[0].shape) if axis < 0: axis = ndim + axis + 1 if axis > ndim: raise ValueError("Axis must not be greater than number of dimensions" "\nData has %d dimensions, but got axis=%d" % (ndim, axis)) assert len(set(a.chunks for a in seq)) == 1 # same chunks shape = seq[0].shape[:axis] + (len(seq),) + seq[0].shape[axis:] chunks = ( seq[0].chunks[:axis] + ((1,) * n,) + seq[0].chunks[axis:]) name = next(stacked_names) keys = list(product([name], *[range(len(bd)) for bd in chunks])) names = [a.name for a in seq] inputs = [(names[key[axis+1]],) + key[1:axis + 1] + key[axis + 2:] for key in keys] values = [(getarray, inp, (slice(None, None, None),) * axis + (None,) + (slice(None, None, None),) * (ndim - axis)) for inp in inputs] dsk = dict(zip(keys, values)) dsk2 = merge(dsk, *[a.dask for a in seq]) if all(a._dtype is not None for a in seq): dt = reduce(np.promote_types, [a._dtype for a in seq]) else: dt = None return Array(dsk2, name, chunks, dtype=dt) concatenate_names = ('concatenate-%d' % i for i in count(1)) def concatenate(seq, axis=0): """ Concatenate arrays along an existing axis Given a sequence of dask Arrays form a new dask Array by stacking them along an existing dimension (axis=0 by default) Examples -------- Create slices >>> import dask.array as da >>> import numpy as np >>> data = [from_array(np.ones((4, 4)), chunks=(2, 2)) ... 
for i in range(3)]
    >>> x = da.concatenate(data, axis=0)
    >>> x.shape
    (12, 4)

    >>> da.concatenate(data, axis=1).shape
    (4, 12)

    Result is a new dask Array

    See Also
    --------
    stack
    """
    n = len(seq)
    ndim = len(seq[0].shape)
    if axis < 0:
        axis = ndim + axis
    if axis >= ndim:
        raise ValueError("Axis must be less than the number of dimensions"
                         "\nData has %d dimensions, but got axis=%d"
                         % (ndim, axis))

    bds = [a.chunks for a in seq]

    if not all(len(set(bds[i][j] for i in range(n))) == 1
               for j in range(len(bds[0])) if j != axis):
        raise ValueError("Block shapes do not align")

    shape = (seq[0].shape[:axis]
             + (sum(a.shape[axis] for a in seq),)
             + seq[0].shape[axis + 1:])
    chunks = (seq[0].chunks[:axis]
              + (sum([bd[axis] for bd in bds], ()),)
              + seq[0].chunks[axis + 1:])

    name = next(concatenate_names)
    keys = list(product([name], *[range(len(bd)) for bd in chunks]))

    cum_dims = [0] + list(accumulate(add, [len(a.chunks[axis]) for a in seq]))
    names = [a.name for a in seq]
    values = [(names[bisect(cum_dims, key[axis + 1]) - 1],)
              + key[1:axis + 1]
              + (key[axis + 1] - cum_dims[bisect(cum_dims, key[axis + 1]) - 1],)
              + key[axis + 2:]
              for key in keys]

    dsk = dict(zip(keys, values))
    dsk2 = merge(dsk, *[a.dask for a in seq])

    if all(a._dtype is not None for a in seq):
        dt = reduce(np.promote_types, [a._dtype for a in seq])
    else:
        dt = None

    return Array(dsk2, name, chunks, dtype=dt)


@wraps(np.take)
def take(a, indices, axis):
    if not -a.ndim <= axis < a.ndim:
        raise ValueError('axis=(%s) out of bounds' % axis)
    if axis < 0:
        axis += a.ndim
    return a[(slice(None),) * axis + (indices,)]


@wraps(np.transpose)
def transpose(a, axes=None):
    axes = axes or tuple(range(a.ndim))[::-1]
    return atop(curry(np.transpose, axes=axes),
                next(names), axes,
                a, tuple(range(a.ndim)), dtype=a._dtype)


@curry
def many(a, b, binop=None, reduction=None, **kwargs):
    """
    Apply binary operator pairwise to sequences, then reduce.
>>> many([1, 2, 3], [10, 20, 30], mul, sum) # dot product 140 """ return reduction(map(curry(binop, **kwargs), a, b)) alphabet = 'abcdefghijklmnopqrstuvwxyz' ALPHABET = alphabet.upper() @wraps(np.tensordot) def tensordot(lhs, rhs, axes=2): if isinstance(axes, Iterable): left_axes, right_axes = axes else: left_axes = tuple(range(lhs.ndim - 1, lhs.ndim - axes - 1, -1)) right_axes = tuple(range(0, axes)) if isinstance(left_axes, int): left_axes = (left_axes,) if isinstance(right_axes, int): right_axes = (right_axes,) if isinstance(left_axes, list): left_axes = tuple(left_axes) if isinstance(right_axes, list): right_axes = tuple(right_axes) if len(left_axes) > 1: raise NotImplementedError("Simultaneous Contractions of multiple " "indices not yet supported") left_index = list(alphabet[:lhs.ndim]) right_index = list(ALPHABET[:rhs.ndim]) out_index = left_index + right_index for l, r in zip(left_axes, right_axes): out_index.remove(right_index[r]) out_index.remove(left_index[l]) right_index[r] = left_index[l] if lhs._dtype is not None and rhs._dtype is not None : dt = np.promote_types(lhs._dtype, rhs._dtype) else: dt = None func = many(binop=np.tensordot, reduction=sum, axes=(left_axes, right_axes)) return atop(func, next(names), out_index, lhs, tuple(left_index), rhs, tuple(right_index), dtype=dt) def insert_to_ooc(out, arr): lock = Lock() def store(x, index): with lock: out[index] = np.asanyarray(x) return None slices = slices_from_chunks(arr.chunks) name = 'store-%s' % arr.name dsk = dict(((name,) + t[1:], (store, t, slc)) for t, slc in zip(core.flatten(arr._keys()), slices)) return dsk def partial_by_order(op, other): """ >>> f = partial_by_order(add, [(1, 10)]) >>> f(5) 15 """ def f(*args): args2 = list(args) for i, arg in other: args2.insert(i, arg) return op(*args2) return f def elemwise(op, *args, **kwargs): """ Apply elementwise function across arguments Respects broadcasting rules Examples -------- >>> elemwise(add, x, y) # doctest: +SKIP >>> elemwise(sin, x) # doctest: +SKIP See also -------- atop """ name = kwargs.get('name') or next(names) out_ndim = max(len(arg.shape) if isinstance(arg, Array) else 0 for arg in args) expr_inds = tuple(range(out_ndim))[::-1] arrays = [arg for arg in args if isinstance(arg, Array)] other = [(i, arg) for i, arg in enumerate(args) if not isinstance(arg, Array)] if 'dtype' in kwargs: dt = kwargs['dtype'] elif not all(a._dtype is not None for a in arrays): dt = None else: vals = [np.empty((1,) * a.ndim, dtype=a.dtype) if hasattr(a, 'dtype') else a for a in args] try: dt = op(*vals).dtype except AttributeError: dt = None if other: op2 = partial_by_order(op, other) else: op2 = op return atop(op2, name, expr_inds, *concat((a, tuple(range(a.ndim)[::-1])) for a in arrays), dtype=dt) def wrap_elemwise(func, **kwargs): """ Wrap up numpy function into dask.array """ f = partial(elemwise, func, **kwargs) f.__doc__ = func.__doc__ f.__name__ = func.__name__ return f # ufuncs, copied from this page: # http://docs.scipy.org/doc/numpy/reference/ufuncs.html # math operations logaddexp = wrap_elemwise(np.logaddexp) logaddexp2 = wrap_elemwise(np.logaddexp2) conj = wrap_elemwise(np.conj) exp = wrap_elemwise(np.exp) log = wrap_elemwise(np.log) log2 = wrap_elemwise(np.log2) log10 = wrap_elemwise(np.log10) log1p = wrap_elemwise(np.log1p) expm1 = wrap_elemwise(np.expm1) sqrt = wrap_elemwise(np.sqrt) square = wrap_elemwise(np.square) # trigonometric functions sin = wrap_elemwise(np.sin) cos = wrap_elemwise(np.cos) tan = wrap_elemwise(np.tan) arcsin = wrap_elemwise(np.arcsin) 
arccos = wrap_elemwise(np.arccos)
arctan = wrap_elemwise(np.arctan)
arctan2 = wrap_elemwise(np.arctan2)
hypot = wrap_elemwise(np.hypot)
sinh = wrap_elemwise(np.sinh)
cosh = wrap_elemwise(np.cosh)
tanh = wrap_elemwise(np.tanh)
arcsinh = wrap_elemwise(np.arcsinh)
arccosh = wrap_elemwise(np.arccosh)
arctanh = wrap_elemwise(np.arctanh)
deg2rad = wrap_elemwise(np.deg2rad)
rad2deg = wrap_elemwise(np.rad2deg)

# comparison functions
logical_and = wrap_elemwise(np.logical_and, dtype='bool')
logical_or = wrap_elemwise(np.logical_or, dtype='bool')
logical_xor = wrap_elemwise(np.logical_xor, dtype='bool')
logical_not = wrap_elemwise(np.logical_not, dtype='bool')
maximum = wrap_elemwise(np.maximum)
minimum = wrap_elemwise(np.minimum)
fmax = wrap_elemwise(np.fmax)
fmin = wrap_elemwise(np.fmin)

# floating functions
isreal = wrap_elemwise(np.isreal, dtype='bool')
iscomplex = wrap_elemwise(np.iscomplex, dtype='bool')
isfinite = wrap_elemwise(np.isfinite, dtype='bool')
isinf = wrap_elemwise(np.isinf, dtype='bool')
isnan = wrap_elemwise(np.isnan, dtype='bool')
signbit = wrap_elemwise(np.signbit, dtype='bool')
copysign = wrap_elemwise(np.copysign)
nextafter = wrap_elemwise(np.nextafter)
# modf: see below
ldexp = wrap_elemwise(np.ldexp)
# frexp: see below
fmod = wrap_elemwise(np.fmod)
floor = wrap_elemwise(np.floor)
ceil = wrap_elemwise(np.ceil)
trunc = wrap_elemwise(np.trunc)

# more math routines, from this page:
# http://docs.scipy.org/doc/numpy/reference/routines.math.html
degrees = wrap_elemwise(np.degrees)
radians = wrap_elemwise(np.radians)
rint = wrap_elemwise(np.rint)
fix = wrap_elemwise(np.fix)
angle = wrap_elemwise(np.angle)
real = wrap_elemwise(np.real)
imag = wrap_elemwise(np.imag)
clip = wrap_elemwise(np.clip)
fabs = wrap_elemwise(np.fabs)
sign = wrap_elemwise(np.sign)


def frexp(x):
    tmp = elemwise(np.frexp, x)
    left = next(names)
    right = next(names)
    ldsk = dict(((left,) + key[1:], (getitem, key, 0))
                for key in core.flatten(tmp._keys()))
    rdsk = dict(((right,) + key[1:], (getitem, key, 1))
                for key in core.flatten(tmp._keys()))

    if x._dtype is not None:
        a = np.empty((1,), dtype=x._dtype)
        l, r = np.frexp(a)
        ldt = l.dtype
        rdt = r.dtype
    else:
        ldt = None
        rdt = None

    L = Array(merge(tmp.dask, ldsk), left, chunks=tmp.chunks, dtype=ldt)
    R = Array(merge(tmp.dask, rdsk), right, chunks=tmp.chunks, dtype=rdt)
    return L, R

frexp.__doc__ = np.frexp.__doc__


def modf(x):
    tmp = elemwise(np.modf, x)
    left = next(names)
    right = next(names)
    ldsk = dict(((left,) + key[1:], (getitem, key, 0))
                for key in core.flatten(tmp._keys()))
    rdsk = dict(((right,) + key[1:], (getitem, key, 1))
                for key in core.flatten(tmp._keys()))

    if x._dtype is not None:
        a = np.empty((1,), dtype=x._dtype)
        l, r = np.modf(a)
        ldt = l.dtype
        rdt = r.dtype
    else:
        ldt = None
        rdt = None

    L = Array(merge(tmp.dask, ldsk), left, chunks=tmp.chunks, dtype=ldt)
    R = Array(merge(tmp.dask, rdsk), right, chunks=tmp.chunks, dtype=rdt)
    return L, R

modf.__doc__ = np.modf.__doc__


@wraps(np.around)
def around(x, decimals=0):
    return map_blocks(x, partial(np.around, decimals=decimals), dtype=x.dtype)


def isnull(values):
    """ pandas.isnull for dask arrays """
    import pandas as pd
    return elemwise(pd.isnull, values, dtype='bool')


def notnull(values):
    """ pandas.notnull for dask arrays """
    return ~isnull(values)


@wraps(numpy_compat.isclose)
def isclose(arr1, arr2, rtol=1e-5, atol=1e-8, equal_nan=False):
    func = partial(numpy_compat.isclose, rtol=rtol, atol=atol,
                   equal_nan=equal_nan)
    return elemwise(func, arr1, arr2, dtype='bool')


def variadic_choose(a, *choices):
    return np.choose(a, choices)
@wraps(np.choose) def choose(a, choices): return elemwise(variadic_choose, a, *choices) where_error_message = """ The dask.array version of where only handles the three argument case. da.where(x > 0, x, 0) and not the single argument case da.where(x > 0) This is because dask.array operations must be able to infer the shape of their outputs prior to execution. The number of positive elements of x requires execution. See the ``np.where`` docstring for examples and the following link for a more thorough explanation: http://dask.pydata.org/en/latest/array-overview.html#construct """.strip() @wraps(np.where) def where(condition, x=None, y=None): if x is None or y is None: raise TypeError(where_error_message) return choose(condition, [y, x]) @wraps(chunk.coarsen) def coarsen(reduction, x, axes): if not all(bd % div == 0 for i, div in axes.items() for bd in x.chunks[i]): raise ValueError( "Coarsening factor does not align with block dimensions") if 'dask' in inspect.getfile(reduction): reduction = getattr(np, reduction.__name__) name = next(names) dsk = dict(((name,) + key[1:], (chunk.coarsen, reduction, key, axes)) for key in core.flatten(x._keys())) chunks = tuple(tuple(int(bd / axes.get(i, 1)) for bd in bds) for i, bds in enumerate(x.chunks)) if x._dtype is not None: dt = reduction(np.empty((1,) * x.ndim, dtype=x.dtype)).dtype else: dt = None return Array(merge(x.dask, dsk), name, chunks, dtype=dt) def split_at_breaks(array, breaks, axis=0): """ Split an array into a list of arrays (using slices) at the given breaks >>> split_at_breaks(np.arange(6), [3, 5]) [array([0, 1, 2]), array([3, 4]), array([5])] """ padded_breaks = concat([[None], breaks, [None]]) slices = [slice(i, j) for i, j in sliding_window(2, padded_breaks)] preslice = (slice(None),) * axis split_array = [array[preslice + (s,)] for s in slices] return split_array @wraps(np.insert) def insert(arr, obj, values, axis): # axis is a required argument here to avoid needing to deal with the numpy # default case (which reshapes the array to make it flat) if not -arr.ndim <= axis < arr.ndim: raise IndexError('axis %r is out of bounds for an array of dimension ' '%s' % (axis, arr.ndim)) if axis < 0: axis += arr.ndim if isinstance(obj, slice): obj = np.arange(*obj.indices(arr.shape[axis])) obj = np.asarray(obj) scalar_obj = obj.ndim == 0 if scalar_obj: obj = np.atleast_1d(obj) obj = np.where(obj < 0, obj + arr.shape[axis], obj) if (np.diff(obj) < 0).any(): raise NotImplementedError( 'da.insert only implemented for monotonic ``obj`` argument') split_arr = split_at_breaks(arr, np.unique(obj), axis) if getattr(values, 'ndim', 0) == 0: # we need to turn values into a dask array name = next(names) dtype = getattr(values, 'dtype', type(values)) values = Array({(name,): values}, name, chunks=(), dtype=dtype) values_shape = tuple(len(obj) if axis == n else s for n, s in enumerate(arr.shape)) values = broadcast_to(values, values_shape) elif scalar_obj: values = values[(slice(None),) * axis + (None,)] values_chunks = tuple(values_bd if axis == n else arr_bd for n, (arr_bd, values_bd) in enumerate(zip(arr.chunks, values.chunks))) values = values.rechunk(values_chunks) counts = np.bincount(obj)[:-1] values_breaks = np.cumsum(counts[counts > 0]) split_values = split_at_breaks(values, values_breaks, axis) interleaved = list(interleave([split_arr, split_values])) interleaved = [i for i in interleaved if i.nbytes] return concatenate(interleaved, axis=axis) @wraps(chunk.broadcast_to) def broadcast_to(x, shape): shape = tuple(shape) ndim_new = len(shape) - 
x.ndim if ndim_new < 0 or any(new != old for new, old in zip(shape[ndim_new:], x.shape) if old != 1): raise ValueError('cannot broadcast shape %s to shape %s' % (x.shape, shape)) name = next(names) chunks = (tuple((s,) for s in shape[:ndim_new]) + tuple(bd if old > 1 else (new,) for bd, old, new in zip(x.chunks, x.shape, shape[ndim_new:]))) dsk = dict(((name,) + (0,) * ndim_new + key[1:], (chunk.broadcast_to, key, shape[:ndim_new] + tuple(bd[i] for i, bd in zip(key[1:], chunks[ndim_new:])))) for key in core.flatten(x._keys())) return Array(merge(dsk, x.dask), name, chunks, dtype=x.dtype) def offset_func(func, offset, *args): """ Offsets inputs by offset >>> double = lambda x: x * 2 >>> f = offset_func(double, (10,)) >>> f(1) 22 >>> f(300) 620 """ def _offset(*args): args2 = list(map(add, args, offset)) return func(*args2) with ignoring(Exception): _offset.__name__ = 'offset_' + func.__name__ return _offset fromfunction_names = ('fromfunction-%d' % i for i in count(1)) @wraps(np.fromfunction) def fromfunction(func, chunks=None, shape=None, dtype=None): name = next(fromfunction_names) if chunks: chunks = normalize_chunks(chunks, shape) keys = list(product([name], *[range(len(bd)) for bd in chunks])) aggdims = [list(accumulate(add, (0,) + bd[:-1])) for bd in chunks] offsets = list(product(*aggdims)) shapes = list(product(*chunks)) values = [(np.fromfunction, offset_func(func, offset), shape) for offset, shape in zip(offsets, shapes)] dsk = dict(zip(keys, values)) return Array(dsk, name, chunks, dtype=dtype) @wraps(np.unique) def unique(x): name = next(names) dsk = dict(((name, i), (np.unique, key)) for i, key in enumerate(x._keys())) parts = get(merge(dsk, x.dask), list(dsk.keys())) return np.unique(np.concatenate(parts)) def write_hdf5_chunk(fn, datapath, index, data): import h5py with h5py.File(fn) as f: d = f[datapath] d[index] = data @wraps(np.bincount) def bincount(x, weights=None, minlength=None): if minlength is None: raise TypeError("Must specify minlength argument in da.bincount") assert x.ndim == 1 if weights is not None: assert weights.chunks == x.chunks # Call np.bincount on each block, possibly with weights name = 'bincount' + next(tokens) if weights is not None: dsk = dict(((name, i), (np.bincount, (x.name, i), (weights.name, i), minlength)) for i, _ in enumerate(x._keys())) dtype = 'f8' else: dsk = dict(((name, i), (np.bincount, (x.name, i), None, minlength)) for i, _ in enumerate(x._keys())) dtype = 'i8' # Sum up all of the intermediate bincounts per block name = 'bincount-sum' + next(tokens) dsk[(name, 0)] = (np.sum, (list, list(dsk)), 0) chunks = ((minlength,),) dsk.update(x.dask) if weights is not None: dsk.update(weights.dask) return Array(dsk, name, chunks, dtype)
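

if __name__ == '__main__':
    # Illustrative usage sketch appended for exposition; it exercises only
    # functions defined above (``np`` is the module-level numpy import).
    x = from_array(np.array([0, 1, 1, 2, 2, 2]), chunks=3)
    # ``bincount`` requires an explicit ``minlength`` because the output
    # chunk structure must be known before the graph is executed.
    counts = bincount(x, minlength=3)
    print(counts.compute())  # -> [1 2 3]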
{ "content_hash": "0c72d706c727ab5b2f232cfcd95723e8", "timestamp": "", "source": "github", "line_count": 1779, "max_line_length": 107, "avg_line_length": 31.19673974142777, "alnum_prop": 0.5583163660606497, "repo_name": "esc/dask", "id": "2ed778ebdd97a2ac13cde12109901982e6bb93df", "size": "55499", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dask/array/core.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "51" }, { "name": "Python", "bytes": "511448" }, { "name": "Shell", "bytes": "93" } ], "symlink_target": "" }
from typing import Optional, Tuple, Union import torch from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput from ...utils import deprecate class DDIMPipeline(DiffusionPipeline): r""" This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.) Parameters: unet ([`UNet2DModel`]): U-Net architecture to denoise the encoded image. scheduler ([`SchedulerMixin`]): A scheduler to be used in combination with `unet` to denoise the encoded image. Can be one of [`DDPMScheduler`], or [`DDIMScheduler`]. """ def __init__(self, unet, scheduler): super().__init__() self.register_modules(unet=unet, scheduler=scheduler) @torch.no_grad() def __call__( self, batch_size: int = 1, generator: Optional[torch.Generator] = None, eta: float = 0.0, num_inference_steps: int = 50, use_clipped_model_output: Optional[bool] = None, output_type: Optional[str] = "pil", return_dict: bool = True, **kwargs, ) -> Union[ImagePipelineOutput, Tuple]: r""" Args: batch_size (`int`, *optional*, defaults to 1): The number of images to generate. generator (`torch.Generator`, *optional*): A [torch generator](https://pytorch.org/docs/stable/generated/torch.Generator.html) to make generation deterministic. eta (`float`, *optional*, defaults to 0.0): The eta parameter which controls the scale of the variance (0 is DDIM and 1 is one type of DDPM). num_inference_steps (`int`, *optional*, defaults to 50): The number of denoising steps. More denoising steps usually lead to a higher quality image at the expense of slower inference. use_clipped_model_output (`bool`, *optional*, defaults to `None`): if `True` or `False`, see documentation for `DDIMScheduler.step`. If `None`, nothing is passed downstream to the scheduler. So use `None` for schedulers which don't support this argument. output_type (`str`, *optional*, defaults to `"pil"`): The output format of the generate image. Choose between [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`. return_dict (`bool`, *optional*, defaults to `True`): Whether or not to return a [`~pipeline_utils.ImagePipelineOutput`] instead of a plain tuple. Returns: [`~pipeline_utils.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if `return_dict` is True, otherwise a `tuple. When returning a tuple, the first element is a list with the generated images. """ if generator is not None and generator.device.type != self.device.type and self.device.type != "mps": message = ( f"The `generator` device is `{generator.device}` and does not match the pipeline " f"device `{self.device}`, so the `generator` will be ignored. " f'Please use `generator=torch.Generator(device="{self.device}")` instead.' ) deprecate( "generator.device == 'cpu'", "0.11.0", message, ) generator = None # Sample gaussian noise to begin loop if isinstance(self.unet.sample_size, int): image_shape = (batch_size, self.unet.in_channels, self.unet.sample_size, self.unet.sample_size) else: image_shape = (batch_size, self.unet.in_channels, *self.unet.sample_size) if self.device.type == "mps": # randn does not work reproducibly on mps image = torch.randn(image_shape, generator=generator) image = image.to(self.device) else: image = torch.randn(image_shape, generator=generator, device=self.device) # set step values self.scheduler.set_timesteps(num_inference_steps) for t in self.progress_bar(self.scheduler.timesteps): # 1. 
predict noise model_output model_output = self.unet(image, t).sample # 2. predict previous mean of image x_t-1 and add variance depending on eta # eta corresponds to η in paper and should be between [0, 1] # do x_t -> x_t-1 image = self.scheduler.step( model_output, t, image, eta=eta, use_clipped_model_output=use_clipped_model_output, generator=generator ).prev_sample image = (image / 2 + 0.5).clamp(0, 1) image = image.cpu().permute(0, 2, 3, 1).numpy() if output_type == "pil": image = self.numpy_to_pil(image) if not return_dict: return (image,) return ImagePipelineOutput(images=image)
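

# Illustrative usage sketch (added for exposition; the model id below is a
# placeholder for any checkpoint whose model_index pairs a UNet2DModel with a
# DDIMScheduler):
#
#     from diffusers import DDIMPipeline
#
#     pipe = DDIMPipeline.from_pretrained("my-org/my-ddim-checkpoint")
#     image = pipe(batch_size=1, num_inference_steps=50, eta=0.0).images[0]
#     image.save("ddim_sample.png")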
{ "content_hash": "986e08098097b6e1392745a488813c97", "timestamp": "", "source": "github", "line_count": 112, "max_line_length": 119, "avg_line_length": 45.580357142857146, "alnum_prop": 0.602154750244858, "repo_name": "huggingface/diffusers", "id": "b9e590dea64621ea9eada0fd5d962e58943a2775", "size": "5713", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "src/diffusers/pipelines/ddim/pipeline_ddim.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "7145" }, { "name": "Makefile", "bytes": "2769" }, { "name": "Python", "bytes": "2481515" } ], "symlink_target": "" }
from __future__ import with_statement import warnings from django.conf import settings, UserSettingsHolder from django.core import mail from django.test.signals import template_rendered, setting_changed from django.template import Template, loader, TemplateDoesNotExist from django.template.loaders import cached from django.utils.translation import deactivate from django.utils.functional import wraps __all__ = ( 'Approximate', 'ContextList', 'get_runner', 'override_settings', 'setup_test_environment', 'teardown_test_environment', ) RESTORE_LOADERS_ATTR = '_original_template_source_loaders' class Approximate(object): def __init__(self, val, places=7): self.val = val self.places = places def __repr__(self): return repr(self.val) def __eq__(self, other): if self.val == other: return True return round(abs(self.val-other), self.places) == 0 class ContextList(list): """A wrapper that provides direct key access to context items contained in a list of context objects. """ def __getitem__(self, key): if isinstance(key, basestring): for subcontext in self: if key in subcontext: return subcontext[key] raise KeyError(key) else: return super(ContextList, self).__getitem__(key) def __contains__(self, key): try: value = self[key] except KeyError: return False return True def instrumented_test_render(self, context): """ An instrumented Template render method, providing a signal that can be intercepted by the test system Client """ template_rendered.send(sender=self, template=self, context=context) return self.nodelist.render(context) def setup_test_environment(): """Perform any global pre-test setup. This involves: - Installing the instrumented test renderer - Set the email backend to the locmem email backend. - Setting the active locale to match the LANGUAGE_CODE setting. """ Template.original_render = Template._render Template._render = instrumented_test_render mail.original_email_backend = settings.EMAIL_BACKEND settings.EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' mail.outbox = [] deactivate() def teardown_test_environment(): """Perform any global post-test teardown. This involves: - Restoring the original test renderer - Restoring the email sending functions """ Template._render = Template.original_render del Template.original_render settings.EMAIL_BACKEND = mail.original_email_backend del mail.original_email_backend del mail.outbox def get_warnings_state(): """ Returns an object containing the state of the warnings module """ # There is no public interface for doing this, but this implementation of # get_warnings_state and restore_warnings_state appears to work on Python # 2.4 to 2.7. return warnings.filters[:] def restore_warnings_state(state): """ Restores the state of the warnings module when passed an object that was returned by get_warnings_state() """ warnings.filters = state[:] def get_runner(settings, test_runner_class=None): if not test_runner_class: test_runner_class = settings.TEST_RUNNER test_path = test_runner_class.split('.') # Allow for Python 2.5 relative paths if len(test_path) > 1: test_module_name = '.'.join(test_path[:-1]) else: test_module_name = '.' test_module = __import__(test_module_name, {}, {}, test_path[-1]) test_runner = getattr(test_module, test_path[-1]) return test_runner def setup_test_template_loader(templates_dict, use_cached_loader=False): """ Changes Django to only find templates from within a dictionary (where each key is the template name and each value is the corresponding template content to return). 
Use meth:`restore_template_loaders` to restore the original loaders. """ if hasattr(loader, RESTORE_LOADERS_ATTR): raise Exception("loader.%s already exists" % RESTORE_LOADERS_ATTR) def test_template_loader(template_name, template_dirs=None): "A custom template loader that loads templates from a dictionary." try: return (templates_dict[template_name], "test:%s" % template_name) except KeyError: raise TemplateDoesNotExist(template_name) if use_cached_loader: template_loader = cached.Loader(('test_template_loader',)) template_loader._cached_loaders = (test_template_loader,) else: template_loader = test_template_loader setattr(loader, RESTORE_LOADERS_ATTR, loader.template_source_loaders) loader.template_source_loaders = (template_loader,) return template_loader def restore_template_loaders(): """ Restores the original template loaders after :meth:`setup_test_template_loader` has been run. """ loader.template_source_loaders = getattr(loader, RESTORE_LOADERS_ATTR) delattr(loader, RESTORE_LOADERS_ATTR) class OverrideSettingsHolder(UserSettingsHolder): """ A custom setting holder that sends a signal upon change. """ def __setattr__(self, name, value): UserSettingsHolder.__setattr__(self, name, value) setting_changed.send(sender=self.__class__, setting=name, value=value) class override_settings(object): """ Acts as either a decorator, or a context manager. If it's a decorator it takes a function and returns a wrapped function. If it's a contextmanager it's used with the ``with`` statement. In either event entering/exiting are called before and after, respectively, the function/block is executed. """ def __init__(self, **kwargs): self.options = kwargs self.wrapped = settings._wrapped def __enter__(self): self.enable() def __exit__(self, exc_type, exc_value, traceback): self.disable() def __call__(self, test_func): from django.test import TransactionTestCase if isinstance(test_func, type) and issubclass(test_func, TransactionTestCase): original_pre_setup = test_func._pre_setup original_post_teardown = test_func._post_teardown def _pre_setup(innerself): self.enable() original_pre_setup(innerself) def _post_teardown(innerself): original_post_teardown(innerself) self.disable() test_func._pre_setup = _pre_setup test_func._post_teardown = _post_teardown return test_func else: @wraps(test_func) def inner(*args, **kwargs): with self: return test_func(*args, **kwargs) return inner def enable(self): override = OverrideSettingsHolder(settings._wrapped) for key, new_value in self.options.items(): setattr(override, key, new_value) settings._wrapped = override def disable(self): settings._wrapped = self.wrapped
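

# Illustrative usage sketches (appended for exposition; the setting names and
# template content are placeholders):
#
#     @override_settings(LOGIN_URL='/other/login/')
#     def test_login(self):            # inside a TestCase subclass
#         ...
#
#     with override_settings(USE_TZ=True):
#         ...                          # code here sees the overridden setting
#
#     setup_test_template_loader({'index.html': 'Hello {{ name }}'})
#     try:
#         ...                          # template loading now uses the dict
#     finally:
#         restore_template_loaders()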
{ "content_hash": "6a8b9411d1f42b611844d45df0a530d4", "timestamp": "", "source": "github", "line_count": 224, "max_line_length": 86, "avg_line_length": 32.174107142857146, "alnum_prop": 0.6567226307756348, "repo_name": "mixman/djangodev", "id": "87f23118971aea50bd781dbc956bd1bcf8ad7692", "size": "7207", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "django/test/utils.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "88362" }, { "name": "Python", "bytes": "7834206" }, { "name": "Shell", "bytes": "9076" } ], "symlink_target": "" }
""" @brief test log(time=1s) You should indicate a time in seconds. The program ``run_unittests.py`` will sort all test files by increasing time and run them. """ import os import unittest from pyquickhelper.loghelper import fLOG from pyensae.sql import TextFileColumns class TestFileColumns (unittest.TestCase): def test_read_csv_file(self): fLOG( __file__, self._testMethodName, OutputPrint=__name__ == "__main__") file = os.path.join( os.path.abspath( os.path.split(__file__)[0]), "data", "ACA.PA.txt") f = TextFileColumns(file, fLOG=fLOG) f.open() rows = list(f) f.close() for li in rows[:5]: fLOG(li) assert isinstance(li, dict) assert isinstance(li["Adj_Close"], float) if __name__ == "__main__": unittest.main()
{ "content_hash": "8588f4cf0acc95ad5583b7be607c9e40", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 71, "avg_line_length": 24.83783783783784, "alnum_prop": 0.5560391730141458, "repo_name": "sdpython/pyensae", "id": "b83ba371ad2f9184f8bfaf3274f7c04a897b4599", "size": "919", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_unittests/ut_sql/test_file_columns.py", "mode": "33188", "license": "mit", "language": [ { "name": "ANTLR", "bytes": "129141" }, { "name": "Batchfile", "bytes": "1818" }, { "name": "CSS", "bytes": "689" }, { "name": "Jupyter Notebook", "bytes": "750986" }, { "name": "Python", "bytes": "3101205" }, { "name": "R", "bytes": "3146" }, { "name": "Shell", "bytes": "715" } ], "symlink_target": "" }
"""Knowledge in learning, Chapter 19""" from random import shuffle from math import log from utils import powerset from collections import defaultdict from itertools import combinations, product from logic import (FolKB, constant_symbols, predicate_symbols, standardize_variables, variables, is_definite_clause, subst, expr, Expr) # ______________________________________________________________________________ def current_best_learning(examples, h, examples_so_far=[]): """ [Figure 19.2] The hypothesis is a list of dictionaries, with each dictionary representing a disjunction.""" if not examples: return h e = examples[0] if is_consistent(e, h): return current_best_learning(examples[1:], h, examples_so_far + [e]) elif false_positive(e, h): for h2 in specializations(examples_so_far + [e], h): h3 = current_best_learning(examples[1:], h2, examples_so_far + [e]) if h3 != 'FAIL': return h3 elif false_negative(e, h): for h2 in generalizations(examples_so_far + [e], h): h3 = current_best_learning(examples[1:], h2, examples_so_far + [e]) if h3 != 'FAIL': return h3 return 'FAIL' def specializations(examples_so_far, h): """Specialize the hypothesis by adding AND operations to the disjunctions""" hypotheses = [] for i, disj in enumerate(h): for e in examples_so_far: for k, v in e.items(): if k in disj or k == 'GOAL': continue h2 = h[i].copy() h2[k] = '!' + v h3 = h.copy() h3[i] = h2 if check_all_consistency(examples_so_far, h3): hypotheses.append(h3) shuffle(hypotheses) return hypotheses def generalizations(examples_so_far, h): """Generalize the hypothesis. First delete operations (including disjunctions) from the hypothesis. Then, add OR operations.""" hypotheses = [] # Delete disjunctions disj_powerset = powerset(range(len(h))) for disjs in disj_powerset: h2 = h.copy() for d in reversed(list(disjs)): del h2[d] if check_all_consistency(examples_so_far, h2): hypotheses += h2 # Delete AND operations in disjunctions for i, disj in enumerate(h): a_powerset = powerset(disj.keys()) for attrs in a_powerset: h2 = h[i].copy() for a in attrs: del h2[a] if check_all_consistency(examples_so_far, [h2]): h3 = h.copy() h3[i] = h2.copy() hypotheses += h3 # Add OR operations if hypotheses == [] or hypotheses == [{}]: hypotheses = add_or(examples_so_far, h) else: hypotheses.extend(add_or(examples_so_far, h)) shuffle(hypotheses) return hypotheses def add_or(examples_so_far, h): """Adds an OR operation to the hypothesis. 
The AND operations in the disjunction are generated by the last example (which is the problematic one).""" ors = [] e = examples_so_far[-1] attrs = {k: v for k, v in e.items() if k != 'GOAL'} a_powerset = powerset(attrs.keys()) for c in a_powerset: h2 = {} for k in c: h2[k] = attrs[k] if check_negative_consistency(examples_so_far, h2): h3 = h.copy() h3.append(h2) ors.append(h3) return ors # ______________________________________________________________________________ def version_space_learning(examples): """ [Figure 19.3] The version space is a list of hypotheses, which in turn are a list of dictionaries/disjunctions.""" V = all_hypotheses(examples) for e in examples: if V: V = version_space_update(V, e) return V def version_space_update(V, e): return [h for h in V if is_consistent(e, h)] def all_hypotheses(examples): """Builds a list of all the possible hypotheses""" values = values_table(examples) h_powerset = powerset(values.keys()) hypotheses = [] for s in h_powerset: hypotheses.extend(build_attr_combinations(s, values)) hypotheses.extend(build_h_combinations(hypotheses)) return hypotheses def values_table(examples): """Builds a table with all the possible values for each attribute. Returns a dictionary with keys the attribute names and values a list with the possible values for the corresponding attribute.""" values = defaultdict(lambda: []) for e in examples: for k, v in e.items(): if k == 'GOAL': continue mod = '!' if e['GOAL']: mod = '' if mod + v not in values[k]: values[k].append(mod + v) values = dict(values) return values def build_attr_combinations(s, values): """Given a set of attributes, builds all the combinations of values. If the set holds more than one attribute, recursively builds the combinations.""" if len(s) == 1: # s holds just one attribute, return its list of values k = values[s[0]] h = [[{s[0]: v}] for v in values[s[0]]] return h h = [] for i, a in enumerate(s): rest = build_attr_combinations(s[i+1:], values) for v in values[a]: o = {a: v} for r in rest: t = o.copy() for d in r: t.update(d) h.append([t]) return h def build_h_combinations(hypotheses): """Given a set of hypotheses, builds and returns all the combinations of the hypotheses.""" h = [] h_powerset = powerset(range(len(hypotheses))) for s in h_powerset: t = [] for i in s: t.extend(hypotheses[i]) h.append(t) return h # ______________________________________________________________________________ def minimal_consistent_det(E, A): """Returns a minimal set of attributes which give consistent determination""" n = len(A) for i in range(n + 1): for A_i in combinations(A, i): if consistent_det(A_i, E): return set(A_i) def consistent_det(A, E): """Checks if the attributes(A) is consistent with the examples(E)""" H = {} for e in E: attr_values = tuple(e[attr] for attr in A) if attr_values in H and H[attr_values] != e['GOAL']: return False H[attr_values] = e['GOAL'] return True # ______________________________________________________________________________ class FOIL_container(FolKB): """Holds the kb and other necessary elements required by FOIL""" def __init__(self, clauses=[]): self.const_syms = set() self.pred_syms = set() FolKB.__init__(self, clauses) def tell(self, sentence): if is_definite_clause(sentence): self.clauses.append(sentence) self.const_syms.update(constant_symbols(sentence)) self.pred_syms.update(predicate_symbols(sentence)) else: raise Exception("Not a definite clause: {}".format(sentence)) def foil(self, examples, target): """Learns a list of first-order horn clauses 'examples' is a tuple: 
(positive_examples, negative_examples). positive_examples and
        negative_examples are both lists which contain substitutions."""
        clauses = []

        pos_examples = examples[0]
        neg_examples = examples[1]

        while pos_examples:
            clause, extended_pos_examples = self.new_clause((pos_examples, neg_examples), target)
            # remove positive examples covered by clause
            pos_examples = self.update_examples(target, pos_examples, extended_pos_examples)

            clauses.append(clause)

        return clauses

    def new_clause(self, examples, target):
        """Finds a horn clause which satisfies part of the positive
        examples but none of the negative examples.
        The horn clause is specified as [consequent, list of antecedents]
        Return value is the tuple (horn_clause, extended_positive_examples)"""
        clause = [target, []]
        # [positive_examples, negative_examples]
        extended_examples = examples
        while extended_examples[1]:
            l = self.choose_literal(self.new_literals(clause), extended_examples)
            clause[1].append(l)
            extended_examples = [sum([list(self.extend_example(example, l))
                                      for example in extended_examples[i]], [])
                                 for i in range(2)]

        return (clause, extended_examples[0])

    def extend_example(self, example, literal):
        """Generates extended examples which satisfy the literal"""
        # find all substitutions that satisfy literal
        for s in self.ask_generator(subst(example, literal)):
            s.update(example)
            yield s

    def new_literals(self, clause):
        """Generates new literals based on known predicate symbols.
        A generated literal must share at least one variable with the clause."""
        share_vars = variables(clause[0])
        for l in clause[1]:
            share_vars.update(variables(l))

        for pred, arity in self.pred_syms:
            new_vars = {standardize_variables(expr('x')) for _ in range(arity - 1)}
            for args in product(share_vars.union(new_vars), repeat=arity):
                if any(var in share_vars for var in args):
                    yield Expr(pred, *[var for var in args])

    def choose_literal(self, literals, examples):
        """Chooses the best literal based on the information gain"""
        def gain(l):
            pre_pos = len(examples[0])
            pre_neg = len(examples[1])
            extended_examples = [sum([list(self.extend_example(example, l))
                                      for example in examples[i]], [])
                                 for i in range(2)]
            post_pos = len(extended_examples[0])
            post_neg = len(extended_examples[1])
            if pre_pos + pre_neg == 0 or post_pos + post_neg == 0:
                return -1
            # number of positive examples that are represented in extended_examples
            T = 0
            for example in examples[0]:
                def represents(d):
                    return all(d[x] == example[x] for x in example)
                if any(represents(l_) for l_ in extended_examples[0]):
                    T += 1
            return T * log((post_pos * (pre_pos + pre_neg) + 1e-4) /
                           ((post_pos + post_neg) * pre_pos))

        return max(literals, key=gain)

    def update_examples(self, target, examples, extended_examples):
        """Adds to the kb those examples that are represented in extended_examples.
        The list of omitted examples is returned."""
        uncovered = []
        for example in examples:
            def represents(d):
                return all(d[x] == example[x] for x in example)
            if any(represents(l) for l in extended_examples):
                self.tell(subst(example, target))
            else:
                uncovered.append(example)

        return uncovered


# ______________________________________________________________________________


def check_all_consistency(examples, h):
    """Check for the consistency of all examples under h"""
    for e in examples:
        if not is_consistent(e, h):
            return False

    return True


def check_negative_consistency(examples, h):
    """Check if the negative examples are consistent under h"""
    for e in examples:
        if e['GOAL']:
            continue

        if not is_consistent(e, [h]):
            return False

    return True


def disjunction_value(e, d):
    """The value of example e
under disjunction d""" for k, v in d.items(): if v[0] == '!': # v is a NOT expression # e[k], thus, should not be equal to v if e[k] == v[1:]: return False elif e[k] != v: return False return True def guess_value(e, h): """Guess value of example e under hypothesis h""" for d in h: if disjunction_value(e, d): return True return False def is_consistent(e, h): return e["GOAL"] == guess_value(e, h) def false_positive(e, h): if e["GOAL"] == False: if guess_value(e, h): return True return False def false_negative(e, h): if e["GOAL"] == True: if not guess_value(e, h): return True return False
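

# Illustrative sketch (appended for exposition; the attribute names and values
# are made up):
#
#     examples = [{'Weather': 'Sunny', 'GOAL': True},
#                 {'Weather': 'Rainy', 'GOAL': False}]
#     h = current_best_learning(examples, [{'Weather': 'Sunny'}])
#     # h is a hypothesis (a list of disjunction dicts) consistent with every
#     # example, or the string 'FAIL'; here the initial hypothesis already
#     # fits both examples, so it is returned unchanged.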
{ "content_hash": "3994975e55d9c4f35561b0c3fd9aecb4", "timestamp": "", "source": "github", "line_count": 409, "max_line_length": 99, "avg_line_length": 30.941320293398533, "alnum_prop": 0.5574081390754643, "repo_name": "SnShine/aima-python", "id": "6fe09acd2f7d73b6696ba9a17fd33f9da123d737", "size": "12655", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "knowledge.py", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "9816" }, { "name": "Jupyter Notebook", "bytes": "2024152" }, { "name": "Makefile", "bytes": "562" }, { "name": "Python", "bytes": "485842" } ], "symlink_target": "" }
import unittest

from captaincloud.processes.task_runner import TaskRunner
from captaincloud.task import Task, TaskImpl, field
from captaincloud.task.registry import TaskRegistry


class TestTaskRunner(unittest.TestCase):
    """Test task runner"""

    def setUp(self):
        self.task_runner = TaskRunner()

        class DivideTaskImpl(TaskImpl):
            def run(self):
                self.task.Output.ans = self.task.Input.x / self.task.Input.y

        @TaskRegistry.register
        class DivideTask(Task):
            ID = 'add'
            impl = DivideTaskImpl

            class Input:
                x = field.FloatField()
                y = field.FloatField()

            class Output:
                ans = field.FloatField()

        self.task = DivideTask(x=20, y=2)
        self.error_task = DivideTask(x=20, y=0)

    def test_add_task(self):
        self.assertTrue(self.task_runner.is_empty())
        run_id = self.task_runner.add(self.task)
        self.assertTrue(hasattr(self.task, 'RUN_ID'))
        self.assertEqual(self.task.RUN_ID, run_id)
        self.assertEqual(
            self.task_runner.get_status(run_id=run_id), TaskRunner.WAITING)
        self.assertFalse(self.task_runner.is_empty())
        self.task_runner.stop()

    def test_task_status(self):
        run_id = self.task_runner.add(self.task)
        self.task_runner.set_status(run_id=run_id, status=TaskRunner.RUNNING)
        self.assertEqual(
            self.task_runner.get_status(run_id=run_id), TaskRunner.RUNNING)

    def test_task_runner(self):
        self.task_runner.start()
        self.assertTrue(self.task_runner._is_running)
        run_id1 = self.task_runner.add(self.task)
        run_id2 = self.task_runner.add(self.error_task)
        self.assertTrue(self.task_runner._is_running)
        self.task_runner.stop()
        self.task_runner.join()
        self.assertFalse(self.task_runner._is_running)
        self.assertEqual(
            self.task_runner.get_status(run_id=run_id1), TaskRunner.COMPLETED)
        self.assertEqual(self.task.Output.ans, 10)  # 20 / 2
        self.assertEqual(self.task_runner.get_status(
            run_id=run_id2), TaskRunner.ERROR)
{ "content_hash": "063e45310179dc6579be15be83318eb4", "timestamp": "", "source": "github", "line_count": 65, "max_line_length": 78, "avg_line_length": 33.784615384615385, "alnum_prop": 0.6220400728597449, "repo_name": "bpsagar/captaincloud", "id": "d20adb6621c33a9c2ee536acdf6e92cd2617b114", "size": "2196", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/processes/task_runner/test_runner.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "50704" } ], "symlink_target": "" }
"""Unit tests for the OWASP Dependency Check source up-to-dateness collector.""" from datetime import datetime, timedelta, timezone from .base import OWASPDependencyCheckTestCase class OWASPDependencyCheckTest(OWASPDependencyCheckTestCase): """Unit tests for the OWASP Dependency Check source up-to-dateness collector.""" METRIC_TYPE = "source_up_to_dateness" async def test_source_up_to_dateness(self): """Test that the source age in days is returned.""" response = await self.collect(get_request_text=self.xml) timezone_info = timezone(timedelta(hours=2)) expected_age = (datetime.now(timezone_info) - datetime(2018, 10, 3, 13, 1, 24, 784, tzinfo=timezone_info)).days self.assert_measurement(response, value=str(expected_age))
{ "content_hash": "19bfb3dff865e9497b80f32883263e8c", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 119, "avg_line_length": 43.72222222222222, "alnum_prop": 0.7255400254129606, "repo_name": "ICTU/quality-time", "id": "e245724721f29553110e24fd0d3bfec81ef03ffc", "size": "787", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "components/collector/tests/source_collectors/owasp_dependency_check/test_source_up_to_dateness.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "11325" }, { "name": "Dockerfile", "bytes": "7493" }, { "name": "Gherkin", "bytes": "48447" }, { "name": "HTML", "bytes": "1575" }, { "name": "JavaScript", "bytes": "547159" }, { "name": "Python", "bytes": "1386198" }, { "name": "Shell", "bytes": "19321" } ], "symlink_target": "" }
import os import sys import django from django.conf import settings DEFAULT_SETTINGS = { "INSTALLED_APPS": ( "cassava", ), "DATABASES": { "default": { "ENGINE": "django.db.backends.sqlite3" } }, } def runtests(): if not settings.configured: settings.configure(**DEFAULT_SETTINGS) # Compatibility with Django 1.7's stricter initialization if hasattr(django, 'setup'): django.setup() parent = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, parent) try: from django.test.runner import DiscoverRunner runner_class = DiscoverRunner test_args = ['cassava.tests'] except ImportError: from django.test.simple import DjangoTestSuiteRunner runner_class = DjangoTestSuiteRunner test_args = ['tests'] failures = runner_class( verbosity=1, interactive=True, failfast=False).run_tests(test_args) sys.exit(failures) if __name__ == '__main__': runtests()
{ "content_hash": "3a7066cb1238b73ac79d8ee75c3ffe15", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 75, "avg_line_length": 22.369565217391305, "alnum_prop": 0.6229348882410107, "repo_name": "tomkingston/django-cassava", "id": "e68ab9e566b9647d7f11917777a547778acfda5a", "size": "1029", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "runtests.py", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "8248" } ], "symlink_target": "" }
import scene, pygame, sys

isLinux = sys.platform.startswith("linux")
if isLinux:
    try:
        from gi.repository import Gtk
        import sugar3.activity.activity
        from sugar3.graphics.toolbarbox import ToolbarBox
        from sugar3.activity.widgets import ActivityToolbarButton
        from sugar3.graphics.toolbutton import ToolButton
        from sugar3.activity.widgets import StopButton
    except ImportError:
        isLinux = False


class MenuScene(scene.Scene):
    def __init__(self):
        self.font_color = (200, 200, 200)
        self.font = pygame.font.SysFont("comicsansms", 500)
        self.label = self.font.render("Play!", 1, self.font_color)

    def render(self, surface):
        surface.fill((0, 0, 0))
        surface.blit(self.label, (100, 100))

    def update(self, keys, keysLastFrame):
        pass

    def handle_events(self, events):
        for event in events:
            # print("event is " + str(event))
            if event.type == pygame.MOUSEBUTTONDOWN:
                print "mouse button pressed, switching scene"
                self.manager.go_to(scene.Scenes.INTERACTIVE)

    def makeToolbar(self, activity):
        toolbar = ToolbarBox()
        activity_button = ActivityToolbarButton(activity)
        toolbar.toolbar.insert(activity_button, -1)
        activity_button.show()

        separator = Gtk.SeparatorToolItem()
        separator.props.draw = False
        separator.set_expand(True)
        toolbar.toolbar.insert(separator, -1)
        separator.show()

        stop_button = StopButton(activity)
        toolbar.toolbar.insert(stop_button, -1)
        stop_button.show()

        return toolbar
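

# Illustrative sketch (appended for exposition; ``screen``, ``running`` and
# the scene-manager wiring are assumptions about the surrounding game loop,
# not part of this file):
#
#     menu = MenuScene()
#     while running:
#         menu.handle_events(pygame.event.get())
#         menu.update(pygame.key.get_pressed(), keys_last_frame)
#         menu.render(screen)
#         pygame.display.flip()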
{ "content_hash": "caa5d1e426bf180a22c6bb1914e17964", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 66, "avg_line_length": 35.48936170212766, "alnum_prop": 0.6264988009592326, "repo_name": "ColdSauce/golems", "id": "053a8803d82562be0fcd86e9bcbea054dc137c22", "size": "1668", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/menuscene.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "142127" } ], "symlink_target": "" }
""" Slack interface. """ __author__ = "Alex Drlica-Wagner" import os import logging from datetime import datetime import pandas as pd from obztak.utils.database import Database def post_message(text, token=None, channel=None): """Post text to slack.""" from slackclient import SlackClient slack_token = os.environ["SLACK_API_TOKEN"] if token is None else token slack_channel = os.environ["SLACK_API_CHANNEL"] if channel is None else channel sc = SlackClient(slack_token) kwargs = dict(method="chat.postMessage",channel=slack_channel,text=text) ret = sc.api_call(**kwargs) return ret #slack_post = post_message def post_file(filepath, title=None, token=None, channels=None): """Post a file to a Slack channel""" from slackclient import SlackClient token = os.environ["SLACK_API_TOKEN"] if token is None else token channels = os.environ["SLACK_API_CHANNEL"] if channels is None else channels sc = SlackClient(token) kwargs = dict(method="files.upload",channels=channels,title=title) ret = None with open(filepath, 'rb') as file_content: ret = sc.api_call(file=file_content,**kwargs) return ret def post_qcinv(token=None, channel=None, propid=None, timedelta=None, debug=False): """Post inventory results to Slack. Parameters: ----------- token : slack bot token channel: slack channel propid : proposal id timedelta: time to collect exposure info debug : execute but don't post Returns: -------- df,pkg : data frame and text package """ db = Database() db.connect() df = db.qcInv(propid=propid,timedelta=timedelta) if not len(df): logging.debug("No exposures found") return df,None kwargs = dict(index=False, float_format='{:.2f}'.format, justify='right') package = """Observing update @ {time} CST: ``` {content} ```""".format(content=df.fillna('').to_string(**kwargs), time=datetime.now().strftime("%H:%M") ) logging.debug(package) if debug: logging.debug("Exiting without posting.") return df,package ret = post_message(package, token=token, channel=channel) return df,package
{ "content_hash": "56b93fb952b46874cccb5f06b4ff48de", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 83, "avg_line_length": 28.948051948051948, "alnum_prop": 0.6576940331987439, "repo_name": "kadrlica/obztak", "id": "59a34a837f707f1f736c11b4a9d1e5459bbd5b12", "size": "2251", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "obztak/utils/slack.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "434149" } ], "symlink_target": "" }
""" Implementation of a flexible versioning scheme providing support for PEP-440, setuptools-compatible and semantic versioning. """ import logging import re from .compat import string_types from .util import parse_requirement __all__ = ['NormalizedVersion', 'NormalizedMatcher', 'LegacyVersion', 'LegacyMatcher', 'SemanticVersion', 'SemanticMatcher', 'UnsupportedVersionError', 'get_scheme'] logger = logging.getLogger(__name__) class UnsupportedVersionError(ValueError): """This is an unsupported version.""" pass class Version(object): def __init__(self, s): self._string = s = s.strip() self._parts = parts = self.parse(s) assert isinstance(parts, tuple) assert len(parts) > 0 def parse(self, s): raise NotImplementedError('please implement in a subclass') def _check_compatible(self, other): if type(self) != type(other): raise TypeError('cannot compare %r and %r' % (self, other)) def __eq__(self, other): self._check_compatible(other) return self._parts == other._parts def __ne__(self, other): return not self.__eq__(other) def __lt__(self, other): self._check_compatible(other) return self._parts < other._parts def __gt__(self, other): return not (self.__lt__(other) or self.__eq__(other)) def __le__(self, other): return self.__lt__(other) or self.__eq__(other) def __ge__(self, other): return self.__gt__(other) or self.__eq__(other) # See http://docs.python.org/reference/datamodel#object.__hash__ def __hash__(self): return hash(self._parts) def __repr__(self): return "%s('%s')" % (self.__class__.__name__, self._string) def __str__(self): return self._string @property def is_prerelease(self): raise NotImplementedError('Please implement in subclasses.') class Matcher(object): version_class = None # value is either a callable or the name of a method _operators = { '<': lambda v, c, p: v < c, '>': lambda v, c, p: v > c, '<=': lambda v, c, p: v == c or v < c, '>=': lambda v, c, p: v == c or v > c, '==': lambda v, c, p: v == c, '===': lambda v, c, p: v == c, # by default, compatible => >=. '~=': lambda v, c, p: v == c or v > c, '!=': lambda v, c, p: v != c, } # this is a method only to support alternative implementations # via overriding def parse_requirement(self, s): return parse_requirement(s) def __init__(self, s): if self.version_class is None: raise ValueError('Please specify a version class') self._string = s = s.strip() r = self.parse_requirement(s) if not r: raise ValueError('Not valid: %r' % s) self.name = r.name self.key = self.name.lower() # for case-insensitive comparisons clist = [] if r.constraints: # import pdb; pdb.set_trace() for op, s in r.constraints: if s.endswith('.*'): if op not in ('==', '!='): raise ValueError('\'.*\' not allowed for ' '%r constraints' % op) # Could be a partial version (e.g. for '2.*') which # won't parse as a version, so keep it as a string vn, prefix = s[:-2], True # Just to check that vn is a valid version self.version_class(vn) else: # Should parse as a version, so we can create an # instance for the comparison vn, prefix = self.version_class(s), False clist.append((op, vn, prefix)) self._parts = tuple(clist) def match(self, version): """ Check if the provided version matches the constraints. :param version: The version to match against this instance. :type version: String or :class:`Version` instance. 
""" if isinstance(version, string_types): version = self.version_class(version) for operator, constraint, prefix in self._parts: f = self._operators.get(operator) if isinstance(f, string_types): f = getattr(self, f) if not f: msg = ('%r not implemented ' 'for %s' % (operator, self.__class__.__name__)) raise NotImplementedError(msg) if not f(version, constraint, prefix): return False return True @property def exact_version(self): result = None if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='): result = self._parts[0][1] return result def _check_compatible(self, other): if type(self) != type(other) or self.name != other.name: raise TypeError('cannot compare %s and %s' % (self, other)) def __eq__(self, other): self._check_compatible(other) return self.key == other.key and self._parts == other._parts def __ne__(self, other): return not self.__eq__(other) # See http://docs.python.org/reference/datamodel#object.__hash__ def __hash__(self): return hash(self.key) + hash(self._parts) def __repr__(self): return "%s(%r)" % (self.__class__.__name__, self._string) def __str__(self): return self._string PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?' r'(\.(post)(\d+))?(\.(dev)(\d+))?' r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$') def _pep_440_key(s): s = s.strip() m = PEP440_VERSION_RE.match(s) if not m: raise UnsupportedVersionError('Not a valid version: %s' % s) groups = m.groups() nums = tuple(int(v) for v in groups[1].split('.')) while len(nums) > 1 and nums[-1] == 0: nums = nums[:-1] if not groups[0]: epoch = 0 else: epoch = int(groups[0]) pre = groups[4:6] post = groups[7:9] dev = groups[10:12] local = groups[13] if pre == (None, None): pre = () else: pre = pre[0], int(pre[1]) if post == (None, None): post = () else: post = post[0], int(post[1]) if dev == (None, None): dev = () else: dev = dev[0], int(dev[1]) if local is None: local = () else: parts = [] for part in local.split('.'): # to ensure that numeric compares as > lexicographic, avoid # comparing them directly, but encode a tuple which ensures # correct sorting if part.isdigit(): part = (1, int(part)) else: part = (0, part) parts.append(part) local = tuple(parts) if not pre: # either before pre-release, or final release and after if not post and dev: # before pre-release pre = ('a', -1) # to sort before a0 else: pre = ('z',) # to sort after all pre-releases # now look at the state of post and dev. if not post: post = ('_',) # sort before 'a' if not dev: dev = ('final',) #print('%s -> %s' % (s, m.groups())) return epoch, nums, pre, post, dev, local _normalized_key = _pep_440_key class NormalizedVersion(Version): """A rational version. Good: 1.2 # equivalent to "1.2.0" 1.2.0 1.2a1 1.2.3a2 1.2.3b1 1.2.3c1 1.2.3.4 TODO: fill this out Bad: 1 # minimum two numbers 1.2a # release level must have a release serial 1.2.3b """ def parse(self, s): result = _normalized_key(s) # _normalized_key loses trailing zeroes in the release # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0 # However, PEP 440 prefix matching needs it: for example, # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0). 
m = PEP440_VERSION_RE.match(s) # must succeed groups = m.groups() self._release_clause = tuple(int(v) for v in groups[1].split('.')) return result PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev']) @property def is_prerelease(self): return any(t[0] in self.PREREL_TAGS for t in self._parts if t) def _match_prefix(x, y): x = str(x) y = str(y) if x == y: return True if not x.startswith(y): return False n = len(y) return x[n] == '.' class NormalizedMatcher(Matcher): version_class = NormalizedVersion # value is either a callable or the name of a method _operators = { '~=': '_match_compatible', '<': '_match_lt', '>': '_match_gt', '<=': '_match_le', '>=': '_match_ge', '==': '_match_eq', '===': '_match_arbitrary', '!=': '_match_ne', } def _adjust_local(self, version, constraint, prefix): if prefix: strip_local = '+' not in constraint and version._parts[-1] else: # both constraint and version are # NormalizedVersion instances. # If constraint does not have a local component, # ensure the version doesn't, either. strip_local = not constraint._parts[-1] and version._parts[-1] if strip_local: s = version._string.split('+', 1)[0] version = self.version_class(s) return version, constraint def _match_lt(self, version, constraint, prefix): version, constraint = self._adjust_local(version, constraint, prefix) if version >= constraint: return False release_clause = constraint._release_clause pfx = '.'.join([str(i) for i in release_clause]) return not _match_prefix(version, pfx) def _match_gt(self, version, constraint, prefix): version, constraint = self._adjust_local(version, constraint, prefix) if version <= constraint: return False release_clause = constraint._release_clause pfx = '.'.join([str(i) for i in release_clause]) return not _match_prefix(version, pfx) def _match_le(self, version, constraint, prefix): version, constraint = self._adjust_local(version, constraint, prefix) return version <= constraint def _match_ge(self, version, constraint, prefix): version, constraint = self._adjust_local(version, constraint, prefix) return version >= constraint def _match_eq(self, version, constraint, prefix): version, constraint = self._adjust_local(version, constraint, prefix) if not prefix: result = (version == constraint) else: result = _match_prefix(version, constraint) return result def _match_arbitrary(self, version, constraint, prefix): return str(version) == str(constraint) def _match_ne(self, version, constraint, prefix): version, constraint = self._adjust_local(version, constraint, prefix) if not prefix: result = (version != constraint) else: result = not _match_prefix(version, constraint) return result def _match_compatible(self, version, constraint, prefix): version, constraint = self._adjust_local(version, constraint, prefix) if version == constraint: return True if version < constraint: return False # if not prefix: # return True release_clause = constraint._release_clause if len(release_clause) > 1: release_clause = release_clause[:-1] pfx = '.'.join([str(i) for i in release_clause]) return _match_prefix(version, pfx) _REPLACEMENTS = ( (re.compile('[.+-]$'), ''), # remove trailing puncts (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start (re.compile('^[.-]'), ''), # remove leading puncts (re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion) (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'), # remove leading v(ersion) (re.compile('[.]{2,}'), '.'), # multiple runs of '.' 
(re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha (re.compile(r'\b(pre-alpha|prealpha)\b'), 'pre.alpha'), # standardise (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses ) _SUFFIX_REPLACEMENTS = ( (re.compile('^[:~._+-]+'), ''), # remove leading puncts (re.compile('[,*")([\\]]'), ''), # remove unwanted chars (re.compile('[~:+_ -]'), '.'), # replace illegal chars (re.compile('[.]{2,}'), '.'), # multiple runs of '.' (re.compile(r'\.$'), ''), # trailing '.' ) _NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)') def _suggest_semantic_version(s): """ Try to suggest a semantic form for a version for which _suggest_normalized_version couldn't come up with anything. """ result = s.strip().lower() for pat, repl in _REPLACEMENTS: result = pat.sub(repl, result) if not result: result = '0.0.0' # Now look for numeric prefix, and separate it out from # the rest. #import pdb; pdb.set_trace() m = _NUMERIC_PREFIX.match(result) if not m: prefix = '0.0.0' suffix = result else: prefix = m.groups()[0].split('.') prefix = [int(i) for i in prefix] while len(prefix) < 3: prefix.append(0) if len(prefix) == 3: suffix = result[m.end():] else: suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():] prefix = prefix[:3] prefix = '.'.join([str(i) for i in prefix]) suffix = suffix.strip() if suffix: #import pdb; pdb.set_trace() # massage the suffix. for pat, repl in _SUFFIX_REPLACEMENTS: suffix = pat.sub(repl, suffix) if not suffix: result = prefix else: sep = '-' if 'dev' in suffix else '+' result = prefix + sep + suffix if not is_semver(result): result = None return result def _suggest_normalized_version(s): """Suggest a normalized version close to the given version string. If you have a version string that isn't rational (i.e. NormalizedVersion doesn't like it) then you might be able to get an equivalent (or close) rational version from this function. This does a number of simple normalizations to the given string, based on observation of versions currently in use on PyPI. Given a dump of those version during PyCon 2009, 4287 of them: - 2312 (53.93%) match NormalizedVersion without change with the automatic suggestion - 3474 (81.04%) match when using this suggestion method @param s {str} An irrational version string. @returns A rational version string, or None, if couldn't determine one. """ try: _normalized_key(s) return s # already rational except UnsupportedVersionError: pass rs = s.lower() # part of this could use maketrans for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'), ('beta', 'b'), ('rc', 'c'), ('-final', ''), ('-pre', 'c'), ('-release', ''), ('.release', ''), ('-stable', ''), ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''), ('final', '')): rs = rs.replace(orig, repl) # if something ends with dev or pre, we add a 0 rs = re.sub(r"pre$", r"pre0", rs) rs = re.sub(r"dev$", r"dev0", rs) # if we have something like "b-2" or "a.2" at the end of the # version, that is probably beta, alpha, etc # let's remove the dash or dot rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs) # 1.0-dev-r371 -> 1.0.dev371 # 0.1-dev-r79 -> 0.1.dev79 rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs) # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1 rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs) # Clean: v0.3, v1.0 if rs.startswith('v'): rs = rs[1:] # Clean leading '0's on numbers. #TODO: unintended side-effect on, e.g., "2003.05.09" # PyPI stats: 77 (~2%) better rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs) # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers # zero. 
# PyPI stats: 245 (7.56%) better rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs) # the 'dev-rNNN' tag is a dev tag rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs) # clean the - when used as a pre delimiter rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs) # a terminal "dev" or "devel" can be changed into ".dev0" rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs) # a terminal "dev" can be changed into ".dev0" rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs) # a terminal "final" or "stable" can be removed rs = re.sub(r"(final|stable)$", "", rs) # The 'r' and the '-' tags are post release tags # 0.4a1.r10 -> 0.4a1.post10 # 0.9.33-17222 -> 0.9.33.post17222 # 0.9.33-r17222 -> 0.9.33.post17222 rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs) # Clean 'r' instead of 'dev' usage: # 0.9.33+r17222 -> 0.9.33.dev17222 # 1.0dev123 -> 1.0.dev123 # 1.0.git123 -> 1.0.dev123 # 1.0.bzr123 -> 1.0.dev123 # 0.1a0dev.123 -> 0.1a0.dev123 # PyPI stats: ~150 (~4%) better rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs) # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: # 0.2.pre1 -> 0.2c1 # 0.2-c1 -> 0.2c1 # 1.0preview123 -> 1.0c123 # PyPI stats: ~21 (0.62%) better rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs) # Tcl/Tk uses "px" for their post release markers rs = re.sub(r"p(\d+)$", r".post\1", rs) try: _normalized_key(rs) except UnsupportedVersionError: rs = None return rs # # Legacy version processing (distribute-compatible) # _VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) _VERSION_REPLACE = { 'pre': 'c', 'preview': 'c', '-': 'final-', 'rc': 'c', 'dev': '@', '': None, '.': None, } def _legacy_key(s): def get_parts(s): result = [] for p in _VERSION_PART.split(s.lower()): p = _VERSION_REPLACE.get(p, p) if p: if '0' <= p[:1] <= '9': p = p.zfill(8) else: p = '*' + p result.append(p) result.append('*final') return result result = [] for p in get_parts(s): if p.startswith('*'): if p < '*final': while result and result[-1] == '*final-': result.pop() while result and result[-1] == '00000000': result.pop() result.append(p) return tuple(result) class LegacyVersion(Version): def parse(self, s): return _legacy_key(s) @property def is_prerelease(self): result = False for x in self._parts: if (isinstance(x, string_types) and x.startswith('*') and x < '*final'): result = True break return result class LegacyMatcher(Matcher): version_class = LegacyVersion _operators = dict(Matcher._operators) _operators['~='] = '_match_compatible' numeric_re = re.compile(r'^(\d+(\.\d+)*)') def _match_compatible(self, version, constraint, prefix): if version < constraint: return False m = self.numeric_re.match(str(constraint)) if not m: logger.warning('Cannot compute compatible match for version %s ' ' and constraint %s', version, constraint) return True s = m.groups()[0] if '.' in s: s = s.rsplit('.', 1)[0] return _match_prefix(version, s) # # Semantic versioning # _SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' 
                        r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)


def is_semver(s):
    return _SEMVER_RE.match(s)


def _semantic_key(s):
    def make_tuple(s, absent):
        if s is None:
            result = (absent,)
        else:
            parts = s[1:].split('.')
            # We can't compare ints and strings on Python 3, so fudge it
            # by zero-filling numeric values to simulate a numeric comparison
            result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
        return result

    m = is_semver(s)
    if not m:
        raise UnsupportedVersionError(s)
    groups = m.groups()
    major, minor, patch = [int(i) for i in groups[:3]]
    # choose the '|' and '*' so that versions sort correctly
    pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
    return (major, minor, patch), pre, build


class SemanticVersion(Version):
    def parse(self, s):
        return _semantic_key(s)

    @property
    def is_prerelease(self):
        return self._parts[1][0] != '|'


class SemanticMatcher(Matcher):
    version_class = SemanticVersion


class VersionScheme(object):
    def __init__(self, key, matcher, suggester=None):
        self.key = key
        self.matcher = matcher
        self.suggester = suggester

    def is_valid_version(self, s):
        try:
            self.matcher.version_class(s)
            result = True
        except UnsupportedVersionError:
            result = False
        return result

    def is_valid_matcher(self, s):
        try:
            self.matcher(s)
            result = True
        except UnsupportedVersionError:
            result = False
        return result

    def is_valid_constraint_list(self, s):
        """
        Used for processing some metadata fields
        """
        # See issue #140. Be tolerant of a single trailing comma.
        if s.endswith(','):
            s = s[:-1]
        return self.is_valid_matcher('dummy_name (%s)' % s)

    def suggest(self, s):
        if self.suggester is None:
            result = None
        else:
            result = self.suggester(s)
        return result


_SCHEMES = {
    'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
                                _suggest_normalized_version),
    # any string is acceptable as a legacy version, so the suggester is the
    # identity function; suggest() calls it with the single argument s
    'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda s: s),
    'semantic': VersionScheme(_semantic_key, SemanticMatcher,
                              _suggest_semantic_version),
}

_SCHEMES['default'] = _SCHEMES['normalized']


def get_scheme(name):
    if name not in _SCHEMES:
        raise ValueError('unknown scheme name: %r' % name)
    return _SCHEMES[name]
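

# Usage sketch (illustrative; this demo block is not part of the original
# module). It exercises the public entry points defined above. The
# "name (constraints)" requirement form is assumed to be one of the formats
# accepted by parse_requirement() in .util.
if __name__ == '__main__':
    scheme = get_scheme('default')
    assert scheme.is_valid_version('1.4.5')
    matcher = NormalizedMatcher('requests (>= 1.0, < 2.0)')
    assert matcher.match('1.5')
    assert not matcher.match('2.1')
    print(NormalizedVersion('1.0a1') < NormalizedVersion('1.0'))  # True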
{ "content_hash": "398c3c14a026b530c936f2b113e58885", "timestamp": "", "source": "github", "line_count": 734, "max_line_length": 78, "avg_line_length": 31.85558583106267, "alnum_prop": 0.5250620135146694, "repo_name": "sonntagsgesicht/regtest", "id": "86c069a7c2afbd54954452fb3a417a842cbdd24f", "size": "23508", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": ".aux/venv/lib/python3.9/site-packages/pip/_vendor/distlib/version.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "13888" } ], "symlink_target": "" }
from PyQt5 import QtCore, QtGui, QtWidgets class Ui_MainWindow(object): def setupUi(self, MainWindow): MainWindow.setObjectName("MainWindow") MainWindow.resize(635, 400) self.centralwidget = QtWidgets.QWidget(MainWindow) self.centralwidget.setObjectName("centralwidget") self.gridLayout = QtWidgets.QGridLayout(self.centralwidget) self.gridLayout.setObjectName("gridLayout") self.tabWidget = QtWidgets.QTabWidget(self.centralwidget) self.tabWidget.setTabsClosable(True) self.tabWidget.setObjectName("tabWidget") self.tab = QtWidgets.QWidget() self.tab.setObjectName("tab") self.gridLayout_2 = QtWidgets.QGridLayout(self.tab) self.gridLayout_2.setObjectName("gridLayout_2") self.label = QtWidgets.QLabel(self.tab) self.label.setObjectName("label") self.gridLayout_2.addWidget(self.label, 0, 0, 1, 1) self.tabWidget.addTab(self.tab, "") self.gridLayout.addWidget(self.tabWidget, 0, 0, 1, 1) MainWindow.setCentralWidget(self.centralwidget) self.menubar = QtWidgets.QMenuBar(MainWindow) self.menubar.setGeometry(QtCore.QRect(0, 0, 635, 21)) self.menubar.setObjectName("menubar") MainWindow.setMenuBar(self.menubar) self.statusbar = QtWidgets.QStatusBar(MainWindow) self.statusbar.setObjectName("statusbar") MainWindow.setStatusBar(self.statusbar) self.retranslateUi(MainWindow) self.tabWidget.setCurrentIndex(0) QtCore.QMetaObject.connectSlotsByName(MainWindow) def retranslateUi(self, MainWindow): _translate = QtCore.QCoreApplication.translate MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow")) self.label.setText(_translate("MainWindow", "Test Tab")) self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("MainWindow", "Tab 1"))
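

# Minimal launcher sketch (not part of the generated file): Ui_MainWindow only
# populates an existing window, so a small entry point is needed to show it.
# This is the conventional PyQt5 pattern and assumes nothing beyond the class
# defined above.
if __name__ == "__main__":
    import sys

    app = QtWidgets.QApplication(sys.argv)
    window = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(window)
    window.show()
    sys.exit(app.exec_())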
{ "content_hash": "a884d66506d0567f19c7640307dd6075", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 102, "avg_line_length": 47.21951219512195, "alnum_prop": 0.6967975206611571, "repo_name": "mgotz/EBT_evaluation", "id": "5c24be9c8f225a302dbdf72005d01c176e162578", "size": "2141", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/testwindow_ui_qt5.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "255203" } ], "symlink_target": "" }
import factory

from fjord.base import browsers
from fjord.feedback.models import (
    Product, Response, ResponseContext, ResponseEmail, ResponsePI
)


USER_AGENT = 'Mozilla/5.0 (X11; Linux i686; rv:17.0) Gecko/17.0 Firefox/17.0'


class ProductFactory(factory.DjangoModelFactory):
    class Meta:
        model = Product

    display_name = u'Firefox'
    db_name = factory.LazyAttribute(lambda a: a.display_name)
    notes = u''
    slug = u'firefox'
    enabled = True
    on_dashboard = True
    on_picker = True
    browser_data_browser = u''


class ResponseFactory(factory.DjangoModelFactory):
    class Meta:
        model = Response

    happy = True  # plain bool; a trailing comma here would make this a tuple
    url = u''
    description = u'So awesome!'

    user_agent = USER_AGENT
    browser = factory.LazyAttribute(
        lambda a: browsers.parse_ua(a.user_agent).browser)
    browser_version = factory.LazyAttribute(
        lambda a: browsers.parse_ua(a.user_agent).browser_version)
    platform = factory.LazyAttribute(
        lambda a: browsers.parse_ua(a.user_agent).platform)
    product = factory.LazyAttribute(
        lambda a: Response.infer_product(
            browsers.parse_ua(a.user_agent)))
    channel = u'stable'
    version = factory.LazyAttribute(
        lambda a: browsers.parse_ua(a.user_agent).browser_version)

    locale = u'en-US'


class ResponseEmailFactory(factory.DjangoModelFactory):
    class Meta:
        model = ResponseEmail

    opinion = factory.SubFactory(ResponseFactory)
    email = '[email protected]'


class ResponseContextFactory(factory.DjangoModelFactory):
    class Meta:
        model = ResponseContext

    opinion = factory.SubFactory(ResponseFactory)
    data = '{}'


class ResponsePIFactory(factory.DjangoModelFactory):
    class Meta:
        model = ResponsePI

    opinion = factory.SubFactory(ResponseFactory)
    data = '{}'
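

def _example_usage():
    """Sketch of how these factories compose (illustrative, not part of the
    original module; running it requires a configured Django test database).
    With factory_boy, attributes can be overridden per call, and SubFactory
    fields build their related objects automatically. The overridden field
    values here are arbitrary examples.
    """
    response = ResponseFactory(happy=False, description=u'Firefox crashed')
    return ResponseEmailFactory(opinion=response)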
{ "content_hash": "8a98c49c0b5c884b6ee6dd9b431e387f", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 77, "avg_line_length": 24.194805194805195, "alnum_prop": 0.6768652710681696, "repo_name": "DESHRAJ/fjord", "id": "5670c4021e130a2e6f977d9dcf05ccee638ffa3d", "size": "1863", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "fjord/feedback/tests/__init__.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "168457" }, { "name": "JavaScript", "bytes": "299449" }, { "name": "Makefile", "bytes": "4594" }, { "name": "Python", "bytes": "709245" }, { "name": "Shell", "bytes": "13991" } ], "symlink_target": "" }
import os import base64 import urllib local_file = lambda x: os.path.join(os.path.dirname(__file__), x) def read_from_local(img_name): img_file = open(local_file(img_name), 'rb') img_content = img_file.read() return img_content def read_from_remote(url): img_resource = urllib.urlopen(url) img_content = img_resource.read() return img_content def write_to_local(bin_content, file_name="sample"): b64_content = base64.b64encode(bin_content) template = '<img src="data:image/png;base64,%s"/>' open(local_file(file_name + '.html'), 'w').write(template % b64_content) def test(): img_url = 'https://www.python.org/static/img/python-logo.png' write_to_local(read_from_remote(img_url), file_name='ignore-remote') write_to_local(read_from_local('python-logo.png'), file_name='ignore-local') if __name__ == '__main__': test()
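

# Illustrative inverse helper (not part of the original module): recover the
# raw image bytes from an HTML file produced by write_to_local() above. It
# assumes the exact data-URI template used in that function.
def read_from_data_uri(file_name='sample'):
    html_content = open(local_file(file_name + '.html')).read()
    # take everything between 'base64,' and the closing quote of the src attr
    b64_content = html_content.split('base64,', 1)[1].split('"', 1)[0]
    return base64.b64decode(b64_content)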
{ "content_hash": "98577182610cfaf1db5d55d6161b4b58", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 80, "avg_line_length": 25.941176470588236, "alnum_prop": 0.6621315192743764, "repo_name": "catroll/clipboard", "id": "520538cb035c83087444a742b5a22e9764008466", "size": "882", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "base64img/base64img.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "46233" }, { "name": "HTML", "bytes": "25991" }, { "name": "JavaScript", "bytes": "36164" }, { "name": "Python", "bytes": "48778" }, { "name": "Shell", "bytes": "121" } ], "symlink_target": "" }
from bug import Bug, BugException, Comment # noqa from bugsy import Bugsy, BugsyException, LoginException # noqa from search import Search # noqa
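
# Usage sketch (illustrative): the names below follow Bugsy's documented
# high-level API, but the credentials and bug number are placeholders.
#
#     bugzilla = Bugsy(username='user', password='pass')
#     bug = bugzilla.get(123456)
#     bug.status = 'RESOLVED'
#     bugzilla.put(bug)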
{ "content_hash": "6eb26720d6b7a0d1e1d8a451bafd65e6", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 63, "avg_line_length": 49.666666666666664, "alnum_prop": 0.785234899328859, "repo_name": "parkouss/Bugsy", "id": "4bc0c9fe7c19bbef6c55e2d6cc5fd13813be00ab", "size": "149", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "bugsy/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "63362" } ], "symlink_target": "" }
"""Python file with invalid syntax, used by scripts/linters/ python_linter_test. This file is using unquote() which is not allowed. """ from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules import urlparse import python_utils class FakeClass(python_utils.OBJECT): """This is a fake docstring for invalid syntax purposes.""" def __init__(self, fake_arg): self.fake_arg = fake_arg def fake_method(self, content): """This doesn't do anything. Args: content: str. The string to be unquoted. Returns: unquote(object). The unquoted string. """ return urllib.unquote(content) # Use of unquote is not allowed.
{ "content_hash": "f61299651d1e54df36152c0d87cb7b92", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 78, "avg_line_length": 28.5, "alnum_prop": 0.6654135338345865, "repo_name": "prasanna08/oppia", "id": "5ba781be46b9647d0c4dad9aeb236100998fe6ae", "size": "1421", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "scripts/linters/test_files/invalid_unquote.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "97795" }, { "name": "HTML", "bytes": "1128491" }, { "name": "JavaScript", "bytes": "733121" }, { "name": "Python", "bytes": "9362251" }, { "name": "Shell", "bytes": "10639" }, { "name": "TypeScript", "bytes": "6077851" } ], "symlink_target": "" }
"""Support for Tuya Smart devices.""" import asyncio from datetime import timedelta import logging from tuyaha import TuyaApi from tuyaha.tuyaapi import TuyaAPIException, TuyaNetException, TuyaServerException import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import CONF_PASSWORD, CONF_PLATFORM, CONF_USERNAME from homeassistant.core import callback from homeassistant.exceptions import ConfigEntryNotReady import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_track_time_interval from .const import ( CONF_COUNTRYCODE, DOMAIN, TUYA_DATA, TUYA_DISCOVERY_NEW, TUYA_PLATFORMS, ) _LOGGER = logging.getLogger(__name__) ENTRY_IS_SETUP = "tuya_entry_is_setup" PARALLEL_UPDATES = 0 SERVICE_FORCE_UPDATE = "force_update" SERVICE_PULL_DEVICES = "pull_devices" SIGNAL_DELETE_ENTITY = "tuya_delete" SIGNAL_UPDATE_ENTITY = "tuya_update" TUYA_TYPE_TO_HA = { "climate": "climate", "cover": "cover", "fan": "fan", "light": "light", "scene": "scene", "switch": "switch", } TUYA_TRACKER = "tuya_tracker" CONFIG_SCHEMA = vol.Schema( vol.All( cv.deprecated(DOMAIN), { DOMAIN: vol.Schema( { vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_COUNTRYCODE): cv.string, vol.Optional(CONF_PLATFORM, default="tuya"): cv.string, } ) }, ), extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the Tuya integration.""" conf = config.get(DOMAIN) if conf is not None: hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=conf ) ) return True async def async_setup_entry(hass, entry): """Set up Tuya platform.""" tuya = TuyaApi() username = entry.data[CONF_USERNAME] password = entry.data[CONF_PASSWORD] country_code = entry.data[CONF_COUNTRYCODE] platform = entry.data[CONF_PLATFORM] try: await hass.async_add_executor_job( tuya.init, username, password, country_code, platform ) except (TuyaNetException, TuyaServerException): raise ConfigEntryNotReady() except TuyaAPIException as exc: _LOGGER.error( "Connection error during integration setup. 
Error: %s", exc, ) return False hass.data[DOMAIN] = { TUYA_DATA: tuya, TUYA_TRACKER: None, ENTRY_IS_SETUP: set(), "entities": {}, "pending": {}, } async def async_load_devices(device_list): """Load new devices by device_list.""" device_type_list = {} for device in device_list: dev_type = device.device_type() if ( dev_type in TUYA_TYPE_TO_HA and device.object_id() not in hass.data[DOMAIN]["entities"] ): ha_type = TUYA_TYPE_TO_HA[dev_type] if ha_type not in device_type_list: device_type_list[ha_type] = [] device_type_list[ha_type].append(device.object_id()) hass.data[DOMAIN]["entities"][device.object_id()] = None for ha_type, dev_ids in device_type_list.items(): config_entries_key = f"{ha_type}.tuya" if config_entries_key not in hass.data[DOMAIN][ENTRY_IS_SETUP]: hass.data[DOMAIN]["pending"][ha_type] = dev_ids hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, ha_type) ) hass.data[DOMAIN][ENTRY_IS_SETUP].add(config_entries_key) else: async_dispatcher_send(hass, TUYA_DISCOVERY_NEW.format(ha_type), dev_ids) device_list = await hass.async_add_executor_job(tuya.get_all_devices) await async_load_devices(device_list) def _get_updated_devices(): tuya.poll_devices_update() return tuya.get_all_devices() async def async_poll_devices_update(event_time): """Check if accesstoken is expired and pull device list from server.""" _LOGGER.debug("Pull devices from Tuya.") # Add new discover device. device_list = await hass.async_add_executor_job(_get_updated_devices) await async_load_devices(device_list) # Delete not exist device. newlist_ids = [] for device in device_list: newlist_ids.append(device.object_id()) for dev_id in list(hass.data[DOMAIN]["entities"]): if dev_id not in newlist_ids: async_dispatcher_send(hass, SIGNAL_DELETE_ENTITY, dev_id) hass.data[DOMAIN]["entities"].pop(dev_id) hass.data[DOMAIN][TUYA_TRACKER] = async_track_time_interval( hass, async_poll_devices_update, timedelta(minutes=5) ) hass.services.async_register( DOMAIN, SERVICE_PULL_DEVICES, async_poll_devices_update ) async def async_force_update(call): """Force all devices to pull data.""" async_dispatcher_send(hass, SIGNAL_UPDATE_ENTITY) hass.services.async_register(DOMAIN, SERVICE_FORCE_UPDATE, async_force_update) return True async def async_unload_entry(hass, entry): """Unloading the Tuya platforms.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload( entry, component.split(".", 1)[0] ) for component in hass.data[DOMAIN][ENTRY_IS_SETUP] ] ) ) if unload_ok: hass.data[DOMAIN][ENTRY_IS_SETUP] = set() hass.data[DOMAIN][TUYA_TRACKER]() hass.data[DOMAIN][TUYA_TRACKER] = None hass.data[DOMAIN][TUYA_DATA] = None hass.services.async_remove(DOMAIN, SERVICE_FORCE_UPDATE) hass.services.async_remove(DOMAIN, SERVICE_PULL_DEVICES) hass.data.pop(DOMAIN) return unload_ok class TuyaDevice(Entity): """Tuya base device.""" def __init__(self, tuya, platform): """Init Tuya devices.""" self._tuya = tuya self._tuya_platform = platform async def async_added_to_hass(self): """Call when entity is added to hass.""" dev_id = self._tuya.object_id() self.hass.data[DOMAIN]["entities"][dev_id] = self.entity_id async_dispatcher_connect(self.hass, SIGNAL_DELETE_ENTITY, self._delete_callback) async_dispatcher_connect(self.hass, SIGNAL_UPDATE_ENTITY, self._update_callback) @property def object_id(self): """Return Tuya device id.""" return self._tuya.object_id() @property def unique_id(self): """Return a unique ID.""" return f"tuya.{self._tuya.object_id()}" @property def name(self): """Return Tuya device 
name.""" return self._tuya.name() @property def available(self): """Return if the device is available.""" return self._tuya.available() @property def device_info(self): """Return a device description for device registry.""" _device_info = { "identifiers": {(DOMAIN, f"{self.unique_id}")}, "manufacturer": TUYA_PLATFORMS.get( self._tuya_platform, self._tuya_platform ), "name": self.name, "model": self._tuya.object_type(), } return _device_info def update(self): """Refresh Tuya device data.""" self._tuya.update() async def _delete_callback(self, dev_id): """Remove this entity.""" if dev_id == self.object_id: entity_registry = ( await self.hass.helpers.entity_registry.async_get_registry() ) if entity_registry.async_is_registered(self.entity_id): entity_registry.async_remove(self.entity_id) else: await self.async_remove() @callback def _update_callback(self): """Call update method.""" self.async_schedule_update_ha_state(True)
{ "content_hash": "b34485e2506b709c970438be528be2f8", "timestamp": "", "source": "github", "line_count": 270, "max_line_length": 88, "avg_line_length": 31.28148148148148, "alnum_prop": 0.600757755150367, "repo_name": "robbiet480/home-assistant", "id": "4a522b76b8ec561bfe725632d6427ad26ead070e", "size": "8446", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "homeassistant/components/tuya/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "18837456" }, { "name": "Shell", "bytes": "6846" } ], "symlink_target": "" }
"""test_restconf_session.py RestconfSession test """ from __future__ import absolute_import import os import unittest from ydk.path.sessions import RestconfSession from ydk.types import EncodingFormat from ydk.path import Repository from ydk.path import Codec class SanityTest(unittest.TestCase): @classmethod def setUpClass(self): # Need to keep a local reference for repo to keep it alive repo_path = os.path.dirname(__file__) repo_path = os.path.join(repo_path, '..', '..', '..', 'cpp', 'core', 'tests', 'models') self.repo = Repository(repo_path) self.restconf_session = RestconfSession(self.repo, 'localhost', 'admin', 'admin', 12306, EncodingFormat.JSON, "/data", "/data") def test_create_del_read(self): root_schema = self.restconf_session.get_root_schema() runner = root_schema.create_datanode('ydktest-sanity:runner', '') delete_rpc = root_schema.create_rpc('ydk:delete') codec_service = Codec() json = codec_service.encode(runner, EncodingFormat.JSON, False) delete_rpc.get_input_node().create_datanode('entity', json) delete_rpc(self.restconf_session) runner.create_datanode('ytypes/built-in-t/number8', '3') json = codec_service.encode(runner, EncodingFormat.JSON, False) self.assertNotEqual(json, '') create_rpc = root_schema.create_rpc('ydk:create') create_rpc.get_input_node().create_datanode('entity', json) read_rpc = root_schema.create_rpc('ydk:read') runner_read = root_schema.create_datanode('ydktest-sanity:runner', '') json = codec_service.encode(runner_read, EncodingFormat.JSON, False) self.assertNotEqual(json, '') read_rpc.get_input_node().create_datanode('filter', json) read_result = read_rpc(self.restconf_session) self.assertEqual(read_result is not None, True) runner = root_schema.create_datanode('ydktest-sanity:runner', '') runner.create_datanode('ytypes/built-in-t/number8', '5') json = codec_service.encode(runner, EncodingFormat.JSON, False) self.assertNotEqual(json, '') update_rpc = root_schema.create_rpc('ydk:update') update_rpc.get_input_node().create_datanode('entity', json) update_rpc(self.restconf_session) if __name__ == '__main__': import sys suite = unittest.TestLoader().loadTestsFromTestCase(SanityTest) ret = not unittest.TextTestRunner(verbosity=2).run(suite).wasSuccessful() sys.exit(ret)
{ "content_hash": "b231fca81d9f5672f4c8c4fd1fa812d9", "timestamp": "", "source": "github", "line_count": 67, "max_line_length": 135, "avg_line_length": 37.850746268656714, "alnum_prop": 0.666403785488959, "repo_name": "psykokwak4/ydk-gen", "id": "cba5436771422d6feb7c93ee6150d1559e8f3210", "size": "3250", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "sdk/python/core/tests/test_restconf_session.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "14940" }, { "name": "C", "bytes": "834" }, { "name": "C++", "bytes": "2491676" }, { "name": "CMake", "bytes": "46417" }, { "name": "CSS", "bytes": "67" }, { "name": "Makefile", "bytes": "32099" }, { "name": "Objective-C", "bytes": "4625" }, { "name": "Python", "bytes": "713839" }, { "name": "Ruby", "bytes": "4023" }, { "name": "Shell", "bytes": "20553" } ], "symlink_target": "" }
'''
Author: John D. Anderson
Email: [email protected]

Problem Statement: You have 20 bottles of pills. 19 bottles have 1.0 gram
pills, but one has pills of 1.1 grams. Given a scale that provides an exact
measurement, how would you find the heavy bottle? You can only use the scale
once.

Complexity: O(N)

Usage: the_heavy_pill <bottles>
'''

# libraries
import random


# function
def answer(n):

    # generate bottle numbers; taking k pills from bottle k gives each bottle
    # a unique contribution to the total weight
    bottles = [x+1 for x in range(n)]

    # get expected sum
    expected = sum(bottles)

    # seed large pill
    heavypill = random.randint(0, n-1)

    # fill bottles
    bottles[heavypill] *= 1.1

    # get actual sum
    actual = sum(bottles)

    # exit: round before returning so floating-point error (e.g. a difference
    # of 0.7000000000000028) still maps to a whole bottle number
    return int(round((actual - expected) / 0.1))


# executable
if __name__ == '__main__':

    # executable import only
    from docopt import docopt

    # check CLA
    args = docopt(__doc__)

    # run
    print answer(int(args['<bottles>']))
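
# Worked example of the weighing trick implemented above: with n = 20 bottles
# and bottle 7 holding the heavy pills, taking k pills from bottle k gives an
# expected weight of 1 + 2 + ... + 20 = 210.0 g. The actual weight is
# 210 + 7 * 0.1 = 210.7 g, so (210.7 - 210.0) / 0.1 = 7 identifies the heavy
# bottle in a single weighing.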
{ "content_hash": "119d952c0de8cb98e5d32b3df2b4b57c", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 76, "avg_line_length": 19.375, "alnum_prop": 0.6483870967741936, "repo_name": "RagingTiger/CodingInterview", "id": "e0db2a2111e22eae1a18227852dc8ac00eef8714", "size": "953", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ch6/python/the_heavy_pill.py", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "45308" } ], "symlink_target": "" }
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('gem', '0032_remove_gem_profiles'), ] operations = [ migrations.AlterField( model_name='commentcountrule', name='operator', field=models.CharField(choices=[('gt', 'greater than'), ('lt', 'less than'), ('eq', 'equals')], default='gt', max_length=3, verbose_name='operator'), ), migrations.AlterField( model_name='gemsettings', name='banned_keywords_and_patterns', field=models.TextField(blank=True, help_text='Banned keywords and patterns for comments, separated by a line a break. Use only lowercase letters for keywords.', null=True, verbose_name='Banned Keywords and Patterns'), ), migrations.AlterField( model_name='gemsettings', name='banned_names_with_offensive_language', field=models.TextField(blank=True, help_text='Banned names with offensive language, separated by a line a break. Use only lowercase letters for keywords.', null=True, verbose_name='Banned Names With Offensive Language'), ), migrations.AlterField( model_name='gemsettings', name='bbm_ga_account_subdomain', field=models.TextField(default='bbm', help_text='Subdomain prefix to seperate traffics data for Google Analytics. Defaults to "bbm"'), ), migrations.AlterField( model_name='gemsettings', name='bbm_ga_tracking_code', field=models.TextField(blank=True, help_text='Tracking code for additional Google Analytics account to divert traffic that matches a specific subdomain.', null=True), ), migrations.AlterField( model_name='gemsettings', name='moderator_name', field=models.TextField(blank=True, help_text='This is the name that will appear on the front end when a moderator responds to a user', null=True, verbose_name='Moderator Name'), ), migrations.AlterField( model_name='gemsettings', name='partner_credit_description', field=models.TextField(blank=True, help_text='The text that will be shown for the partner credit e.g. "Translated by Sajan"', null=True), ), migrations.AlterField( model_name='gemsettings', name='partner_credit_link', field=models.TextField(blank=True, help_text=' The link that the partner credit will redirect to e.g. https://www.google.co.za/', null=True), ), migrations.AlterField( model_name='gemsettings', name='show_join_banner', field=models.BooleanField(default=False, help_text='When true, this will show the join banner on the homepage.'), ), migrations.AlterField( model_name='gemsettings', name='show_partner_credit', field=models.BooleanField(default=False, help_text='When true, this will show the partner credit on the homepage.'), ), migrations.AlterField( model_name='oidcsettings', name='oidc_rp_scopes', field=models.CharField(blank=True, default='openid profile email address phone site roles', max_length=255), ), migrations.AlterField( model_name='profiledatarule', name='operator', field=models.CharField(choices=[('lt', 'Less than'), ('lte', 'Less than or equal'), ('gt', 'Greater than'), ('gte', 'Greater than or equal'), ('eq', 'Equal'), ('neq', 'Not equal'), ('ol', 'Older than'), ('ole', 'Older than or equal'), ('yg', 'Younger than'), ('yge', 'Younger than or equal'), ('eqa', 'Of age'), ('reg', 'Regex')], default='eq', help_text='Age operators work only on dates, please input the age you want to compare in "value". When using greater/less than on text field, it would compare it by alphabetical order, where dates are compared to the specified date by chronological order.', max_length=3), ), ]
{ "content_hash": "f3f1df415b77f815910ffa866f94838d", "timestamp": "", "source": "github", "line_count": 73, "max_line_length": 629, "avg_line_length": 56.54794520547945, "alnum_prop": 0.6308139534883721, "repo_name": "praekelt/molo-gem", "id": "bbaa147721ae2f93797104e345d9b5fe0336eaa9", "size": "4202", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "gem/migrations/0033_auto_20190123_1658.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "CSS", "bytes": "635572" }, { "name": "Dockerfile", "bytes": "414" }, { "name": "HTML", "bytes": "669724" }, { "name": "JavaScript", "bytes": "9619" }, { "name": "Python", "bytes": "308218" } ], "symlink_target": "" }
import csv from io import StringIO import queue import threading from cdrgen.sources import UniformSource from cdrgen.utils import asterisk_like, csv_to_cdr, time_of_day class CDRStream(threading.Thread): def __init__(self, formatter, source): self.formatter = formatter self.output = StringIO() self.writer = csv.writer(self.output, delimiter=",", quoting=csv.QUOTE_ALL) self.source = source self.queue = queue.Queue() super(CDRStream, self).__init__() def run(self): for cdr in self.source: self.queue.put(cdr) def __iter__(self): return self def __next__(self): try: cdr = self.queue.get(timeout=1) except queue.Empty: raise StopIteration self.writer.writerow(self.formatter(*cdr)) val = self.output.getvalue().strip() self.output.truncate(0) self.output.seek(0) return val
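

# Usage sketch: CDRStream is a producer thread feeding an internal queue, so
# it must be started before iteration. The UniformSource constructor
# arguments below (start time, end time, rate) are assumptions for
# illustration; see cdrgen.sources for the actual signature.
#
#     source = UniformSource(0, 3600, 10)
#     stream = CDRStream(asterisk_like, source)
#     stream.start()
#     for line in stream:
#         print(line)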
{ "content_hash": "25465faeca6d57145f83cb4fd93ecf8d", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 83, "avg_line_length": 28.058823529411764, "alnum_prop": 0.6153039832285115, "repo_name": "rrader/cdr-tools", "id": "3c976606c7881f0aa9c80a95d926bcbe18016019", "size": "954", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "generator/cdrgen/generate.py", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "12612" }, { "name": "Python", "bytes": "35486" }, { "name": "Ruby", "bytes": "2018" }, { "name": "Shell", "bytes": "3774" } ], "symlink_target": "" }
import torch from typing import Union, Optional from os import PathLike from allennlp.fairness.bias_mitigators import ( HardBiasMitigator, LinearBiasMitigator, INLPBiasMitigator, OSCaRBiasMitigator, ) from allennlp.fairness.bias_direction_wrappers import BiasDirectionWrapper from allennlp.fairness.bias_utils import load_word_pairs from allennlp.common import Registrable from allennlp.data.tokenizers.tokenizer import Tokenizer from allennlp.data import Vocabulary class BiasMitigatorWrapper(Registrable): """ Parent class for bias mitigator wrappers. """ def train(self, mode: bool = True): """ # Parameters mode : `bool`, optional (default=`True`) Sets `requires_grad` to value of `mode` for bias mitigator and associated bias direction. """ raise NotImplementedError # TODO: remove equalize words from evaluation words @BiasMitigatorWrapper.register("hard") class HardBiasMitigatorWrapper(BiasMitigatorWrapper): """ # Parameters bias_direction : `BiasDirectionWrapper` Bias direction used by mitigator. embedding_layer : `torch.nn.Embedding` Embedding layer of base model. equalize_word_pairs_file : `Union[PathLike, str]` Path of file containing equalize word pairs. tokenizer : `Tokenizer` Tokenizer used to tokenize equalize words. mitigator_vocab : `Vocabulary`, optional (default=`None`) Vocabulary of tokenizer. If `None`, assumes tokenizer is of type `PreTrainedTokenizer` and uses tokenizer's `vocab` attribute. namespace : `str`, optional (default=`"tokens"`) Namespace of mitigator_vocab to use when tokenizing. Disregarded when mitigator_vocab is `None`. requires_grad : `bool`, optional (default=`True`) Option to enable gradient calculation for bias mitigator. """ def __init__( self, bias_direction: BiasDirectionWrapper, embedding_layer: torch.nn.Embedding, equalize_word_pairs_file: Union[PathLike, str], tokenizer: Tokenizer, mitigator_vocab: Optional[Vocabulary] = None, namespace: str = "tokens", requires_grad: bool = True, ): # use predetermined bias direction self.bias_direction = bias_direction self.predetermined_bias_direction = self.bias_direction(embedding_layer) self.ids1, self.ids2 = load_word_pairs( equalize_word_pairs_file, tokenizer, mitigator_vocab, namespace ) self.mitigator = HardBiasMitigator(requires_grad=requires_grad) def __call__(self, module, module_in, module_out): """ Called as forward hook. 
""" # embed subword token IDs and mean pool to get # embedding of original word ids1_embeddings = [] for i in self.ids1: i = i.to(module.weight.device) ids1_embeddings.append( torch.mean(module.forward(i), dim=0, keepdim=True) ) # forward() does not trigger hooks, thereby avoiding infinite recursion ids2_embeddings = [] for i in self.ids2: i = i.to(module.weight.device) ids2_embeddings.append(torch.mean(module.forward(i), dim=0, keepdim=True)) ids1_embeddings = torch.cat(ids1_embeddings) ids2_embeddings = torch.cat(ids2_embeddings) module_out_size = module_out.size() # flatten tensor except for last dimension module_out = module_out.flatten(end_dim=-2) # only return bias-mitigated evaluation embeddings module_out = self.mitigator( module_out, self.predetermined_bias_direction.to(module_out.device), ids1_embeddings.to(module_out.device), ids2_embeddings.to(module_out.device), )[: module_out.size(0)] return module_out.reshape(module_out_size) def train(self, mode: bool = True): self.mitigator.requires_grad = mode self.bias_direction.train(mode) @BiasMitigatorWrapper.register("linear") class LinearBiasMitigatorWrapper(BiasMitigatorWrapper): """ # Parameters bias_direction : `BiasDirectionWrapper` Bias direction used by mitigator. embedding_layer : `torch.nn.Embedding` Embedding layer of base model. requires_grad : `bool`, optional (default=`True`) Option to enable gradient calculation for bias mitigator. """ def __init__( self, bias_direction: BiasDirectionWrapper, embedding_layer: torch.nn.Embedding, requires_grad: bool = True, ): # use predetermined bias direction self.bias_direction = bias_direction self.predetermined_bias_direction = self.bias_direction(embedding_layer) self.mitigator = LinearBiasMitigator(requires_grad=requires_grad) def __call__(self, module, module_in, module_out): """ Called as forward hook. """ module_out_size = module_out.size() # flatten tensor except for last dimension module_out = module_out.flatten(end_dim=-2) module_out = self.mitigator( module_out, self.predetermined_bias_direction.to(module_out.device) ) return module_out.reshape(module_out_size) def train(self, mode: bool = True): self.mitigator.requires_grad = mode self.bias_direction.train(mode) @BiasMitigatorWrapper.register("inlp") class INLPBiasMitigatorWrapper(BiasMitigatorWrapper): """ # Parameters embedding_layer : `torch.nn.Embedding` Embedding layer of base model. seed_word_pairs_file : `Union[PathLike, str]` Path of file containing seed word pairs. tokenizer : `Tokenizer` Tokenizer used to tokenize seed words. mitigator_vocab : `Vocabulary`, optional (default=`None`) Vocabulary of tokenizer. If `None`, assumes tokenizer is of type `PreTrainedTokenizer` and uses tokenizer's `vocab` attribute. namespace : `str`, optional (default=`"tokens"`) Namespace of mitigator_vocab to use when tokenizing. Disregarded when mitigator_vocab is `None`. """ def __init__( self, embedding_layer: torch.nn.Embedding, seed_word_pairs_file: Union[PathLike, str], tokenizer: Tokenizer, mitigator_vocab: Optional[Vocabulary] = None, namespace: str = "tokens", ): self.ids1, self.ids2 = load_word_pairs( seed_word_pairs_file, tokenizer, mitigator_vocab, namespace ) self.mitigator = INLPBiasMitigator() def __call__(self, module, module_in, module_out): """ Called as forward hook. 
""" # embed subword token IDs and mean pool to get # embedding of original word ids1_embeddings = [] for i in self.ids1: i = i.to(module.weight.device) ids1_embeddings.append(torch.mean(module.forward(i), dim=0, keepdim=True)) ids2_embeddings = [] for i in self.ids2: i = i.to(module.weight.device) ids2_embeddings.append(torch.mean(module.forward(i), dim=0, keepdim=True)) ids1_embeddings = torch.cat(ids1_embeddings) ids2_embeddings = torch.cat(ids2_embeddings) module_out_size = module_out.size() # flatten tensor except for last dimension module_out = module_out.flatten(end_dim=-2) module_out = self.mitigator( module_out, ids1_embeddings.to(module_out.device), ids2_embeddings.to(module_out.device) ) return module_out.reshape(module_out_size) def train(self, mode: bool = True): pass @BiasMitigatorWrapper.register("oscar") class OSCaRBiasMitigatorWrapper(BiasMitigatorWrapper): """ # Parameters bias_direction1 : `BiasDirectionWrapper` Bias direction of first concept subspace used by mitigator. bias_direction2 : `BiasDirectionWrapper` Bias direction of second concept subspace used by mitigator. embedding_layer : `torch.nn.Embedding` Embedding layer of base model. requires_grad : `bool`, optional (default=`True`) Option to enable gradient calculation for bias mitigator. """ def __init__( self, bias_direction1: BiasDirectionWrapper, bias_direction2: BiasDirectionWrapper, embedding_layer: torch.nn.Embedding, requires_grad: bool = True, ): # use predetermined bias directions self.bias_direction1 = bias_direction1 self.predetermined_bias_direction1 = self.bias_direction1(embedding_layer) self.bias_direction2 = bias_direction2(embedding_layer) self.predetermined_bias_direction2 = self.bias_direction2(embedding_layer) self.mitigator = OSCaRBiasMitigator(requires_grad=requires_grad) def __call__(self, module, module_in, module_out): """ Called as forward hook. """ module_out_size = module_out.size() # flatten tensor except for last dimension module_out = module_out.flatten(end_dim=-2) module_out = self.mitigator( module_out, self.predetermined_bias_direction1.to(module_out.device), self.predetermined_bias_direction2.to(module_out.device), ) return module_out.reshape(module_out_size) def train(self, mode: bool = True): self.mitigator.requires_grad = mode self.bias_direction1.train(mode) self.bias_direction2.train(mode)
{ "content_hash": "483f72685ed32d5a294652dcc2112bd6", "timestamp": "", "source": "github", "line_count": 266, "max_line_length": 100, "avg_line_length": 36.00751879699248, "alnum_prop": 0.6500313217790771, "repo_name": "allenai/allennlp", "id": "6351a6cceac778609067b7c8ec3a17382f00e5fc", "size": "9578", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "allennlp/fairness/bias_mitigator_wrappers.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "39870" }, { "name": "Dockerfile", "bytes": "1190" }, { "name": "Jsonnet", "bytes": "4469" }, { "name": "Makefile", "bytes": "5306" }, { "name": "Perl", "bytes": "101" }, { "name": "Python", "bytes": "3575059" }, { "name": "Scilab", "bytes": "4085" }, { "name": "Shell", "bytes": "2092" } ], "symlink_target": "" }
import copy import datetime from django.forms import Select from django.test import override_settings from django.utils.safestring import mark_safe from .base import WidgetTest class SelectTest(WidgetTest): widget = Select nested_widget = Select(choices=( ('outer1', 'Outer 1'), ('Group "1"', (('inner1', 'Inner 1'), ('inner2', 'Inner 2'))), )) def test_render(self): self.check_html(self.widget(choices=self.beatles), 'beatle', 'J', html=( """<select name="beatle"> <option value="J" selected>John</option> <option value="P">Paul</option> <option value="G">George</option> <option value="R">Ringo</option> </select>""" )) def test_render_none(self): """ If the value is None, none of the options are selected. """ self.check_html(self.widget(choices=self.beatles), 'beatle', None, html=( """<select name="beatle"> <option value="J">John</option> <option value="P">Paul</option> <option value="G">George</option> <option value="R">Ringo</option> </select>""" )) def test_render_label_value(self): """ If the value corresponds to a label (but not to an option value), none of the options are selected. """ self.check_html(self.widget(choices=self.beatles), 'beatle', 'John', html=( """<select name="beatle"> <option value="J">John</option> <option value="P">Paul</option> <option value="G">George</option> <option value="R">Ringo</option> </select>""" )) def test_render_selected(self): """ Only one option can be selected (#8103). """ choices = [('0', '0'), ('1', '1'), ('2', '2'), ('3', '3'), ('0', 'extra')] self.check_html(self.widget(choices=choices), 'choices', '0', html=( """<select name="choices"> <option value="0" selected>0</option> <option value="1">1</option> <option value="2">2</option> <option value="3">3</option> <option value="0">extra</option> </select>""" )) def test_constructor_attrs(self): """ Select options shouldn't inherit the parent widget attrs. """ widget = Select( attrs={'class': 'super', 'id': 'super'}, choices=[(1, 1), (2, 2), (3, 3)], ) self.check_html(widget, 'num', 2, html=( """<select name="num" class="super" id="super"> <option value="1">1</option> <option value="2" selected>2</option> <option value="3">3</option> </select>""" )) def test_compare_to_str(self): """ The value is compared to its str(). """ self.check_html( self.widget(choices=[('1', '1'), ('2', '2'), ('3', '3')]), 'num', 2, html=( """<select name="num"> <option value="1">1</option> <option value="2" selected>2</option> <option value="3">3</option> </select>""" ), ) self.check_html( self.widget(choices=[(1, 1), (2, 2), (3, 3)]), 'num', '2', html=( """<select name="num"> <option value="1">1</option> <option value="2" selected>2</option> <option value="3">3</option> </select>""" ), ) self.check_html( self.widget(choices=[(1, 1), (2, 2), (3, 3)]), 'num', 2, html=( """<select name="num"> <option value="1">1</option> <option value="2" selected>2</option> <option value="3">3</option> </select>""" ), ) def test_choices_constructor(self): widget = Select(choices=[(1, 1), (2, 2), (3, 3)]) self.check_html(widget, 'num', 2, html=( """<select name="num"> <option value="1">1</option> <option value="2" selected>2</option> <option value="3">3</option> </select>""" )) def test_choices_constructor_generator(self): """ If choices is passed to the constructor and is a generator, it can be iterated over multiple times without getting consumed. 
""" def get_choices(): for i in range(5): yield (i, i) widget = Select(choices=get_choices()) self.check_html(widget, 'num', 2, html=( """<select name="num"> <option value="0">0</option> <option value="1">1</option> <option value="2" selected>2</option> <option value="3">3</option> <option value="4">4</option> </select>""" )) self.check_html(widget, 'num', 3, html=( """<select name="num"> <option value="0">0</option> <option value="1">1</option> <option value="2">2</option> <option value="3" selected>3</option> <option value="4">4</option> </select>""" )) def test_choices_escaping(self): choices = (('bad', 'you & me'), ('good', mark_safe('you &gt; me'))) self.check_html(self.widget(choices=choices), 'escape', None, html=( """<select name="escape"> <option value="bad">you &amp; me</option> <option value="good">you &gt; me</option> </select>""" )) def test_choices_unicode(self): self.check_html( self.widget(choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')]), 'email', 'ŠĐĆŽćžšđ', html=( """<select name="email"> <option value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" selected> \u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111 </option> <option value="\u0107\u017e\u0161\u0111">abc\u0107\u017e\u0161\u0111</option> </select>""" ), ) def test_choices_optgroup(self): """ Choices can be nested one level in order to create HTML optgroups. """ self.check_html(self.nested_widget, 'nestchoice', None, html=( """<select name="nestchoice"> <option value="outer1">Outer 1</option> <optgroup label="Group &quot;1&quot;"> <option value="inner1">Inner 1</option> <option value="inner2">Inner 2</option> </optgroup> </select>""" )) def test_choices_select_outer(self): self.check_html(self.nested_widget, 'nestchoice', 'outer1', html=( """<select name="nestchoice"> <option value="outer1" selected>Outer 1</option> <optgroup label="Group &quot;1&quot;"> <option value="inner1">Inner 1</option> <option value="inner2">Inner 2</option> </optgroup> </select>""" )) def test_choices_select_inner(self): self.check_html(self.nested_widget, 'nestchoice', 'inner1', html=( """<select name="nestchoice"> <option value="outer1">Outer 1</option> <optgroup label="Group &quot;1&quot;"> <option value="inner1" selected>Inner 1</option> <option value="inner2">Inner 2</option> </optgroup> </select>""" )) @override_settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True) def test_doesnt_localize_option_value(self): choices = [ (1, 'One'), (1000, 'One thousand'), (1000000, 'One million'), ] html = """ <select name="number"> <option value="1">One</option> <option value="1000">One thousand</option> <option value="1000000">One million</option> </select> """ self.check_html(self.widget(choices=choices), 'number', None, html=html) choices = [ (datetime.time(0, 0), 'midnight'), (datetime.time(12, 0), 'noon'), ] html = """ <select name="time"> <option value="00:00:00">midnight</option> <option value="12:00:00">noon</option> </select> """ self.check_html(self.widget(choices=choices), 'time', None, html=html) def test_options(self): options = list(self.widget(choices=self.beatles).options( 'name', ['J'], attrs={'class': 'super'}, )) self.assertEqual(len(options), 4) self.assertEqual(options[0]['name'], 'name') self.assertEqual(options[0]['value'], 'J') self.assertEqual(options[0]['label'], 'John') self.assertEqual(options[0]['index'], '0') self.assertEqual(options[0]['selected'], True) # Template-related attributes self.assertEqual(options[1]['name'], 'name') self.assertEqual(options[1]['value'], 'P') 
self.assertEqual(options[1]['label'], 'Paul') self.assertEqual(options[1]['index'], '1') self.assertEqual(options[1]['selected'], False) def test_optgroups(self): choices = [ ('Audio', [ ('vinyl', 'Vinyl'), ('cd', 'CD'), ]), ('Video', [ ('vhs', 'VHS Tape'), ('dvd', 'DVD'), ]), ('unknown', 'Unknown'), ] groups = list(self.widget(choices=choices).optgroups( 'name', ['vhs'], attrs={'class': 'super'}, )) audio, video, unknown = groups label, options, index = audio self.assertEqual(label, 'Audio') self.assertEqual( options, [{ 'value': 'vinyl', 'type': 'select', 'attrs': {}, 'index': '0_0', 'label': 'Vinyl', 'template_name': 'django/forms/widgets/select_option.html', 'name': 'name', 'selected': False, 'wrap_label': True, }, { 'value': 'cd', 'type': 'select', 'attrs': {}, 'index': '0_1', 'label': 'CD', 'template_name': 'django/forms/widgets/select_option.html', 'name': 'name', 'selected': False, 'wrap_label': True, }] ) self.assertEqual(index, 0) label, options, index = video self.assertEqual(label, 'Video') self.assertEqual( options, [{ 'value': 'vhs', 'template_name': 'django/forms/widgets/select_option.html', 'label': 'VHS Tape', 'attrs': {'selected': True}, 'index': '1_0', 'name': 'name', 'selected': True, 'type': 'select', 'wrap_label': True, }, { 'value': 'dvd', 'template_name': 'django/forms/widgets/select_option.html', 'label': 'DVD', 'attrs': {}, 'index': '1_1', 'name': 'name', 'selected': False, 'type': 'select', 'wrap_label': True, }] ) self.assertEqual(index, 1) label, options, index = unknown self.assertEqual(label, None) self.assertEqual( options, [{ 'value': 'unknown', 'selected': False, 'template_name': 'django/forms/widgets/select_option.html', 'label': 'Unknown', 'attrs': {}, 'index': '2', 'name': 'name', 'type': 'select', 'wrap_label': True, }] ) self.assertEqual(index, 2) def test_optgroups_integer_choices(self): """The option 'value' is the same type as what's in `choices`.""" groups = list(self.widget(choices=[[0, 'choice text']]).optgroups('name', ['vhs'])) label, options, index = groups[0] self.assertEqual(options[0]['value'], 0) def test_deepcopy(self): """ __deepcopy__() should copy all attributes properly (#25085). """ widget = Select() obj = copy.deepcopy(widget) self.assertIsNot(widget, obj) self.assertEqual(widget.choices, obj.choices) self.assertIsNot(widget.choices, obj.choices) self.assertEqual(widget.attrs, obj.attrs) self.assertIsNot(widget.attrs, obj.attrs) def test_doesnt_render_required_when_impossible_to_select_empty_field(self): widget = self.widget(choices=[('J', 'John'), ('P', 'Paul')]) self.assertIs(widget.use_required_attribute(initial=None), False) def test_renders_required_when_possible_to_select_empty_field_str(self): widget = self.widget(choices=[('', 'select please'), ('P', 'Paul')]) self.assertIs(widget.use_required_attribute(initial=None), True) def test_renders_required_when_possible_to_select_empty_field_list(self): widget = self.widget(choices=[['', 'select please'], ['P', 'Paul']]) self.assertIs(widget.use_required_attribute(initial=None), True) def test_renders_required_when_possible_to_select_empty_field_none(self): widget = self.widget(choices=[(None, 'select please'), ('P', 'Paul')]) self.assertIs(widget.use_required_attribute(initial=None), True) def test_doesnt_render_required_when_no_choices_are_available(self): widget = self.widget(choices=[]) self.assertIs(widget.use_required_attribute(initial=None), False)
{ "content_hash": "1642c7482abad9f44865c249bdd6b8bd", "timestamp": "", "source": "github", "line_count": 392, "max_line_length": 93, "avg_line_length": 35.984693877551024, "alnum_prop": 0.49262725081525593, "repo_name": "schinckel/django", "id": "fd4c1fb34c4effebb101ef4a47ab09ab76e5b832", "size": "14138", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "tests/forms_tests/widget_tests/test_select.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "85024" }, { "name": "HTML", "bytes": "224566" }, { "name": "JavaScript", "bytes": "251536" }, { "name": "Makefile", "bytes": "125" }, { "name": "Python", "bytes": "13234142" }, { "name": "Shell", "bytes": "809" }, { "name": "Smarty", "bytes": "130" } ], "symlink_target": "" }
import logging import weakref from king_phisher import find from king_phisher import utilities from king_phisher.client import dialogs from king_phisher.client import export from king_phisher.client import graphs from king_phisher.client import gui_utilities from king_phisher.client.widget import extras from king_phisher.client.windows import plugin_manager from king_phisher.client.windows import rpc_terminal from king_phisher.client.windows import campaign_import from king_phisher.client.windows import compare_campaigns from king_phisher.client.tabs.campaign import CampaignViewTab from king_phisher.client.tabs.campaign import CampaignViewGenericTableTab from king_phisher.client.tabs.mail import MailSenderTab from king_phisher.constants import ConnectionErrorReason from gi.repository import Gdk from gi.repository import GdkPixbuf from gi.repository import Gtk import xlsxwriter if isinstance(Gtk.Widget, utilities.Mock): _Gtk_ApplicationWindow = type('Gtk.ApplicationWindow', (object,), {'__module__': ''}) else: _Gtk_ApplicationWindow = Gtk.ApplicationWindow __all__ = ('MainAppWindow',) class MainMenuBar(gui_utilities.GladeGObject): """ The main menu bar for the primary application window. This configures any optional menu items as well as handles all the menu item signals appropriately. """ dependencies = gui_utilities.GladeDependencies( top_level=( 'StockDeleteImage', 'StockEditImage', 'StockHelpImage', 'StockPropertiesImage', 'StockStopImage' ) ) top_gobject = 'menubar' def __init__(self, application, window): utilities.assert_arg_type(application, Gtk.Application, arg_pos=1) utilities.assert_arg_type(window, MainAppWindow, arg_pos=2) super(MainMenuBar, self).__init__(application) self.window = weakref.proxy(window) self._add_accelerators() graphs_menu_item = self.gtk_builder_get('menuitem_tools_create_graph') if graphs.has_matplotlib: graphs_submenu = Gtk.Menu.new() for graph_name in graphs.get_graphs(): graph = graphs.get_graph(graph_name) menu_item = Gtk.MenuItem.new_with_label(graph.name_human) menu_item.connect('activate', self.signal_activate_tools_show_campaign_graph, graph_name) graphs_submenu.append(menu_item) graphs_menu_item.set_submenu(graphs_submenu) graphs_menu_item.show_all() else: graphs_menu_item.set_sensitive(False) def _add_accelerators(self): accelerators = ( ('file_open', Gdk.KEY_o, Gdk.ModifierType.CONTROL_MASK), ('file_quit', Gdk.KEY_q, Gdk.ModifierType.CONTROL_MASK), ('tools_rpc_terminal', Gdk.KEY_F1, Gdk.ModifierType.CONTROL_MASK), ('tools_sftp_client', Gdk.KEY_F2, Gdk.ModifierType.CONTROL_MASK) ) for menu_name, key, modifier in accelerators: menu_item = self.gtk_builder_get('menuitem_' + menu_name) menu_item.add_accelerator('activate', self.window.accel_group, key, modifier, Gtk.AccelFlags.VISIBLE) def signal_activate_edit_configure_campaign(self, _): self.application.campaign_configure() def signal_activate_edit_delete_campaign(self, _): if not gui_utilities.show_dialog_yes_no('Delete This Campaign?', self.application.get_active_window(), 'This action is irreversible, all campaign data will be lost.'): return self.application.emit('campaign-delete', self.config['campaign_id']) def signal_activate_edit_preferences(self, _): self.application.show_preferences() def signal_activate_edit_stop_service(self, _): self.application.stop_remote_service() def signal_activate_edit_companies(self, _): dialogs.CompanyEditorDialog(self.application).interact() def signal_activate_edit_tags(self, _): dialogs.TagEditorDialog(self.application).interact() def 
signal_activate_export_campaign_xlsx(self, _): self.window.export_campaign_xlsx() def signal_activate_export_campaign_xml(self, _): self.window.export_campaign_xml() def signal_activate_export_message_data(self, _): self.window.export_message_data() def signal_activate_export_credentials_csv(self, _): campaign_tab = self.window.tabs['campaign'] credentials_tab = campaign_tab.tabs['credentials'] credentials_tab.export_table_to_csv() def signal_activate_export_credentials_msf_txt(self, _): dialog = extras.FileChooserDialog('Export Credentials', self.application.get_active_window()) file_name = self.config['campaign_name'] + '.txt' response = dialog.run_quick_save(file_name) dialog.destroy() if not response: return destination_file = response['target_path'] export.campaign_credentials_to_msf_txt(self.application.rpc, self.config['campaign_id'], destination_file) def signal_activate_export_messages_csv(self, _): campaign_tab = self.window.tabs['campaign'] messages_tab = campaign_tab.tabs['messages'] messages_tab.export_table_to_csv() def signal_activate_export_visits_csv(self, _): campaign_tab = self.window.tabs['campaign'] visits_tab = campaign_tab.tabs['visits'] visits_tab.export_table_to_csv() def signal_activate_export_visits_geojson(self, _): self.window.export_campaign_visit_geojson() def signal_activate_import_message_data(self, _): self.window.import_message_data() def signal_activate_import_campaign_xml(self, _): campaign_import.ImportCampaignWindow(self.application) def signal_activate_show_campaign_selection(self, _): self.application.show_campaign_selection() def signal_activate_quit(self, _): self.application.quit(optional=True) def signal_activate_tools_rpc_terminal(self, _): rpc_terminal.RPCTerminal(self.application) def signal_activate_tools_clone_page(self, _): dialogs.ClonePageDialog(self.application).interact() def signal_activate_tools_compare_campaigns(self, _): compare_campaigns.CampaignCompWindow(self.application) def signal_activate_tools_manage_plugins(self, _): plugin_manager.PluginManagerWindow(self.application) def signal_activate_tools_sftp_client(self, _): self.application.emit('sftp-client-start') def signal_activate_tools_show_campaign_graph(self, _, graph_name): self.application.show_campaign_graph(graph_name) def signal_activate_help_about(self, _): dialogs.AboutDialog(self.application).interact() def signal_activate_help_templates(self, _): utilities.open_uri('https://github.com/securestate/king-phisher-templates') def signal_activate_help_wiki(self, _): utilities.open_uri('https://github.com/securestate/king-phisher/wiki') class MainAppWindow(_Gtk_ApplicationWindow): """ This is the top level King Phisher client window. This is also the parent window for most GTK objects. """ def __init__(self, config, application): """ :param dict config: The main King Phisher client configuration. :param application: The application instance to which this window belongs. 
:type application: :py:class:`.KingPhisherClientApplication` """ utilities.assert_arg_type(application, Gtk.Application, arg_pos=2) super(MainAppWindow, self).__init__(application=application) self.application = application self.logger = logging.getLogger('KingPhisher.Client.MainWindow') self.config = config """The main King Phisher client configuration.""" self.set_property('title', 'King Phisher') vbox = Gtk.Box() vbox.set_property('orientation', Gtk.Orientation.VERTICAL) vbox.show() self.add(vbox) default_icon_file = find.data_file('king-phisher-icon.svg') if default_icon_file: icon_pixbuf = GdkPixbuf.Pixbuf.new_from_file(default_icon_file) self.set_default_icon(icon_pixbuf) self.accel_group = Gtk.AccelGroup() self.add_accel_group(self.accel_group) self.menu_bar = MainMenuBar(application, self) vbox.pack_start(self.menu_bar.menubar, False, False, 0) # create notebook and tabs self.notebook = Gtk.Notebook() """The primary :py:class:`Gtk.Notebook` that holds the top level taps of the client GUI.""" self.notebook.connect('switch-page', self.signal_notebook_switch_page) self.notebook.set_scrollable(True) vbox.pack_start(self.notebook, True, True, 0) self.tabs = {} current_page = self.notebook.get_current_page() self.last_page_id = current_page mailer_tab = MailSenderTab(self, self.application) self.tabs['mailer'] = mailer_tab self.notebook.insert_page(mailer_tab.box, mailer_tab.label, current_page + 1) self.notebook.set_current_page(current_page + 1) campaign_tab = CampaignViewTab(self, self.application) campaign_tab.box.show() self.tabs['campaign'] = campaign_tab self.notebook.insert_page(campaign_tab.box, campaign_tab.label, current_page + 2) self.set_position(Gtk.WindowPosition.CENTER_ALWAYS) self.set_size_request(800, 600) self.connect('delete-event', self.signal_delete_event) self.notebook.show() self.show() self.rpc = None # needs to be initialized last """The :py:class:`.KingPhisherRPCClient` instance.""" self.application.connect('server-connected', self.signal_kp_server_connected) self.login_dialog = dialogs.LoginDialog(self.application) self.login_dialog.dialog.connect('response', self.signal_login_dialog_response) self.login_dialog.dialog.show() def signal_notebook_switch_page(self, notebook, current_page, index): #previous_page = notebook.get_nth_page(self.last_page_id) self.last_page_id = index mailer_tab = self.tabs.get('mailer') campaign_tab = self.tabs.get('campaign') notebook = None if mailer_tab and current_page == mailer_tab.box: notebook = mailer_tab.notebook elif campaign_tab and current_page == campaign_tab.box: notebook = campaign_tab.notebook if notebook: index = notebook.get_current_page() notebook.emit('switch-page', notebook.get_nth_page(index), index) def signal_delete_event(self, x, y): self.application.emit('exit-confirm') return True def signal_kp_server_connected(self, _): self.rpc = self.application.rpc if self.login_dialog: self.login_dialog.destroy() self.login_dialog = None def signal_login_dialog_response(self, dialog, response): if response == Gtk.ResponseType.CANCEL or response == Gtk.ResponseType.DELETE_EVENT: dialog.destroy() self.application.emit('exit') return True self.login_dialog.objects_save_to_config() username = self.config['server_username'] password = self.config['server_password'] otp = self.config['server_one_time_password'] if not otp: otp = None _, reason = self.application.server_connect(username, password, otp) if reason == ConnectionErrorReason.ERROR_INVALID_OTP: revealer = 
self.login_dialog.gobjects['revealer_server_one_time_password'] if revealer.get_child_revealed(): gui_utilities.show_dialog_error('Login Failed', self, 'A valid one time password (OTP) token is required.') else: revealer.set_reveal_child(True) entry = self.login_dialog.gobjects['entry_server_one_time_password'] entry.grab_focus() elif reason == ConnectionErrorReason.ERROR_INVALID_CREDENTIALS: gui_utilities.show_dialog_error('Login Failed', self, 'The provided credentials are incorrect.') elif reason == ConnectionErrorReason.ERROR_UNKNOWN: gui_utilities.show_dialog_error('Login Failed', self, 'An unknown error has occurred.') def export_campaign_xlsx(self): """Export the current campaign to an Excel compatible XLSX workbook.""" dialog = extras.FileChooserDialog('Export Campaign To Excel', self) file_name = self.config['campaign_name'] + '.xlsx' response = dialog.run_quick_save(file_name) dialog.destroy() if not response: return destination_file = response['target_path'] campaign_tab = self.tabs['campaign'] workbook = xlsxwriter.Workbook(destination_file) title_format = workbook.add_format({'bold': True, 'size': 18}) for tab_name, tab in campaign_tab.tabs.items(): if not isinstance(tab, CampaignViewGenericTableTab): continue tab.export_table_to_xlsx_worksheet(workbook.add_worksheet(tab_name), title_format) workbook.close() def export_campaign_xml(self): """Export the current campaign to an XML data file.""" dialog = extras.FileChooserDialog('Export Campaign XML Data', self) file_name = self.config['campaign_name'] + '.xml' response = dialog.run_quick_save(file_name) dialog.destroy() if not response: return destination_file = response['target_path'] export.campaign_to_xml(self.rpc, self.config['campaign_id'], destination_file) def export_message_data(self, *args, **kwargs): self.tabs['mailer'].export_message_data(*args, **kwargs) def export_campaign_visit_geojson(self): """ Export the current campaign visit information to a GeoJSON data file. """ dialog = extras.FileChooserDialog('Export Campaign Visit GeoJSON Data', self) file_name = self.config['campaign_name'] + '.geojson' response = dialog.run_quick_save(file_name) dialog.destroy() if not response: return destination_file = response['target_path'] export.campaign_visits_to_geojson(self.rpc, self.config['campaign_id'], destination_file) def import_message_data(self, *args, **kwargs): self.tabs['mailer'].import_message_data(*args, **kwargs)
{ "content_hash": "2517baef488906ae5dbfa6392be20f28", "timestamp": "", "source": "github", "line_count": 340, "max_line_length": 169, "avg_line_length": 38.15882352941176, "alnum_prop": 0.7441035917989826, "repo_name": "hdemeyer/king-phisher", "id": "b118c3b6b7d857491740c406230a961cc9ed3a12", "size": "14562", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "king_phisher/client/windows/main.py", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "33168" }, { "name": "HTML", "bytes": "552" }, { "name": "JavaScript", "bytes": "1328" }, { "name": "Jupyter Notebook", "bytes": "11394" }, { "name": "Mako", "bytes": "574" }, { "name": "Python", "bytes": "966857" }, { "name": "Ruby", "bytes": "7629" } ], "symlink_target": "" }
from bs4 import BeautifulSoup
import urllib
#csv is for the csv reader
import csv

#initiates the dictionary to hold the output
holder = {}

#opens the input doc, which holds one URL per line
txt = open("adder.txt")

#opens the output doc
output_txt = open("adder_output.txt", "w")

def headliner(url_file):
    parsed_urls = csv.reader(url_file)
    for row in parsed_urls:
        #the URL is the first (and only) field in the row
        row_contents = row[0]
        print row_contents

        if "rfa" in row_contents:
            #opens the url for read access
            this_url = urllib.urlopen(row_contents).read()
            #creates a new BS holder based on the URL
            soup = BeautifulSoup(this_url, 'lxml')

            #creates the headline section
            headline_text = 'Radio Free Asia: '
            headline = soup.find_all('title')
            for element in headline:
                headline_text += ''.join(element.findAll(text=True)).encode('utf-8').strip()

            #creates the body text, starting with the source URL
            article_text = row_contents + "\n" + "\r"
            #this finds each paragraph of the story
            article = soup.find("div", {"id": "storytext"}).findAll('p')
            for element in article:
                #add a line break and then the text part of the paragraph
                #the .encode part fixes unicode issues
                article_text += '\n' + ''.join(element.findAll(text=True)).encode('utf-8').strip()

            holder[headline_text] = article_text

            #write the headline twice (once as a quick index line, once as
            #the article heading), then the article body
            output_txt.write(headline_text + "\n\r\r")
            output_txt.write(headline_text + "\n")
            output_txt.write(article_text + "\n\r\r\r\r")

        else:
            print "didn't find a matching URL in: " + row_contents

headliner(txt)

#this is just for debugging
print holder

txt.close()
output_txt.close()
{ "content_hash": "ed8f72f02f9cf05975945723e86f5d0f", "timestamp": "", "source": "github", "line_count": 89, "max_line_length": 100, "avg_line_length": 25.03370786516854, "alnum_prop": 0.5619389587073609, "repo_name": "mwweinberg/china-daily-email", "id": "8f194b1ba9bea51c90402a0e46efd1637fa062b3", "size": "2228", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "adder.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "87031" }, { "name": "Python", "bytes": "1389540" } ], "symlink_target": "" }
from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('initiatives', '0005_initiative_hasorganization'), ] operations = [ migrations.RenameField( model_name='initiative', old_name='hasOrganization', new_name='has_organization', ), ]
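# This rename migration is applied with the standard Django workflow
# (illustrative command, assuming the usual manage.py entry point):
#
#   python manage.py migrate initiatives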
{ "content_hash": "0fdc301e4d4fcc10edfb8506572d13bd", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 59, "avg_line_length": 21.666666666666668, "alnum_prop": 0.6102564102564103, "repo_name": "onepercentclub/bluebottle", "id": "672aac3d61aa742b3485d51f572293e4df880535", "size": "464", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "bluebottle/initiatives/migrations/0006_auto_20190416_1553.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "41694" }, { "name": "HTML", "bytes": "246695" }, { "name": "Handlebars", "bytes": "63" }, { "name": "JavaScript", "bytes": "139123" }, { "name": "PHP", "bytes": "35" }, { "name": "PLpgSQL", "bytes": "1369882" }, { "name": "PostScript", "bytes": "2927" }, { "name": "Python", "bytes": "4983116" }, { "name": "Rich Text Format", "bytes": "39109" }, { "name": "SCSS", "bytes": "99555" }, { "name": "Shell", "bytes": "3068" }, { "name": "Smarty", "bytes": "3814" } ], "symlink_target": "" }
from jax import lax import numpy as np from distla_core.analysis.errors import errors_polar from distla_core.utils import pops if __name__ == "__main__": local_rows = np.array([128, 256, 512, 1024, 2048]) OUT_ROWS = tuple(pops.NROWS * local_rows) OUT_COLS = (None, ) DTYPES = (np.float32,) # dtype of the matrices P_SZS = (256,) # panel size of the SUMMA multiplications PRECISIONS = (lax.Precision.DEFAULT, lax.Precision.HIGH, lax.Precision.HIGHEST,) # ASIC matmul precision SEEDS = (None,) # Random seed to initialize input; system clock if None. SERIAL = (False,) # Whether to run in serial or distributed mode. EPS = (None, ) # Convergence threshold. MAXITER = (50,) # When to terminate if convergence stagnates. S_MIN_EST = (None, -1) # Estimated lowest singular value; # None means machine epsilon; -1 means the true value. S_THRESH = (0., 0.1) # When to switch to Newton-Schulz from `rogue` iteration. S_MIN = (1E-5, 1E-4, 1E-3, 1E-2, 0.1) # Smallest nonzero singular value. S_MAX = (1.0,) # Largest singular value of the input matrix. N_ZERO_SVS = (0, 10) # The number of zero singular values in the input matrix. SV_DISTRIBUTION = ("linear",) # `linear` or `geometric` distribution of # singular values in the input matrix. COMPUTE_RHO = (True,) # Whether to compute the positive-semidefinite factor # in addition to the unitary one. BATCH_SIZE = 1 # How many runs to assemblage REDUCTION_MODE = "min" # how to assemblage results OUTPUT_DIR_PATH = None # directory of output; CWD if None OUTPUT_NAME = "errors_polar" # output saved to OUTPUT_NAME.csv _ = errors_polar.errors_polar( OUT_ROWS, out_cols=OUT_COLS, dtypes=DTYPES, p_szs=P_SZS, precisions=PRECISIONS, seeds=SEEDS, serial=SERIAL, eps=EPS, maxiter=MAXITER, s_min_est=S_MIN_EST, s_thresh=S_THRESH, s_min=S_MIN, s_max=S_MAX, n_zero_svs=N_ZERO_SVS, sv_distribution=SV_DISTRIBUTION, compute_rho=COMPUTE_RHO, batch_size=BATCH_SIZE, reduction_mode=REDUCTION_MODE, output_dir_path=OUTPUT_DIR_PATH, output_name=OUTPUT_NAME)
{ "content_hash": "da0fb62001a0ed4e3bd26c4120ae20b7", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 80, "avg_line_length": 52.73170731707317, "alnum_prop": 0.669750231267345, "repo_name": "google/distla_core", "id": "d4c43c8c36c9053bcc3ee093b20f3ea97222099b", "size": "2846", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "distla/distla_core/distla_core/asic_execution/analysis/polar/errors/main.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "1317325" }, { "name": "Shell", "bytes": "5454" } ], "symlink_target": "" }
import os from setuptools import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup(name = 'reflectrpc', packages = ['reflectrpc'], version = '0.7.6', description = 'Self-describing JSON-RPC services made easy', long_description=read('CHANGELOG.rst'), author = 'Andreas Heck', author_email = '[email protected]', license = 'MIT', url = 'https://github.com/aheck/reflectrpc', download_url = 'https://github.com/aheck/reflectrpc/archive/v0.7.6.tar.gz', include_package_data=True, keywords = 'json-rpc jsonrpc rpc webservice', scripts = ['rpcsh', 'rpcdoc', 'rpcgencode'], install_requires = ['future', 'service_identity', 'twisted', 'pyOpenSSL'], classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5' ] )
{ "content_hash": "c1669d7280158d2fa2ba214c437749b2", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 81, "avg_line_length": 36.935483870967744, "alnum_prop": 0.5982532751091703, "repo_name": "aheck/reflectrpc", "id": "d0334b6a37313ebb08e5ce8df1e29ce463c72b3c", "size": "1145", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "setup.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1036" }, { "name": "Dockerfile", "bytes": "1592" }, { "name": "HTML", "bytes": "4009" }, { "name": "Python", "bytes": "197421" }, { "name": "Ruby", "bytes": "1733" }, { "name": "Shell", "bytes": "342" } ], "symlink_target": "" }
import functools import re from typing import Dict, Sequence import looker_sdk from looker_sdk import methods, models, error LOOKER_GROUP_PREFIX = "Looker_Hack: " # simple caching mechanism until we have a true class for retaining these IDs HACKATHON_ATTR_ID = None HACKATHON_ROLE = None def try_to(func): """Wrap API calls in try/except """ @functools.wraps(func) def wrapped_f(**kwargs): try: return func(**kwargs) except error.SDKError as ex: raise RegisterError(f"Failed to {func.__name__}: ({ex})") return wrapped_f @try_to def get_hackathon_attr_id(*, sdk: methods.LookerSDK) -> int: global HACKATHON_ATTR_ID if HACKATHON_ATTR_ID is not None: return HACKATHON_ATTR_ID main_hackathon = "hackathon" user_attrs = sdk.all_user_attributes(fields="name,id") for user_attr in user_attrs: if user_attr.name == main_hackathon: assert user_attr.id HACKATHON_ATTR_ID = user_attr.id break else: attrib = sdk.create_user_attribute( body=models.WriteUserAttribute( name=main_hackathon, label="Looker Hackathon", type="string" ) ) if not attrib: raise RegisterError(f"Could not find '{main_hackathon}' user attribute") else: assert attrib.id HACKATHON_ATTR_ID = attrib.id return HACKATHON_ATTR_ID @try_to def get_hackathon_role(*, sdk: methods.LookerSDK) -> models.Role: global HACKATHON_ROLE if HACKATHON_ROLE is not None: return HACKATHON_ROLE for role in sdk.all_roles(fields="name,id"): if role.name == "Hackathon": HACKATHON_ROLE = role assert HACKATHON_ROLE.id break else: raise RegisterError("Hackathon role needs to be created") return HACKATHON_ROLE def register_user( *, hackathon: str, first_name: str, last_name: str, email: str ) -> str: sdk = looker_sdk.init31() user = find_or_create_user( sdk=sdk, first_name=first_name, last_name=last_name, email=email ) assert user.id if not user.credentials_email: create_email_credentials(sdk=sdk, user_id=user.id, email=email) if user.credentials_api3: client_id = user.credentials_api3[0].client_id else: client_id = create_api3_credentials(sdk=sdk, user_id=user.id).client_id set_user_group(sdk=sdk, user_id=user.id, hackathon=hackathon) set_user_attributes(sdk=sdk, user_id=user.id, hackathon=hackathon) disable_user(sdk=sdk, user_id=user.id) assert client_id return client_id def find_or_create_user( *, sdk: methods.LookerSDK, first_name: str, last_name: str, email: str ) -> models.User: try: users = sdk.search_users(email=email) if users: user = users[0] if ( user.first_name != first_name or user.last_name != last_name or user.is_disabled ): assert user.id user = sdk.update_user( user_id=user.id, body=models.WriteUser( first_name=first_name, last_name=last_name, is_disabled=False ), ) else: user = sdk.create_user( models.WriteUser(first_name=first_name, last_name=last_name) ) except error.SDKError as create_ex: raise RegisterError(f"Failed to find or create User ({create_ex})") return user def enable_users_by_hackathons(hackathons: Sequence[str]) -> Dict[str, str]: global LOOKER_GROUP_PREFIX sdk = looker_sdk.init31() groups = {g.name: g.id for g in sdk.all_groups(fields="id,name")} ret = {} for hackathon in hackathons: try: group_id = groups[f"{LOOKER_GROUP_PREFIX}{hackathon}"] except KeyError: raise RegisterError(f"No group found for hackathon: '{hackathon}'") for user in sdk.search_users(group_id=group_id): assert user.id assert user.email sdk.update_user(user_id=user.id, body=models.WriteUser(is_disabled=False)) password_reset_url = sdk.create_user_credentials_email_password_reset( user_id=user.id, expires=False ).password_reset_url assert password_reset_url setup = 
re.sub("password/reset", "account/setup", password_reset_url) ret[user.email] = setup return ret @try_to def create_email_credentials(*, sdk: methods.LookerSDK, user_id: int, email: str): sdk.create_user_credentials_email( user_id=user_id, body=models.WriteCredentialsEmail(email=email) ) @try_to def create_api3_credentials( *, sdk: methods.LookerSDK, user_id: int ) -> models.CredentialsApi3: return sdk.create_user_credentials_api3( user_id=user_id, body=models.CredentialsApi3() ) @try_to def set_user_group(*, sdk: methods.LookerSDK, user_id: int, hackathon: str): global LOOKER_GROUP_PREFIX # TODO - switch to sdk.search_groups once that method is live on # sandboxcl and hack instances groups = sdk.all_groups(fields="id,name") name = f"{LOOKER_GROUP_PREFIX}{hackathon}" for group in groups: if group.name == name: break else: role = get_hackathon_role(sdk=sdk) assert role.id role_groups = [] for g in sdk.role_groups(role_id=role.id, fields="id"): assert g.id role_groups.append(g.id) group = sdk.create_group(body=models.WriteGroup(name=name)) assert group.id role_groups.append(group.id) sdk.set_role_groups(role_id=role.id, body=role_groups) assert group.id sdk.add_group_user( group_id=group.id, body=models.GroupIdForGroupUserInclusion(user_id=user_id) ) @try_to def set_user_attributes(*, sdk: methods.LookerSDK, user_id, hackathon): hackathon_attr_id = get_hackathon_attr_id(sdk=sdk) assert hackathon_attr_id sdk.set_user_attribute_user_value( user_id=user_id, user_attribute_id=hackathon_attr_id, body=models.WriteUserAttributeWithValue(value=hackathon), ) @try_to def disable_user(*, sdk: methods.LookerSDK, user_id: int): sdk.update_user(user_id=user_id, body=models.WriteUser(is_disabled=True)) def me(): sdk = looker_sdk.init31() return sdk.me() class RegisterError(Exception): """Failed to register user in looker instance. """
{ "content_hash": "178e5017eb969a76a75d9c96b5e7d2aa", "timestamp": "", "source": "github", "line_count": 214, "max_line_length": 86, "avg_line_length": 30.77102803738318, "alnum_prop": 0.6192862566438876, "repo_name": "looker-open-source/sdk-examples", "id": "48e39664778d2a4be5b6207ddebaec4019fd1cf5", "size": "6585", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "python/hackathon_app/looker.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "672" }, { "name": "Dockerfile", "bytes": "308" }, { "name": "HTML", "bytes": "1843" }, { "name": "Python", "bytes": "87898" }, { "name": "Ruby", "bytes": "27873" }, { "name": "Shell", "bytes": "1118" }, { "name": "Swift", "bytes": "538353" }, { "name": "TypeScript", "bytes": "41064" } ], "symlink_target": "" }
import os, platform, shutil
from optparse import OptionParser

def main():
    desc = "builds starcheat using pyqt5 and python3"
    if platform.system() == "Windows":
        desc += " (with cx_freeze if --with-exe is passed)"
    parser = OptionParser(description=desc)
    parser.add_option("-p", "--prefix", "-b", "--build", "--build-dir", dest="prefix", default="build",
                      help="build and install starcheat to this prefix (defaults to build)")
    parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
                      help="print status messages to stdout")
    if platform.system() == "Windows":
        parser.add_option("-e", "--exe", "--with-exe", "--enable-exe", action="store_true", dest="exe",
                          help="generate a standalone .exe (Windows only)")
        parser.add_option("-d", "--dist", "--dist-dir", dest="dist", default="dist",
                          help="write the generated .exe to this dir (defaults to dist)")
    (options, args) = parser.parse_args()

    src_dir = os.path.dirname(os.path.realpath(__file__))
    templates = os.listdir(os.path.join(src_dir, "starcheat", "templates"))
    prefix = os.path.expanduser(options.prefix)
    if platform.system() == "Windows":
        from distutils.sysconfig import get_python_lib
        site_packages_dir = get_python_lib()
        pyqt5_dir = os.path.join(site_packages_dir, "PyQt5")
        cx_freeze_path = os.path.join(os.path.dirname(os.path.dirname(site_packages_dir)), "Scripts", "cxfreeze")
        dist = os.path.expanduser(options.dist)

    if options.verbose:
        print("Building starcheat in " + prefix + " ...")

    if os.path.exists(prefix):
        if options.verbose:
            print("Removing existing build directory")
        shutil.rmtree(prefix)

    if options.verbose:
        print("Copying starcheat python scripts")
    shutil.copytree(os.path.join(src_dir, "starcheat"), prefix,
                    ignore=shutil.ignore_patterns("templates", "starbound", "images", "*.qrc"))

    if options.verbose:
        print("Copying py-starbound module")
    shutil.copytree(os.path.join(src_dir, "starcheat", "starbound", "starbound"), os.path.join(prefix, "starbound"))

    if options.verbose:
        print("Generating python Qt templates...")
    for t in templates:
        temp = os.path.join(src_dir, "starcheat", "templates", t)
        pyname = "qt_" + t.lower().replace(".ui", ".py")
        if platform.system() == "Windows":
            os.system(os.path.join(pyqt5_dir, "pyuic5.bat") + " \"" + temp + "\" > " + os.path.join(prefix, pyname))
        else:
            os.system("pyuic5 \"" + temp + "\" > " + os.path.join(prefix, pyname))
        if options.verbose:
            print("Generated " + pyname)

    if options.verbose:
        print("Generating python Qt resource...")
    res_file = os.path.join(src_dir, "starcheat", "resources.qrc")
    pyname = "resources_rc.py"
    if platform.system() == "Windows":
        os.system(os.path.join(pyqt5_dir, "pyrcc5.exe") + " \"" + res_file + "\" > " + os.path.join(prefix, pyname))
    else:
        os.system("pyrcc5 \"" + res_file + "\" > " + os.path.join(prefix, pyname))
    if options.verbose:
        print("Generated " + pyname)

    if options.verbose:
        print("Script build is complete!")

    if platform.system() == "Windows" and options.exe:
        if options.verbose:
            print("Generating standalone starcheat Windows build in " + dist + " ...")

        if os.path.exists(dist):
            if options.verbose:
                print("Removing existing dist directory")
            shutil.rmtree(dist)

        if options.verbose:
            print("Launching cx_freeze...")
        icon_path = os.path.join(src_dir, "starcheat", "images", "starcheat.ico")
        os.system("python " + cx_freeze_path + " \"" + os.path.join(prefix, "starcheat.py") + "\" --target-dir=\"" + dist + "\" --base-name=Win32GUI --icon=\"" + icon_path + "\"")
        shutil.copy(os.path.join(pyqt5_dir, "libEGL.dll"), dist)

        if options.verbose:
            print("Standalone build is complete!")

if __name__ == "__main__":
    main()
{ "content_hash": "34a19c1c695af5d55d48bb3878855b13", "timestamp": "", "source": "github", "line_count": 93, "max_line_length": 117, "avg_line_length": 45.086021505376344, "alnum_prop": 0.5883615549725734, "repo_name": "wizzomafizzo/starcheat", "id": "5dfb9504bb445a296aee609e3c097bd3c6692433", "size": "4217", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "build.py", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "205230" }, { "name": "Ruby", "bytes": "1293" } ], "symlink_target": "" }
from contact_form.views import ContactFormView from .forms import BeyondContactForm class BeyondContactFormView(ContactFormView): form_class = BeyondContactForm
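# A minimal wiring sketch (an assumption, not part of this app; the URL
# pattern and name are made up, and path() assumes Django 2.0+):
#
#   from django.urls import path
#   from .views import BeyondContactFormView
#
#   urlpatterns = [
#       path("contact/", BeyondContactFormView.as_view(), name="contact"),
#   ]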
{ "content_hash": "2cb7faead01a4a5f94c331d657d8e3e3", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 46, "avg_line_length": 27.666666666666668, "alnum_prop": 0.8433734939759037, "repo_name": "gfavre/beyondthewall.ch", "id": "ee74fb81021da7307eeaa19bbf3339dcd7a6a9dd", "size": "166", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "beyondthewall/contact/views.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "110155" }, { "name": "HTML", "bytes": "25818" }, { "name": "JavaScript", "bytes": "26787" }, { "name": "Python", "bytes": "20517" }, { "name": "SCSS", "bytes": "17622" }, { "name": "Shell", "bytes": "1247" }, { "name": "Smarty", "bytes": "582" } ], "symlink_target": "" }
import argparse import gzip import minify_html import jsmin import mimetypes import pathlib parser = argparse.ArgumentParser(description="Generate displayio resources.") parser.add_argument("--output_c_file", type=argparse.FileType("w"), required=True) parser.add_argument("files", metavar="FILE", type=argparse.FileType("rb"), nargs="+") args = parser.parse_args() c_file = args.output_c_file c_file.write(f"// Autogenerated by tools/gen_web_workflow_static.py\n") c_file.write(f"#include <stdint.h>\n\n") for f in args.files: path = pathlib.Path(f.name) variable = path.name.replace(".", "_") uncompressed = f.read() ulen = len(uncompressed) if f.name.endswith(".html"): uncompressed = minify_html.minify(uncompressed.decode("utf-8")).encode("utf-8") elif f.name.endswith(".js"): uncompressed = jsmin.jsmin(uncompressed.decode("utf-8"), quote_chars="'\"`").encode( "utf-8" ) compressed = gzip.compress(uncompressed) clen = len(compressed) compressed = ", ".join([hex(x) for x in compressed]) mime = mimetypes.guess_type(f.name)[0] c_file.write(f"// {f.name}\n") c_file.write(f"// Original length: {ulen} Compressed length: {clen}\n") c_file.write(f"const uint32_t {variable}_length = {clen};\n") c_file.write(f'const char* {variable}_content_type = "{mime}";\n') c_file.write(f"const uint8_t {variable}[{clen}] = {{{compressed}}};\n\n")
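# For a hypothetical input file "index.html", the generated C output follows
# the write() calls above and resembles this (the lengths and byte values
# below are illustrative only):
#
#   // index.html
#   // Original length: 2048 Compressed length: 512
#   const uint32_t index_html_length = 512;
#   const char* index_html_content_type = "text/html";
#   const uint8_t index_html[512] = {0x1f, 0x8b, 0x8, ...};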
{ "content_hash": "adba68f5a15c4cd86921480de101ea56", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 92, "avg_line_length": 36.05, "alnum_prop": 0.6608876560332871, "repo_name": "adafruit/circuitpython", "id": "172a80dac034a91b99ff5238ebcddf3f6abc91f4", "size": "1612", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "tools/gen_web_workflow_static.py", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "10241" }, { "name": "C", "bytes": "18450191" }, { "name": "C++", "bytes": "476" }, { "name": "CMake", "bytes": "18203" }, { "name": "CSS", "bytes": "316" }, { "name": "HTML", "bytes": "10126" }, { "name": "JavaScript", "bytes": "13854" }, { "name": "Jinja", "bytes": "11034" }, { "name": "Makefile", "bytes": "330832" }, { "name": "Python", "bytes": "1423935" }, { "name": "Shell", "bytes": "18681" } ], "symlink_target": "" }
from runner.koan import * class AboutAttributeAccess(Koan): class TypicalObject(object): pass def test_calling_undefined_functions_normally_results_in_errors(self): typical = self.TypicalObject() try: typical.foobar() except Exception as exception: self.assertEqual('AttributeError', exception.__class__.__name__) self.assertMatch("'TypicalObject' object has no attribute 'foobar'", exception[0]) def test_calling_getattribute_causes_an_attribute_error(self): typical = self.TypicalObject() try: typical.__getattribute__('foobar') except AttributeError as exception: self.assertMatch("'TypicalObject' object has no attribute 'foobar'", exception[0]) # THINK ABOUT IT: # # If the method __getattribute__() causes the AttributeError, then # what would happen if we redefine __getattribute__()? # ------------------------------------------------------------------ class CatchAllAttributeReads(object): def __getattribute__(self, attr_name): return "Someone called '" + attr_name + \ "' and it could not be found" def test_all_attribute_reads_are_caught(self): catcher = self.CatchAllAttributeReads() self.assertMatch("Someone called 'foobar' and it could not be found", catcher.foobar) def test_intercepting_return_values_can_disrupt_the_call_chain(self): catcher = self.CatchAllAttributeReads() self.assertMatch("Someone called 'foobaz' and it could not be found", catcher.foobaz) # This is fine try: catcher.foobaz(1) except TypeError as ex: self.assertMatch("'str' object is not callable", ex[0]) # foobaz returns a string. What happens to the '(1)' part? # Try entering this into a python console to reproduce the issue: # # "foobaz"(1) # def test_changing_getattribute_will_affect__the_getattr_function(self): catcher = self.CatchAllAttributeReads() self.assertMatch("Someone called 'any_attribute' and it could not be found", getattr(catcher, 'any_attribute')) # ------------------------------------------------------------------ class WellBehavedFooCatcher(object): def __getattribute__(self, attr_name): if attr_name[:3] == "foo": return "Foo to you too" else: return \ super(AboutAttributeAccess.WellBehavedFooCatcher, self). \ __getattribute__(attr_name) def test_foo_attributes_are_caught(self): catcher = self.WellBehavedFooCatcher() self.assertEqual("Foo to you too", catcher.foo_bar) self.assertEqual("Foo to you too", catcher.foo_baz) def test_non_foo_messages_are_treated_normally(self): catcher = self.WellBehavedFooCatcher() try: catcher.normal_undefined_attribute except AttributeError as ex: self.assertMatch("'WellBehavedFooCatcher' object has no attribute 'normal_undefined_attribute'", ex[0]) # ------------------------------------------------------------------ global stack_depth stack_depth = 0 class RecursiveCatcher(object): def __init__(self): global stack_depth stack_depth = 0 self.no_of_getattribute_calls = 0 def __getattribute__(self, attr_name): #Uncomment for debugging info: #print 'Debug __getattribute__(' + type(self).__name__ + \ # "." + attr_name + ") dict=" + str(self.__dict__) # We need something that is outside the scope of this class: global stack_depth stack_depth += 1 if stack_depth <= 10: # to prevent a stack overflow self.no_of_getattribute_calls += 1 # Oops! We just accessed an attribute: no_of_getattribute_calls # Guess what happens when self.no_of_getattribute_calls is # accessed? # Using 'object' directly because using super() here will also # trigger a __getattribute__() call. 
return object.__getattribute__(self, attr_name) def my_method(self): pass def test_getattribute_is_a_bit_overzealous_sometimes(self): catcher = self.RecursiveCatcher() catcher.my_method() global stack_depth self.assertEqual(11, stack_depth) # ------------------------------------------------------------------ class MinimalCatcher(object): class DuffObject(object): pass def __init__(self): self.no_of_getattr_calls = 0 def __getattr__(self, attr_name): self.no_of_getattr_calls += 1 return self.DuffObject def my_method(self): pass def test_getattr_ignores_known_attributes(self): catcher = self.MinimalCatcher() catcher.my_method() self.assertEqual(0, catcher.no_of_getattr_calls) def test_getattr_only_catches_unknown_attributes(self): catcher = self.MinimalCatcher() catcher.purple_flamingos() catcher.free_pie() self.assertEqual("DuffObject", catcher.give_me_duff_or_give_me_death().__class__.__name__) self.assertEqual(3, catcher.no_of_getattr_calls) # ------------------------------------------------------------------ class PossessiveSetter(object): def __setattr__(self, attr_name, value): new_attr_name = attr_name if attr_name[-5:] == 'comic': new_attr_name = "my_" + new_attr_name elif attr_name[-3:] == 'pie': new_attr_name = "a_" + new_attr_name object.__setattr__(self, new_attr_name, value) def test_setattr_intercepts_attribute_assignments(self): fanboy = self.PossessiveSetter() fanboy.comic = 'The Laminator, issue #1' fanboy.pie = 'blueberry' self.assertEqual('blueberry', fanboy.a_pie) # # NOTE: Change the prefix to make this next assert pass # prefix = 'my' self.assertEqual( "The Laminator, issue #1", getattr(fanboy, prefix + '_comic')) # ------------------------------------------------------------------ class ScarySetter(object): def __init__(self): self.num_of_coconuts = 9 self._num_of_private_coconuts = 2 def __setattr__(self, attr_name, value): new_attr_name = attr_name if attr_name[0] != '_': new_attr_name = "altered_" + new_attr_name object.__setattr__(self, new_attr_name, value) def test_it_modifies_external_attribute_as_expected(self): setter = self.ScarySetter() setter.e = "mc hammer" self.assertEqual('mc hammer', setter.altered_e) def test_it_mangles_some_internal_attributes(self): setter = self.ScarySetter() try: coconuts = setter.num_of_coconuts except AttributeError: self.assertEqual(9, setter.altered_num_of_coconuts) def test_in_this_case_private_attributes_remain_unmangled(self): setter = self.ScarySetter() self.assertEqual(2, setter._num_of_private_coconuts)
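# A quick recap sketch (not one of the koan exercises above):
# __getattribute__ intercepts *every* attribute read, while __getattr__ only
# runs when normal lookup fails.
#
#   class Demo(object):
#       def __getattr__(self, name):   # only called for *missing* attributes
#           return "fallback for " + name
#
#   d = Demo()
#   d.x = 1
#   d.x        # => 1 (found normally, __getattr__ never called)
#   d.missing  # => "fallback for missing"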
{ "content_hash": "9a5701837dc593a6aae3317d26518069", "timestamp": "", "source": "github", "line_count": 221, "max_line_length": 119, "avg_line_length": 33.65610859728507, "alnum_prop": 0.5599623554719011, "repo_name": "febinstephen/python_koans_solutions", "id": "ac139b057608969a4c396805e2ad64e93413c6ba", "size": "7549", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "python2/koans/about_attribute_access.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1633" }, { "name": "Python", "bytes": "334437" }, { "name": "Shell", "bytes": "167" } ], "symlink_target": "" }
from datetime import datetime, timedelta import numpy as np import pytest import pandas as pd from pandas import ( Categorical, DataFrame, DatetimeIndex, Index, NaT, Period, PeriodIndex, RangeIndex, Series, Timedelta, TimedeltaIndex, Timestamp, date_range, isna, timedelta_range, to_timedelta, ) import pandas._testing as tm from pandas.core import nanops def get_objs(): indexes = [ tm.makeBoolIndex(10, name="a"), tm.makeIntIndex(10, name="a"), tm.makeFloatIndex(10, name="a"), tm.makeDateIndex(10, name="a"), tm.makeDateIndex(10, name="a").tz_localize(tz="US/Eastern"), tm.makePeriodIndex(10, name="a"), tm.makeStringIndex(10, name="a"), tm.makeUnicodeIndex(10, name="a"), ] arr = np.random.randn(10) series = [Series(arr, index=idx, name="a") for idx in indexes] objs = indexes + series return objs objs = get_objs() class TestReductions: @pytest.mark.parametrize("opname", ["max", "min"]) @pytest.mark.parametrize("obj", objs) def test_ops(self, opname, obj): result = getattr(obj, opname)() if not isinstance(obj, PeriodIndex): expected = getattr(obj.values, opname)() else: expected = Period(ordinal=getattr(obj.asi8, opname)(), freq=obj.freq) if getattr(obj, "tz", None) is not None: # We need to de-localize before comparing to the numpy-produced result expected = expected.astype("M8[ns]").astype("int64") assert result.value == expected else: assert result == expected @pytest.mark.parametrize("opname", ["max", "min"]) @pytest.mark.parametrize( "dtype, val", [ ("object", 2.0), ("float64", 2.0), ("datetime64[ns]", datetime(2011, 11, 1)), ("Int64", 2), ("boolean", True), ], ) def test_nanminmax(self, opname, dtype, val, index_or_series): # GH#7261 klass = index_or_series if dtype in ["Int64", "boolean"] and klass == pd.Index: pytest.skip("EAs can't yet be stored in an index") def check_missing(res): if dtype == "datetime64[ns]": return res is pd.NaT elif dtype == "Int64": return res is pd.NA else: return pd.isna(res) obj = klass([None], dtype=dtype) assert check_missing(getattr(obj, opname)()) assert check_missing(getattr(obj, opname)(skipna=False)) obj = klass([], dtype=dtype) assert check_missing(getattr(obj, opname)()) assert check_missing(getattr(obj, opname)(skipna=False)) if dtype == "object": # generic test with object only works for empty / all NaN return obj = klass([None, val], dtype=dtype) assert getattr(obj, opname)() == val assert check_missing(getattr(obj, opname)(skipna=False)) obj = klass([None, val, None], dtype=dtype) assert getattr(obj, opname)() == val assert check_missing(getattr(obj, opname)(skipna=False)) @pytest.mark.parametrize("opname", ["max", "min"]) def test_nanargminmax(self, opname, index_or_series): # GH#7261 klass = index_or_series arg_op = "arg" + opname if klass is Index else "idx" + opname obj = klass([pd.NaT, datetime(2011, 11, 1)]) assert getattr(obj, arg_op)() == 1 result = getattr(obj, arg_op)(skipna=False) if klass is Series: assert np.isnan(result) else: assert result == -1 obj = klass([pd.NaT, datetime(2011, 11, 1), pd.NaT]) # check DatetimeIndex non-monotonic path assert getattr(obj, arg_op)() == 1 result = getattr(obj, arg_op)(skipna=False) if klass is Series: assert np.isnan(result) else: assert result == -1 @pytest.mark.parametrize("opname", ["max", "min"]) @pytest.mark.parametrize("dtype", ["M8[ns]", "datetime64[ns, UTC]"]) def test_nanops_empty_object(self, opname, index_or_series, dtype): klass = index_or_series arg_op = "arg" + opname if klass is Index else "idx" + opname obj = klass([], dtype=dtype) assert getattr(obj, opname)() is pd.NaT assert 
getattr(obj, opname)(skipna=False) is pd.NaT with pytest.raises(ValueError, match="empty sequence"): getattr(obj, arg_op)() with pytest.raises(ValueError, match="empty sequence"): getattr(obj, arg_op)(skipna=False) def test_argminmax(self): obj = Index(np.arange(5, dtype="int64")) assert obj.argmin() == 0 assert obj.argmax() == 4 obj = Index([np.nan, 1, np.nan, 2]) assert obj.argmin() == 1 assert obj.argmax() == 3 assert obj.argmin(skipna=False) == -1 assert obj.argmax(skipna=False) == -1 obj = Index([np.nan]) assert obj.argmin() == -1 assert obj.argmax() == -1 assert obj.argmin(skipna=False) == -1 assert obj.argmax(skipna=False) == -1 obj = Index([pd.NaT, datetime(2011, 11, 1), datetime(2011, 11, 2), pd.NaT]) assert obj.argmin() == 1 assert obj.argmax() == 2 assert obj.argmin(skipna=False) == -1 assert obj.argmax(skipna=False) == -1 obj = Index([pd.NaT]) assert obj.argmin() == -1 assert obj.argmax() == -1 assert obj.argmin(skipna=False) == -1 assert obj.argmax(skipna=False) == -1 @pytest.mark.parametrize("op, expected_col", [["max", "a"], ["min", "b"]]) def test_same_tz_min_max_axis_1(self, op, expected_col): # GH 10390 df = DataFrame( pd.date_range("2016-01-01 00:00:00", periods=3, tz="UTC"), columns=["a"] ) df["b"] = df.a.subtract(Timedelta(seconds=3600)) result = getattr(df, op)(axis=1) expected = df[expected_col].rename(None) tm.assert_series_equal(result, expected) @pytest.mark.parametrize("func", ["maximum", "minimum"]) def test_numpy_reduction_with_tz_aware_dtype(self, tz_aware_fixture, func): # GH 15552 tz = tz_aware_fixture arg = pd.to_datetime(["2019"]).tz_localize(tz) expected = Series(arg) result = getattr(np, func)(expected, expected) tm.assert_series_equal(result, expected) class TestIndexReductions: # Note: the name TestIndexReductions indicates these tests # were moved from a Index-specific test file, _not_ that these tests are # intended long-term to be Index-specific @pytest.mark.parametrize( "start,stop,step", [ (0, 400, 3), (500, 0, -6), (-(10 ** 6), 10 ** 6, 4), (10 ** 6, -(10 ** 6), -4), (0, 10, 20), ], ) def test_max_min_range(self, start, stop, step): # GH#17607 idx = RangeIndex(start, stop, step) expected = idx._int64index.max() result = idx.max() assert result == expected # skipna should be irrelevant since RangeIndex should never have NAs result2 = idx.max(skipna=False) assert result2 == expected expected = idx._int64index.min() result = idx.min() assert result == expected # skipna should be irrelevant since RangeIndex should never have NAs result2 = idx.min(skipna=False) assert result2 == expected # empty idx = RangeIndex(start, stop, -step) assert isna(idx.max()) assert isna(idx.min()) def test_minmax_timedelta64(self): # monotonic idx1 = TimedeltaIndex(["1 days", "2 days", "3 days"]) assert idx1.is_monotonic # non-monotonic idx2 = TimedeltaIndex(["1 days", np.nan, "3 days", "NaT"]) assert not idx2.is_monotonic for idx in [idx1, idx2]: assert idx.min() == Timedelta("1 days") assert idx.max() == Timedelta("3 days") assert idx.argmin() == 0 assert idx.argmax() == 2 @pytest.mark.parametrize("op", ["min", "max"]) def test_minmax_timedelta_empty_or_na(self, op): # Return NaT obj = TimedeltaIndex([]) assert getattr(obj, op)() is pd.NaT obj = TimedeltaIndex([pd.NaT]) assert getattr(obj, op)() is pd.NaT obj = TimedeltaIndex([pd.NaT, pd.NaT, pd.NaT]) assert getattr(obj, op)() is pd.NaT def test_numpy_minmax_timedelta64(self): td = timedelta_range("16815 days", "16820 days", freq="D") assert np.min(td) == Timedelta("16815 days") assert np.max(td) == Timedelta("16820 
days") errmsg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=errmsg): np.min(td, out=0) with pytest.raises(ValueError, match=errmsg): np.max(td, out=0) assert np.argmin(td) == 0 assert np.argmax(td) == 5 errmsg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=errmsg): np.argmin(td, out=0) with pytest.raises(ValueError, match=errmsg): np.argmax(td, out=0) def test_timedelta_ops(self): # GH#4984 # make sure ops return Timedelta s = Series( [Timestamp("20130101") + timedelta(seconds=i * i) for i in range(10)] ) td = s.diff() result = td.mean() expected = to_timedelta(timedelta(seconds=9)) assert result == expected result = td.to_frame().mean() assert result[0] == expected result = td.quantile(0.1) expected = Timedelta(np.timedelta64(2600, "ms")) assert result == expected result = td.median() expected = to_timedelta("00:00:09") assert result == expected result = td.to_frame().median() assert result[0] == expected # GH#6462 # consistency in returned values for sum result = td.sum() expected = to_timedelta("00:01:21") assert result == expected result = td.to_frame().sum() assert result[0] == expected # std result = td.std() expected = to_timedelta(Series(td.dropna().values).std()) assert result == expected result = td.to_frame().std() assert result[0] == expected # GH#10040 # make sure NaT is properly handled by median() s = Series([Timestamp("2015-02-03"), Timestamp("2015-02-07")]) assert s.diff().median() == timedelta(days=4) s = Series( [Timestamp("2015-02-03"), Timestamp("2015-02-07"), Timestamp("2015-02-15")] ) assert s.diff().median() == timedelta(days=6) @pytest.mark.parametrize("opname", ["skew", "kurt", "sem", "prod", "var"]) def test_invalid_td64_reductions(self, opname): s = Series( [Timestamp("20130101") + timedelta(seconds=i * i) for i in range(10)] ) td = s.diff() msg = "|".join( [ f"reduction operation '{opname}' not allowed for this dtype", rf"cannot perform {opname} with type timedelta64\[ns\]", f"'TimedeltaArray' does not implement reduction '{opname}'", ] ) with pytest.raises(TypeError, match=msg): getattr(td, opname)() with pytest.raises(TypeError, match=msg): getattr(td.to_frame(), opname)(numeric_only=False) def test_minmax_tz(self, tz_naive_fixture): tz = tz_naive_fixture # monotonic idx1 = DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"], tz=tz) assert idx1.is_monotonic # non-monotonic idx2 = DatetimeIndex( ["2011-01-01", pd.NaT, "2011-01-03", "2011-01-02", pd.NaT], tz=tz ) assert not idx2.is_monotonic for idx in [idx1, idx2]: assert idx.min() == Timestamp("2011-01-01", tz=tz) assert idx.max() == Timestamp("2011-01-03", tz=tz) assert idx.argmin() == 0 assert idx.argmax() == 2 @pytest.mark.parametrize("op", ["min", "max"]) def test_minmax_nat_datetime64(self, op): # Return NaT obj = DatetimeIndex([]) assert pd.isna(getattr(obj, op)()) obj = DatetimeIndex([pd.NaT]) assert pd.isna(getattr(obj, op)()) obj = DatetimeIndex([pd.NaT, pd.NaT, pd.NaT]) assert pd.isna(getattr(obj, op)()) def test_numpy_minmax_integer(self): # GH#26125 idx = Index([1, 2, 3]) expected = idx.values.max() result = np.max(idx) assert result == expected expected = idx.values.min() result = np.min(idx) assert result == expected errmsg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=errmsg): np.min(idx, out=0) with pytest.raises(ValueError, match=errmsg): np.max(idx, out=0) expected = idx.values.argmax() result = np.argmax(idx) assert result == expected expected = idx.values.argmin() result = np.argmin(idx) 
assert result == expected errmsg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=errmsg): np.argmin(idx, out=0) with pytest.raises(ValueError, match=errmsg): np.argmax(idx, out=0) def test_numpy_minmax_range(self): # GH#26125 idx = RangeIndex(0, 10, 3) expected = idx._int64index.max() result = np.max(idx) assert result == expected expected = idx._int64index.min() result = np.min(idx) assert result == expected errmsg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=errmsg): np.min(idx, out=0) with pytest.raises(ValueError, match=errmsg): np.max(idx, out=0) # No need to test again argmax/argmin compat since the implementation # is the same as basic integer index def test_numpy_minmax_datetime64(self): dr = pd.date_range(start="2016-01-15", end="2016-01-20") assert np.min(dr) == Timestamp("2016-01-15 00:00:00", freq="D") assert np.max(dr) == Timestamp("2016-01-20 00:00:00", freq="D") errmsg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=errmsg): np.min(dr, out=0) with pytest.raises(ValueError, match=errmsg): np.max(dr, out=0) assert np.argmin(dr) == 0 assert np.argmax(dr) == 5 errmsg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=errmsg): np.argmin(dr, out=0) with pytest.raises(ValueError, match=errmsg): np.argmax(dr, out=0) def test_minmax_period(self): # monotonic idx1 = PeriodIndex([NaT, "2011-01-01", "2011-01-02", "2011-01-03"], freq="D") assert not idx1.is_monotonic assert idx1[1:].is_monotonic # non-monotonic idx2 = PeriodIndex( ["2011-01-01", NaT, "2011-01-03", "2011-01-02", NaT], freq="D" ) assert not idx2.is_monotonic for idx in [idx1, idx2]: assert idx.min() == Period("2011-01-01", freq="D") assert idx.max() == Period("2011-01-03", freq="D") assert idx1.argmin() == 1 assert idx2.argmin() == 0 assert idx1.argmax() == 3 assert idx2.argmax() == 2 for op in ["min", "max"]: # Return NaT obj = PeriodIndex([], freq="M") result = getattr(obj, op)() assert result is NaT obj = PeriodIndex([NaT], freq="M") result = getattr(obj, op)() assert result is NaT obj = PeriodIndex([NaT, NaT, NaT], freq="M") result = getattr(obj, op)() assert result is NaT def test_numpy_minmax_period(self): pr = pd.period_range(start="2016-01-15", end="2016-01-20") assert np.min(pr) == Period("2016-01-15", freq="D") assert np.max(pr) == Period("2016-01-20", freq="D") errmsg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=errmsg): np.min(pr, out=0) with pytest.raises(ValueError, match=errmsg): np.max(pr, out=0) assert np.argmin(pr) == 0 assert np.argmax(pr) == 5 errmsg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=errmsg): np.argmin(pr, out=0) with pytest.raises(ValueError, match=errmsg): np.argmax(pr, out=0) def test_min_max_categorical(self): ci = pd.CategoricalIndex(list("aabbca"), categories=list("cab"), ordered=False) msg = ( r"Categorical is not ordered for operation min\n" r"you can use .as_ordered\(\) to change the Categorical to an ordered one\n" ) with pytest.raises(TypeError, match=msg): ci.min() msg = ( r"Categorical is not ordered for operation max\n" r"you can use .as_ordered\(\) to change the Categorical to an ordered one\n" ) with pytest.raises(TypeError, match=msg): ci.max() ci = pd.CategoricalIndex(list("aabbca"), categories=list("cab"), ordered=True) assert ci.min() == "c" assert ci.max() == "b" class TestSeriesReductions: # Note: the name TestSeriesReductions indicates these tests # were moved from a series-specific test 
file, _not_ that these tests are
    # intended long-term to be series-specific

    def test_sum_inf(self):
        s = Series(np.random.randn(10))
        s2 = s.copy()

        s[5:8] = np.inf
        s2[5:8] = np.nan

        assert np.isinf(s.sum())

        arr = np.random.randn(100, 100).astype("f4")
        arr[:, 2] = np.inf

        with pd.option_context("mode.use_inf_as_na", True):
            tm.assert_almost_equal(s.sum(), s2.sum())

        res = nanops.nansum(arr, axis=1)
        assert np.isinf(res).all()

    @pytest.mark.parametrize("dtype", ["float64", "Int64", "boolean", "object"])
    @pytest.mark.parametrize("use_bottleneck", [True, False])
    @pytest.mark.parametrize("method, unit", [("sum", 0.0), ("prod", 1.0)])
    def test_empty(self, method, unit, use_bottleneck, dtype):
        with pd.option_context("use_bottleneck", use_bottleneck):
            # GH#9422 / GH#18921
            # Entirely empty
            s = Series([], dtype=dtype)
            # NA by default
            result = getattr(s, method)()
            assert result == unit

            # Explicit
            result = getattr(s, method)(min_count=0)
            assert result == unit

            result = getattr(s, method)(min_count=1)
            assert pd.isna(result)

            # Skipna, default
            result = getattr(s, method)(skipna=True)
            assert result == unit

            # Skipna, explicit
            result = getattr(s, method)(skipna=True, min_count=0)
            assert result == unit

            result = getattr(s, method)(skipna=True, min_count=1)
            assert pd.isna(result)

            result = getattr(s, method)(skipna=False, min_count=0)
            assert result == unit

            result = getattr(s, method)(skipna=False, min_count=1)
            assert pd.isna(result)

            # All-NA
            s = Series([np.nan], dtype=dtype)
            # NA by default
            result = getattr(s, method)()
            assert result == unit

            # Explicit
            result = getattr(s, method)(min_count=0)
            assert result == unit

            result = getattr(s, method)(min_count=1)
            assert pd.isna(result)

            # Skipna, default
            result = getattr(s, method)(skipna=True)
            assert result == unit

            # Skipna, explicit
            result = getattr(s, method)(skipna=True, min_count=0)
            assert result == unit

            result = getattr(s, method)(skipna=True, min_count=1)
            assert pd.isna(result)

            # Mix of valid, empty
            s = Series([np.nan, 1], dtype=dtype)
            # Default
            result = getattr(s, method)()
            assert result == 1.0

            # Explicit
            result = getattr(s, method)(min_count=0)
            assert result == 1.0

            result = getattr(s, method)(min_count=1)
            assert result == 1.0

            # Skipna
            result = getattr(s, method)(skipna=True)
            assert result == 1.0

            result = getattr(s, method)(skipna=True, min_count=0)
            assert result == 1.0

            # GH#844 (changed in GH#9422)
            df = DataFrame(np.empty((10, 0)), dtype=dtype)
            assert (getattr(df, method)(1) == unit).all()

            s = Series([1], dtype=dtype)
            result = getattr(s, method)(min_count=2)
            assert pd.isna(result)

            result = getattr(s, method)(skipna=False, min_count=2)
            assert pd.isna(result)

            s = Series([np.nan], dtype=dtype)
            result = getattr(s, method)(min_count=2)
            assert pd.isna(result)

            s = Series([np.nan, 1], dtype=dtype)
            result = getattr(s, method)(min_count=2)
            assert pd.isna(result)

    @pytest.mark.parametrize("method, unit", [("sum", 0.0), ("prod", 1.0)])
    def test_empty_multi(self, method, unit):
        s = Series(
            [1, np.nan, np.nan, np.nan],
            index=pd.MultiIndex.from_product([("a", "b"), (0, 1)]),
        )
        # 1 / 0 by default
        result = getattr(s, method)(level=0)
        expected = Series([1, unit], index=["a", "b"])
        tm.assert_series_equal(result, expected)

        # min_count=0
        result = getattr(s, method)(level=0, min_count=0)
        expected = Series([1, unit], index=["a", "b"])
        tm.assert_series_equal(result, expected)

        # min_count=1
        result = getattr(s, method)(level=0, min_count=1)
        expected = Series([1, np.nan], index=["a", "b"])
        tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize("method", ["mean"])
    @pytest.mark.parametrize("dtype", 
["Float64", "Int64", "boolean"]) def test_ops_consistency_on_empty_nullable(self, method, dtype): # GH#34814 # consistency for nullable dtypes on empty or ALL-NA mean # empty series eser = Series([], dtype=dtype) result = getattr(eser, method)() assert result is pd.NA # ALL-NA series nser = Series([np.nan], dtype=dtype) result = getattr(nser, method)() assert result is pd.NA @pytest.mark.parametrize("method", ["mean", "median", "std", "var"]) def test_ops_consistency_on_empty(self, method): # GH#7869 # consistency on empty # float result = getattr(Series(dtype=float), method)() assert pd.isna(result) # timedelta64[ns] tdser = Series([], dtype="m8[ns]") if method == "var": msg = "|".join( [ "operation 'var' not allowed", r"cannot perform var with type timedelta64\[ns\]", "'TimedeltaArray' does not implement reduction 'var'", ] ) with pytest.raises(TypeError, match=msg): getattr(tdser, method)() else: result = getattr(tdser, method)() assert result is pd.NaT def test_nansum_buglet(self): ser = Series([1.0, np.nan], index=[0, 1]) result = np.nansum(ser) tm.assert_almost_equal(result, 1) @pytest.mark.parametrize("use_bottleneck", [True, False]) def test_sum_overflow(self, use_bottleneck): with pd.option_context("use_bottleneck", use_bottleneck): # GH#6915 # overflowing on the smaller int dtypes for dtype in ["int32", "int64"]: v = np.arange(5000000, dtype=dtype) s = Series(v) result = s.sum(skipna=False) assert int(result) == v.sum(dtype="int64") result = s.min(skipna=False) assert int(result) == 0 result = s.max(skipna=False) assert int(result) == v[-1] for dtype in ["float32", "float64"]: v = np.arange(5000000, dtype=dtype) s = Series(v) result = s.sum(skipna=False) assert result == v.sum(dtype=dtype) result = s.min(skipna=False) assert np.allclose(float(result), 0.0) result = s.max(skipna=False) assert np.allclose(float(result), v[-1]) def test_empty_timeseries_reductions_return_nat(self): # covers GH#11245 for dtype in ("m8[ns]", "m8[ns]", "M8[ns]", "M8[ns, UTC]"): assert Series([], dtype=dtype).min() is pd.NaT assert Series([], dtype=dtype).max() is pd.NaT assert Series([], dtype=dtype).min(skipna=False) is pd.NaT assert Series([], dtype=dtype).max(skipna=False) is pd.NaT def test_numpy_argmin(self): # See GH#16830 data = np.arange(1, 11) s = Series(data, index=data) result = np.argmin(s) expected = np.argmin(data) assert result == expected result = s.argmin() assert result == expected msg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=msg): np.argmin(s, out=data) def test_numpy_argmax(self): # See GH#16830 data = np.arange(1, 11) s = Series(data, index=data) result = np.argmax(s) expected = np.argmax(data) assert result == expected result = s.argmax() assert result == expected msg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=msg): np.argmax(s, out=data) def test_idxmin(self): # test idxmin # _check_stat_op approach can not be used here because of isna check. 
string_series = tm.makeStringSeries().rename("series") # add some NaNs string_series[5:15] = np.NaN # skipna or no assert string_series[string_series.idxmin()] == string_series.min() assert pd.isna(string_series.idxmin(skipna=False)) # no NaNs nona = string_series.dropna() assert nona[nona.idxmin()] == nona.min() assert nona.index.values.tolist().index(nona.idxmin()) == nona.values.argmin() # all NaNs allna = string_series * np.nan assert pd.isna(allna.idxmin()) # datetime64[ns] s = Series(pd.date_range("20130102", periods=6)) result = s.idxmin() assert result == 0 s[0] = np.nan result = s.idxmin() assert result == 1 def test_idxmax(self): # test idxmax # _check_stat_op approach can not be used here because of isna check. string_series = tm.makeStringSeries().rename("series") # add some NaNs string_series[5:15] = np.NaN # skipna or no assert string_series[string_series.idxmax()] == string_series.max() assert pd.isna(string_series.idxmax(skipna=False)) # no NaNs nona = string_series.dropna() assert nona[nona.idxmax()] == nona.max() assert nona.index.values.tolist().index(nona.idxmax()) == nona.values.argmax() # all NaNs allna = string_series * np.nan assert pd.isna(allna.idxmax()) from pandas import date_range s = Series(date_range("20130102", periods=6)) result = s.idxmax() assert result == 5 s[5] = np.nan result = s.idxmax() assert result == 4 # Float64Index # GH#5914 s = Series([1, 2, 3], [1.1, 2.1, 3.1]) result = s.idxmax() assert result == 3.1 result = s.idxmin() assert result == 1.1 s = Series(s.index, s.index) result = s.idxmax() assert result == 3.1 result = s.idxmin() assert result == 1.1 def test_all_any(self): ts = tm.makeTimeSeries() bool_series = ts > 0 assert not bool_series.all() assert bool_series.any() # Alternative types, with implicit 'object' dtype. s = Series(["abc", True]) assert "abc" == s.any() # 'abc' || True => 'abc' def test_all_any_params(self): # Check skipna, with implicit 'object' dtype. s1 = Series([np.nan, True]) s2 = Series([np.nan, False]) assert s1.all(skipna=False) # nan && True => True assert s1.all(skipna=True) assert np.isnan(s2.any(skipna=False)) # nan || False => nan assert not s2.any(skipna=True) # Check level. s = Series([False, False, True, True, False, True], index=[0, 0, 1, 1, 2, 2]) tm.assert_series_equal(s.all(level=0), Series([False, True, False])) tm.assert_series_equal(s.any(level=0), Series([False, True, True])) msg = "Option bool_only is not implemented with option level" with pytest.raises(NotImplementedError, match=msg): s.any(bool_only=True, level=0) with pytest.raises(NotImplementedError, match=msg): s.all(bool_only=True, level=0) # bool_only is not implemented alone. # TODO GH38810 change this error message to: # "Series.any does not implement bool_only" msg = "Series.any does not implement numeric_only" with pytest.raises(NotImplementedError, match=msg): s.any(bool_only=True) msg = "Series.all does not implement numeric_only." 
with pytest.raises(NotImplementedError, match=msg): s.all(bool_only=True) def test_all_any_boolean(self): # Check skipna, with boolean type s1 = Series([pd.NA, True], dtype="boolean") s2 = Series([pd.NA, False], dtype="boolean") assert s1.all(skipna=False) is pd.NA # NA && True => NA assert s1.all(skipna=True) assert s2.any(skipna=False) is pd.NA # NA || False => NA assert not s2.any(skipna=True) # GH-33253: all True / all False values buggy with skipna=False s3 = Series([True, True], dtype="boolean") s4 = Series([False, False], dtype="boolean") assert s3.all(skipna=False) assert not s4.any(skipna=False) # Check level TODO(GH-33449) result should also be boolean s = Series( [False, False, True, True, False, True], index=[0, 0, 1, 1, 2, 2], dtype="boolean", ) tm.assert_series_equal(s.all(level=0), Series([False, True, False])) tm.assert_series_equal(s.any(level=0), Series([False, True, True])) def test_any_axis1_bool_only(self): # GH#32432 df = DataFrame({"A": [True, False], "B": [1, 2]}) result = df.any(axis=1, bool_only=True) expected = Series([True, False]) tm.assert_series_equal(result, expected) def test_any_all_datetimelike(self): # GH#38723 these may not be the desired long-term behavior (GH#34479) # but in the interim should be internally consistent dta = date_range("1995-01-02", periods=3)._data ser = Series(dta) df = DataFrame(ser) assert dta.all() assert dta.any() assert ser.all() assert ser.any() assert df.any().all() assert df.all().all() dta = dta.tz_localize("UTC") ser = Series(dta) df = DataFrame(ser) assert dta.all() assert dta.any() assert ser.all() assert ser.any() assert df.any().all() assert df.all().all() tda = dta - dta[0] ser = Series(tda) df = DataFrame(ser) assert tda.any() assert not tda.all() assert ser.any() assert not ser.all() assert df.any().all() assert not df.all().any() def test_timedelta64_analytics(self): # index min/max dti = pd.date_range("2012-1-1", periods=3, freq="D") td = Series(dti) - Timestamp("20120101") result = td.idxmin() assert result == 0 result = td.idxmax() assert result == 2 # GH#2982 # with NaT td[0] = np.nan result = td.idxmin() assert result == 1 result = td.idxmax() assert result == 2 # abs s1 = Series(pd.date_range("20120101", periods=3)) s2 = Series(pd.date_range("20120102", periods=3)) expected = Series(s2 - s1) result = np.abs(s1 - s2) tm.assert_series_equal(result, expected) result = (s1 - s2).abs() tm.assert_series_equal(result, expected) # max/min result = td.max() expected = Timedelta("2 days") assert result == expected result = td.min() expected = Timedelta("1 days") assert result == expected @pytest.mark.parametrize( "test_input,error_type", [ (Series([], dtype="float64"), ValueError), # For strings, or any Series with dtype 'O' (Series(["foo", "bar", "baz"]), TypeError), (Series([(1,), (2,)]), TypeError), # For mixed data types (Series(["foo", "foo", "bar", "bar", None, np.nan, "baz"]), TypeError), ], ) def test_assert_idxminmax_raises(self, test_input, error_type): """ Cases where ``Series.argmax`` and related should raise an exception """ msg = ( "reduction operation 'argmin' not allowed for this dtype|" "attempt to get argmin of an empty sequence" ) with pytest.raises(error_type, match=msg): test_input.idxmin() with pytest.raises(error_type, match=msg): test_input.idxmin(skipna=False) msg = ( "reduction operation 'argmax' not allowed for this dtype|" "attempt to get argmax of an empty sequence" ) with pytest.raises(error_type, match=msg): test_input.idxmax() with pytest.raises(error_type, match=msg): 
test_input.idxmax(skipna=False)

    def test_idxminmax_with_inf(self):
        # For numeric data with NA and Inf (GH #13595)
        s = Series([0, -np.inf, np.inf, np.nan])

        assert s.idxmin() == 1
        assert np.isnan(s.idxmin(skipna=False))

        assert s.idxmax() == 2
        assert np.isnan(s.idxmax(skipna=False))

        # Using old-style behavior that treats floating point nan, -inf, and
        # +inf as missing
        with pd.option_context("mode.use_inf_as_na", True):
            assert s.idxmin() == 0
            assert np.isnan(s.idxmin(skipna=False))
            assert s.idxmax() == 0
            assert np.isnan(s.idxmax(skipna=False))


class TestDatetime64SeriesReductions:
    # Note: the name TestDatetime64SeriesReductions indicates these tests
    # were moved from a series-specific test file, _not_ that these tests are
    # intended long-term to be series-specific

    @pytest.mark.parametrize(
        "nat_ser",
        [
            Series([pd.NaT, pd.NaT]),
            Series([pd.NaT, Timedelta("nat")]),
            Series([Timedelta("nat"), Timedelta("nat")]),
        ],
    )
    def test_minmax_nat_series(self, nat_ser):
        # GH#23282
        assert nat_ser.min() is pd.NaT
        assert nat_ser.max() is pd.NaT
        assert nat_ser.min(skipna=False) is pd.NaT
        assert nat_ser.max(skipna=False) is pd.NaT

    @pytest.mark.parametrize(
        "nat_df",
        [
            DataFrame([pd.NaT, pd.NaT]),
            DataFrame([pd.NaT, Timedelta("nat")]),
            DataFrame([Timedelta("nat"), Timedelta("nat")]),
        ],
    )
    def test_minmax_nat_dataframe(self, nat_df):
        # GH#23282
        assert nat_df.min()[0] is pd.NaT
        assert nat_df.max()[0] is pd.NaT
        assert nat_df.min(skipna=False)[0] is pd.NaT
        assert nat_df.max(skipna=False)[0] is pd.NaT

    def test_min_max(self):
        rng = pd.date_range("1/1/2000", "12/31/2000")
        rng2 = rng.take(np.random.permutation(len(rng)))

        the_min = rng2.min()
        the_max = rng2.max()
        assert isinstance(the_min, Timestamp)
        assert isinstance(the_max, Timestamp)
        assert the_min == rng[0]
        assert the_max == rng[-1]

        assert rng.min() == rng[0]
        assert rng.max() == rng[-1]

    def test_min_max_series(self):
        rng = pd.date_range("1/1/2000", periods=10, freq="4h")
        lvls = ["A", "A", "A", "B", "B", "B", "C", "C", "C", "C"]
        df = DataFrame({"TS": rng, "V": np.random.randn(len(rng)), "L": lvls})

        result = df.TS.max()
        exp = Timestamp(df.TS.iat[-1])
        assert isinstance(result, Timestamp)
        assert result == exp

        result = df.TS.min()
        exp = Timestamp(df.TS.iat[0])
        assert isinstance(result, Timestamp)
        assert result == exp


class TestCategoricalSeriesReductions:
    # Note: the name TestCategoricalSeriesReductions indicates these tests
    # were moved from a series-specific test file, _not_ that these tests are
    # intended long-term to be series-specific

    @pytest.mark.parametrize("function", ["min", "max"])
    def test_min_max_unordered_raises(self, function):
        # unordered cats have no min/max
        cat = Series(Categorical(["a", "b", "c", "d"], ordered=False))
        msg = f"Categorical is not ordered for operation {function}"
        with pytest.raises(TypeError, match=msg):
            getattr(cat, function)()

    @pytest.mark.parametrize(
        "values, categories",
        [
            (list("abc"), list("abc")),
            (list("abc"), list("cba")),
            (list("abc") + [np.nan], list("cba")),
            ([1, 2, 3], [3, 2, 1]),
            ([1, 2, 3, np.nan], [3, 2, 1]),
        ],
    )
    @pytest.mark.parametrize("function", ["min", "max"])
    def test_min_max_ordered(self, values, categories, function):
        # GH 25303
        cat = Series(Categorical(values, categories=categories, ordered=True))
        result = getattr(cat, function)(skipna=True)
        expected = categories[0] if function == "min" else categories[2]
        assert result == expected

    @pytest.mark.parametrize("function", ["min", "max"])
    @pytest.mark.parametrize("skipna", [True, False])
    def test_min_max_ordered_with_nan_only(self, function, skipna):
        #
https://github.com/pandas-dev/pandas/issues/33450 cat = Series(Categorical([np.nan], categories=[1, 2], ordered=True)) result = getattr(cat, function)(skipna=skipna) assert result is np.nan @pytest.mark.parametrize("function", ["min", "max"]) @pytest.mark.parametrize("skipna", [True, False]) def test_min_max_skipna(self, function, skipna): cat = Series( Categorical(["a", "b", np.nan, "a"], categories=["b", "a"], ordered=True) ) result = getattr(cat, function)(skipna=skipna) if skipna is True: expected = "b" if function == "min" else "a" assert result == expected else: assert result is np.nan class TestSeriesMode: # Note: the name TestSeriesMode indicates these tests # were moved from a series-specific test file, _not_ that these tests are # intended long-term to be series-specific @pytest.mark.parametrize( "dropna, expected", [(True, Series([], dtype=np.float64)), (False, Series([], dtype=np.float64))], ) def test_mode_empty(self, dropna, expected): s = Series([], dtype=np.float64) result = s.mode(dropna) tm.assert_series_equal(result, expected) @pytest.mark.parametrize( "dropna, data, expected", [ (True, [1, 1, 1, 2], [1]), (True, [1, 1, 1, 2, 3, 3, 3], [1, 3]), (False, [1, 1, 1, 2], [1]), (False, [1, 1, 1, 2, 3, 3, 3], [1, 3]), ], ) @pytest.mark.parametrize( "dt", list(np.typecodes["AllInteger"] + np.typecodes["Float"]) ) def test_mode_numerical(self, dropna, data, expected, dt): s = Series(data, dtype=dt) result = s.mode(dropna) expected = Series(expected, dtype=dt) tm.assert_series_equal(result, expected) @pytest.mark.parametrize("dropna, expected", [(True, [1.0]), (False, [1, np.nan])]) def test_mode_numerical_nan(self, dropna, expected): s = Series([1, 1, 2, np.nan, np.nan]) result = s.mode(dropna) expected = Series(expected) tm.assert_series_equal(result, expected) @pytest.mark.parametrize( "dropna, expected1, expected2, expected3", [(True, ["b"], ["bar"], ["nan"]), (False, ["b"], [np.nan], ["nan"])], ) def test_mode_str_obj(self, dropna, expected1, expected2, expected3): # Test string and object types. 
data = ["a"] * 2 + ["b"] * 3 s = Series(data, dtype="c") result = s.mode(dropna) expected1 = Series(expected1, dtype="c") tm.assert_series_equal(result, expected1) data = ["foo", "bar", "bar", np.nan, np.nan, np.nan] s = Series(data, dtype=object) result = s.mode(dropna) expected2 = Series(expected2, dtype=object) tm.assert_series_equal(result, expected2) data = ["foo", "bar", "bar", np.nan, np.nan, np.nan] s = Series(data, dtype=object).astype(str) result = s.mode(dropna) expected3 = Series(expected3, dtype=str) tm.assert_series_equal(result, expected3) @pytest.mark.parametrize( "dropna, expected1, expected2", [(True, ["foo"], ["foo"]), (False, ["foo"], [np.nan])], ) def test_mode_mixeddtype(self, dropna, expected1, expected2): s = Series([1, "foo", "foo"]) result = s.mode(dropna) expected = Series(expected1) tm.assert_series_equal(result, expected) s = Series([1, "foo", "foo", np.nan, np.nan, np.nan]) result = s.mode(dropna) expected = Series(expected2, dtype=object) tm.assert_series_equal(result, expected) @pytest.mark.parametrize( "dropna, expected1, expected2", [ ( True, ["1900-05-03", "2011-01-03", "2013-01-02"], ["2011-01-03", "2013-01-02"], ), (False, [np.nan], [np.nan, "2011-01-03", "2013-01-02"]), ], ) def test_mode_datetime(self, dropna, expected1, expected2): s = Series( ["2011-01-03", "2013-01-02", "1900-05-03", "nan", "nan"], dtype="M8[ns]" ) result = s.mode(dropna) expected1 = Series(expected1, dtype="M8[ns]") tm.assert_series_equal(result, expected1) s = Series( [ "2011-01-03", "2013-01-02", "1900-05-03", "2011-01-03", "2013-01-02", "nan", "nan", ], dtype="M8[ns]", ) result = s.mode(dropna) expected2 = Series(expected2, dtype="M8[ns]") tm.assert_series_equal(result, expected2) @pytest.mark.parametrize( "dropna, expected1, expected2", [ (True, ["-1 days", "0 days", "1 days"], ["2 min", "1 day"]), (False, [np.nan], [np.nan, "2 min", "1 day"]), ], ) def test_mode_timedelta(self, dropna, expected1, expected2): # gh-5986: Test timedelta types. 
s = Series( ["1 days", "-1 days", "0 days", "nan", "nan"], dtype="timedelta64[ns]" ) result = s.mode(dropna) expected1 = Series(expected1, dtype="timedelta64[ns]") tm.assert_series_equal(result, expected1) s = Series( [ "1 day", "1 day", "-1 day", "-1 day 2 min", "2 min", "2 min", "nan", "nan", ], dtype="timedelta64[ns]", ) result = s.mode(dropna) expected2 = Series(expected2, dtype="timedelta64[ns]") tm.assert_series_equal(result, expected2) @pytest.mark.parametrize( "dropna, expected1, expected2, expected3", [ ( True, Categorical([1, 2], categories=[1, 2]), Categorical(["a"], categories=[1, "a"]), Categorical([3, 1], categories=[3, 2, 1], ordered=True), ), ( False, Categorical([np.nan], categories=[1, 2]), Categorical([np.nan, "a"], categories=[1, "a"]), Categorical([np.nan, 3, 1], categories=[3, 2, 1], ordered=True), ), ], ) def test_mode_category(self, dropna, expected1, expected2, expected3): s = Series(Categorical([1, 2, np.nan, np.nan])) result = s.mode(dropna) expected1 = Series(expected1, dtype="category") tm.assert_series_equal(result, expected1) s = Series(Categorical([1, "a", "a", np.nan, np.nan])) result = s.mode(dropna) expected2 = Series(expected2, dtype="category") tm.assert_series_equal(result, expected2) s = Series( Categorical( [1, 1, 2, 3, 3, np.nan, np.nan], categories=[3, 2, 1], ordered=True ) ) result = s.mode(dropna) expected3 = Series(expected3, dtype="category") tm.assert_series_equal(result, expected3) @pytest.mark.parametrize( "dropna, expected1, expected2", [(True, [2 ** 63], [1, 2 ** 63]), (False, [2 ** 63], [1, 2 ** 63])], ) def test_mode_intoverflow(self, dropna, expected1, expected2): # Test for uint64 overflow. s = Series([1, 2 ** 63, 2 ** 63], dtype=np.uint64) result = s.mode(dropna) expected1 = Series(expected1, dtype=np.uint64) tm.assert_series_equal(result, expected1) s = Series([1, 2 ** 63], dtype=np.uint64) result = s.mode(dropna) expected2 = Series(expected2, dtype=np.uint64) tm.assert_series_equal(result, expected2) def test_mode_sortwarning(self): # Check for the warning that is raised when the mode # results cannot be sorted expected = Series(["foo", np.nan]) s = Series([1, "foo", "foo", np.nan, np.nan]) with tm.assert_produces_warning(UserWarning, check_stacklevel=False): result = s.mode(dropna=False) result = result.sort_values().reset_index(drop=True) tm.assert_series_equal(result, expected)
{ "content_hash": "2c4ad18b84d0789743c8d60e8cccd91c", "timestamp": "", "source": "github", "line_count": 1424, "max_line_length": 88, "avg_line_length": 33.24087078651685, "alnum_prop": 0.5477764867434245, "repo_name": "jreback/pandas", "id": "cb64b2423696f52a68984b81e39c90af0fdca566", "size": "47335", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pandas/tests/reductions/test_reductions.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "4879" }, { "name": "C", "bytes": "406353" }, { "name": "C++", "bytes": "17193" }, { "name": "HTML", "bytes": "606963" }, { "name": "Makefile", "bytes": "529" }, { "name": "Python", "bytes": "14930989" }, { "name": "Shell", "bytes": "29317" }, { "name": "Smarty", "bytes": "2040" } ], "symlink_target": "" }
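The empty and all-NA cases in test_empty above hinge on the min_count keyword. A minimal standalone sketch of that behavior, assuming only pandas and numpy are installed:

import numpy as np
import pandas as pd

s = pd.Series([], dtype="float64")
print(s.sum())             # 0.0 -- the empty sum falls back to the unit value
print(s.sum(min_count=1))  # nan -- fewer than min_count valid values present
print(pd.Series([np.nan]).prod())             # 1.0 -- NaN is skipped, unit returned
print(pd.Series([np.nan]).prod(min_count=1))  # nan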
from argparse import ArgumentParser from ..ui import input_bool from .base import CommandMixin class User(CommandMixin): """Interact with your profile and check your stats.""" def __print_stats(self, opts): building_count = 0 per_second_income = 0 for coord, planet in self.engine.user_planets(self.engine.user): building_count += len(planet.buildings) per_second_income += planet.rates.trade_value print("{}:\n {} planets\n {} buildings\n" " {} per second income trade value".format( self.engine.user.name, len(self.engine.user.planets), building_count, per_second_income)) def __change_name(self, opts): try: new_name = opts.args.pop(0) except IndexError: print("You can't change your name to nothing. Give us " "a new name for the records if you want a new one.") return prompt = ("Are you sure you want to change your name from " "'{}' to '{}'".format(self.engine.user.name, new_name)) if not input_bool(prompt): print('Okay, see you next time.') return print("Okay, just let me change your name on our records...") # loop through user's planets and change emperor name for coord, planet in self.engine.user_planets(self.engine.user): planet.emperor = new_name # change user's name last self.engine.user.name = new_name print("Alright, er, {}, you should be good to go " "now.".format(new_name)) def __setup_parser(self): parser = ArgumentParser(prog='user', description=User.__doc__) parser.set_defaults(action=self.__print_stats) self._add_argument(parser, '--change-name', const=self.__change_name, help="Change your username.") return parser, self.__print_stats def do_user(self, line): super(User, self)._do(line, self.__setup_parser) def help_user(self): print(User.__doc__)
{ "content_hash": "1f924a1563206ffa5cb5ac2ee290d7c0", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 77, "avg_line_length": 38.27272727272727, "alnum_prop": 0.5843230403800475, "repo_name": "fretboardfreak/space", "id": "ea0bb7d0742aff47575ca957ffa44c0a88ea20af", "size": "2681", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/cmdline/commands/user.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "583112" }, { "name": "Shell", "bytes": "1203" } ], "symlink_target": "" }
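The User command relies on input_bool from the package's ui module, which is not shown here. The following is only a plausible stand-in for such a helper, hypothetical rather than the project's actual code (it uses Python 3's input(); swap in raw_input under Python 2):

def input_bool(prompt):
    # Hypothetical sketch of lib.ui.input_bool: ask until we get a y/n answer.
    while True:
        answer = input('%s? (y/n) ' % prompt).strip().lower()
        if answer in ('y', 'yes'):
            return True
        if answer in ('n', 'no'):
            return False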
from Myro import *

# Connect to the robot
init()

# This is the array that will hold all the pictures as Scribbler spins
gif_images = []

# Set camera modes to make capturing faster
setPicSize("small")
autoCamera()

# Now have Scribbler spin in a circle and take a picture with each small turn
for i in range(34):
    setLEDFront(1)        # "Flash" the LED like a camera (1 to turn on, 0 to turn off)
    p = takePicture()     # Take the picture
    gif_images.append(p)  # Add that picture to the gif_images array
    setLEDFront(0)        # Turn the LED off
    turnLeft(.6, .03)     # Turn left by a small amount (makes a complete circle by the end)
    beep(.1, 700)         # Also beep to let users know it's taken a picture

# Print the status of the program
print('Done capturing photos')
print('Making the gif')

# Save the gif file. scribbler_360 is the filename; change it if you want
savePicture(gif_images, 'scribbler_360.gif')

# Beep to hear that it's done
beep(.1, 750)
beep(.1, 800)
beep(.1, 850)
{ "content_hash": "a23bfbbaa9b3643f49bda26e4ca8f726", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 89, "avg_line_length": 31.625, "alnum_prop": 0.691699604743083, "repo_name": "CSavvy/python", "id": "737a0e65bfa4c2c50ae4c6f601c99b150052c6ac", "size": "1038", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "360_gif.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "92965" } ], "symlink_target": "" }
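For reuse, the capture loop above could be wrapped in a parameterized function. A sketch using only the Myro calls the script already imports; the defaults mirror the script, and frame count and turn size still need to be tuned together so the robot closes a full circle:

def capture_panorama(frames=34, speed=.6, seconds=.03, filename='panorama.gif'):
    images = []
    for _ in range(frames):
        images.append(takePicture())   # grab a frame
        turnLeft(speed, seconds)       # rotate a little before the next frame
    savePicture(images, filename)      # write the animated gif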
import sys, getopt, requests, json

def printusertext(p_message):
    #prints a line of text that is meant for the user to read
    #do not process these lines when chaining scripts
    print('@ %s' % p_message)

def printhelp():
    #prints help text
    printusertext('')
    printusertext('This is a script to claim a device into Dashboard, create a new network for it and bind')
    printusertext('the network to a pre-existing template.')
    printusertext('')
    printusertext('To run the script, enter:')
    printusertext('python deployappliance.py -k <key> -o <org> -s <sn> -n <netw> -c <cfg_tmpl> [-t <tags>] [-a <addr>] [-m ignore_error]')
    printusertext('')
    printusertext('<key>: Your Meraki Dashboard API key')
    printusertext('<org>: Name of the Meraki Dashboard Organization to modify')
    printusertext('<sn>: Serial number of the device to claim')
    printusertext('<netw>: Name the new network will have')
    printusertext('<cfg_template>: Name of the config template the new network will be bound to')
    printusertext('-t <tags>: Optional parameter. If defined, network will be tagged with the given tags')
    printusertext('-a <addr>: Optional parameter. If defined, device will be moved to given street address')
    printusertext('-m ignore_error: Optional parameter. If defined, the script will not stop if network exists')
    printusertext('')
    printusertext('Example:')
    printusertext('python deployappliance.py -k 1234 -o MyCustomer -s XXXX-YYYY-ZZZZ -n NewBranch -c MyCfgTemplate')
    printusertext('')
    printusertext('Use double quotes ("") in Windows to pass arguments containing spaces. Names are case-sensitive.')

def getorgid(p_apikey, p_orgname):
    #looks up org id for a specific org name
    #on failure returns 'null'
    r = requests.get('https://dashboard.meraki.com/api/v0/organizations', headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
    if r.status_code != requests.codes.ok:
        return 'null'
    rjson = r.json()
    for record in rjson:
        if record['name'] == p_orgname:
            return record['id']
    return('null')

def getshardurl(p_apikey, p_orgid):
    #patch
    return("api.meraki.com")

def getnwid(p_apikey, p_shardurl, p_orgid, p_nwname):
    #looks up network id for a network name
    #on failure returns 'null'
    r = requests.get('https://%s/api/v0/organizations/%s/networks' % (p_shardurl, p_orgid), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
    if r.status_code != requests.codes.ok:
        return 'null'
    rjson = r.json()
    for record in rjson:
        if record['name'] == p_nwname:
            return record['id']
    return('null')

def createnw(p_apikey, p_shardurl, p_dstorg, p_nwdata):
    #creates network if one does not already exist with the same name
    #check if network exists
    getnwresult = getnwid(p_apikey, p_shardurl, p_dstorg, p_nwdata['name'])
    if getnwresult != 'null':
        printusertext('WARNING: Skipping network "%s" (Already exists)' % p_nwdata['name'])
        return('null')
    if p_nwdata['type'] == 'combined':
        #find actual device types
        nwtype = 'wireless switch appliance'
    else:
        nwtype = p_nwdata['type']
    if nwtype != 'systems manager':
        r = requests.post('https://%s/api/v0/organizations/%s/networks' % (p_shardurl, p_dstorg), data=json.dumps({'timeZone': p_nwdata['timeZone'], 'tags': p_nwdata['tags'], 'name': p_nwdata['name'], 'organizationId': p_dstorg, 'type': nwtype}), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
    else:
        printusertext('WARNING: Skipping network "%s" (Cannot create SM networks)' % p_nwdata['name'])
        return('null')
    return('ok')

def gettemplateid(p_apikey, p_shardurl, p_orgid, p_tname):
    #looks up config template id for a
config template name #on failure returns 'null' r = requests.get('https://%s/api/v0/organizations/%s/configTemplates' % (p_shardurl, p_orgid), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'}) if r.status_code != requests.codes.ok: return 'null' rjson = r.json() for record in rjson: if record['name'] == p_tname: return record['id'] return('null') def bindnw(p_apikey, p_shardurl, p_nwid, p_templateid): #binds a network to a template r = requests.post('https://%s/api/v0/networks/%s/bind' % (p_shardurl, p_nwid), data=json.dumps({'configTemplateId': p_templateid}), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'}) if r.status_code != requests.codes.ok: return 'null' return('ok') def claimdevice(p_apikey, p_shardurl, p_nwid, p_devserial): #claims a device into an org r = requests.post('https://%s/api/v0/networks/%s/devices/claim' % (p_shardurl, p_nwid), data=json.dumps({'serial': p_devserial}), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'}) return(0) def getdeviceinfo(p_apikey, p_shardurl, p_nwid, p_serial): #returns info for a single device #on failure returns lone device record, with serial number 'null' r = requests.get('https://%s/api/v0/networks/%s/devices/%s' % (p_shardurl, p_nwid, p_serial), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'}) returnvalue = [] if r.status_code != requests.codes.ok: returnvalue = {'serial':'null'} return(returnvalue) rjson = r.json() return(rjson) def setdevicedata(p_apikey, p_shardurl, p_nwid, p_devserial, p_field, p_value, p_movemarker): #modifies value of device record. Returns the new value #on failure returns one device record, with all values 'null' #p_movemarker is boolean: True/False movevalue = "false" if p_movemarker: movevalue = "true" r = requests.put('https://%s/api/v0/networks/%s/devices/%s' % (p_shardurl, p_nwid, p_devserial), data=json.dumps({p_field: p_value, 'moveMapMarker': movevalue}), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'}) if r.status_code != requests.codes.ok: return ('null') return('ok') def main(argv): #set default values for command line arguments arg_apikey = 'null' arg_orgname = 'null' arg_serial = 'null' arg_nwname = 'null' arg_template = 'null' arg_modexisting = 'null' arg_address = 'null' arg_nwtags = 'null' #get command line arguments # python deployappliance.py -k <key> -o <org> -s <sn> -n <netw> -c <cfg_tmpl> [-t <tags>] [-a <addr>] [-m ignore_error] try: opts, args = getopt.getopt(argv, 'hk:o:s:n:c:m:a:t:') except getopt.GetoptError: printhelp() sys.exit(2) for opt, arg in opts: if opt == '-h': printhelp() sys.exit() elif opt == '-k': arg_apikey = arg elif opt == '-o': arg_orgname = arg elif opt == '-s': arg_serial = arg elif opt == '-n': arg_nwname = arg elif opt == '-c': arg_template = arg elif opt == '-m': arg_modexisting = arg elif opt == '-a': arg_address = arg elif opt == '-t': arg_nwtags = arg #check if all parameters are required parameters have been given if arg_apikey == 'null' or arg_orgname == 'null' or arg_serial == 'null' or arg_nwname == 'null' or arg_template == 'null': printhelp() sys.exit(2) #set optional flag to ignore error if network already exists stoponerror = True if arg_modexisting == 'ignore_error': stoponerror = False #get organization id corresponding to org name provided by user orgid = getorgid(arg_apikey, arg_orgname) if orgid == 'null': printusertext('ERROR: Fetching organization failed') sys.exit(2) #get shard URL where 
Org is stored
    shardurl = getshardurl(arg_apikey, orgid)
    if shardurl == 'null':
        printusertext('ERROR: Fetching Meraki cloud shard URL failed')
        sys.exit(2)

    #make sure that a network does not already exist with the same name
    nwid = getnwid(arg_apikey, shardurl, orgid, arg_nwname)
    if nwid != 'null' and stoponerror:
        printusertext('ERROR: Network with that name already exists')
        sys.exit(2)

    #get template ID for template name argument
    templateid = gettemplateid(arg_apikey, shardurl, orgid, arg_template)
    if templateid == 'null':
        printusertext('ERROR: Unable to find template ' + arg_template)
        sys.exit(2)

    #gather parameters to create network
    #valid values for parameter 'type': 'wireless', 'switch', 'appliance', 'combined', 'wireless switch', etc
    nwtags = ''
    if arg_nwtags != 'null':
        nwtags = arg_nwtags
    nwparams = {'name': arg_nwname, 'timeZone': 'Europe/Helsinki', 'tags': nwtags, 'organizationId': orgid, 'type': 'appliance'}

    #create network and get its ID
    if nwid == 'null':
        createstatus = createnw(arg_apikey, shardurl, orgid, nwparams)
        if createstatus == 'null':
            printusertext('ERROR: Unable to create network')
            sys.exit(2)
        nwid = getnwid(arg_apikey, shardurl, orgid, arg_nwname)
        if nwid == 'null':
            printusertext('ERROR: Unable to get ID for new network')
            sys.exit(2)

    #bind network to template
    bindstatus = bindnw(arg_apikey, shardurl, nwid, templateid)
    if bindstatus == 'null' and stoponerror:
        printusertext('ERROR: Unable to bind network to template')
        sys.exit(2)

    #claim device into newly created network
    claimdevice(arg_apikey, shardurl, nwid, arg_serial)

    #check if device has been claimed successfully
    deviceinfo = getdeviceinfo(arg_apikey, shardurl, nwid, arg_serial)
    if deviceinfo['serial'] == 'null':
        printusertext('ERROR: Claiming or moving device unsuccessful')
        sys.exit(2)

    #set device hostname
    hostname = deviceinfo['model'] + '_' + arg_serial
    setdevicedata(arg_apikey, shardurl, nwid, arg_serial, 'name', hostname, False)

    #if street address is given as a parameter, set device location
    if arg_address != 'null':
        setdevicedata(arg_apikey, shardurl, nwid, arg_serial, 'address', arg_address, True)

    printusertext('End of script.')

if __name__ == '__main__':
    main(sys.argv[1:])
{ "content_hash": "12b1c5b94efd062690121ebb0effa85e", "timestamp": "", "source": "github", "line_count": 273, "max_line_length": 322, "avg_line_length": 36.59340659340659, "alnum_prop": 0.6720720720720721, "repo_name": "meraki/automation-scripts", "id": "c41c43b213bb887bb8cb3c45fdfd6f5d5b58ff22", "size": "10576", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "deployappliance.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "2448" }, { "name": "Python", "bytes": "1121262" }, { "name": "Ruby", "bytes": "857" } ], "symlink_target": "" }
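The helpers above can also be driven from another script. A sketch of the lookup chain on the happy path; the key, organization and template names here are placeholder values, not real credentials:

api_key = '1234'  # placeholder, not a real key
org_id = getorgid(api_key, 'MyCustomer')
if org_id != 'null':
    shard = getshardurl(api_key, org_id)
    template_id = gettemplateid(api_key, shard, org_id, 'MyCfgTemplate')
    print('org %s, template %s' % (org_id, template_id))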
import os, sys, shutil from datetime import date import glob from PIL import Image root_path = os.environ["HOME"] ############ # Parameters ############ raw_images_dir = f"{root_path}/ladvien.github.io/raw_images" images_dir = f"{root_path}/ladvien.github.io/images" max_size = 1800 max_file_size_kb = 400 compression_quality = 90 ignore_existing = True ######################### # Move non-compressables ######################### if not ignore_existing: os.system(f"rm -rf {images_dir}") os.system( f'rsync -h -v -r -t --ignore-existing --exclude="*.jpg" --exclude="*.JPG" --exclude="*.png" --exclude="*.PNG" {raw_images_dir}/* {images_dir}' ) ############ # Transforms ############ print("") print("*******************************************************") print("* Moving raw_images into images *") print("*******************************************************") image_paths = ( glob.glob(f"{raw_images_dir}/**/*.jpg", recursive=True) + glob.glob(f"{raw_images_dir}/**/*.JPG", recursive=True) + glob.glob(f"{raw_images_dir}/**/*.png", recursive=True) + glob.glob(f"{raw_images_dir}/**/*.PNG", recursive=True) ) def resize(image, max_size): print("Resizing image...") width, height = image.size ratio = max_size / width new_width = int(width * ratio) new_height = int(height * ratio) return image.resize((new_width, new_height)) image_index = 0 image_count = len(image_paths) for image_path in image_paths: # Get the file name. image_file_name = image_path.split("/")[-1] # Replace spaces with underscores. image_file_name = image_file_name.replace(" ", "_") # Get output directory for file. input_image_dir = raw_images_dir.split("/")[-1] output_image_dir = images_dir.split("/")[-1] image_output_file = image_path.split("/")[-1] image_output_dir = image_path.replace(input_image_dir, output_image_dir).replace( image_output_file, "" ) image_output_file_path = image_output_dir + image_output_file # Ensure the output directory exists. if not os.path.exists(image_output_dir): os.mkdir(image_output_dir) # Get the output file path. output_file_path = f"{image_output_dir}{image_file_name}" if os.path.exists(output_file_path) and ignore_existing: image_index += 1 continue # Determine the starting file size. file_size_kb = os.stat(image_path).st_size / 1000 image = Image.open(image_path) img_width, img_height = image.size # Resize images too large. if img_width > max_size: image = resize(image, max_size) if file_size_kb > max_file_size_kb: if file_size_kb < 400: print("Optimizing level 1") image.save(image_output_file_path, optimize=True, quality=95) elif file_size_kb < 500: print("Optimizing level 2") image.save(image_output_file_path, optimize=True, quality=85) elif file_size_kb < 600: print("Optimizing level 3") image.save(image_output_file_path, optimize=True, quality=85) else: print("Optimizing level 4") image.save(image_output_file_path, optimize=True, quality=85) file_size_kb_new = os.stat(image_output_file_path).st_size / 1000 print( f"{image_index} / {image_count} = {round((image_index / image_count) * 100, 2)}% -- File size before {file_size_kb}kb and after {file_size_kb_new}kb" ) else: print("Already optimized.") image.save(image_output_file_path) image_index += 1
{ "content_hash": "c3670908d4d3345794e5b6b773402b4b", "timestamp": "", "source": "github", "line_count": 123, "max_line_length": 161, "avg_line_length": 29.422764227642276, "alnum_prop": 0.5871787786681404, "repo_name": "Ladvien/ladvien.github.io", "id": "9e83db3f2387495bd0371a241cbe24952dc8157e", "size": "3619", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "admin_optimize_images.py", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "11010" }, { "name": "HTML", "bytes": "40281" }, { "name": "JavaScript", "bytes": "141200" }, { "name": "Python", "bytes": "13742" }, { "name": "Ruby", "bytes": "964" }, { "name": "SCSS", "bytes": "46988" }, { "name": "Shell", "bytes": "7015" } ], "symlink_target": "" }
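The resize helper above scales by width alone to preserve aspect ratio. Pillow's built-in thumbnail does the same aspect-preserving shrink in place; a sketch, with an assumed example path:

from PIL import Image

img = Image.open('photo.jpg')   # assumed example path
img.thumbnail((1800, 1800))     # shrinks in place, never enlarges, keeps aspect ratio
img.save('photo_small.jpg', optimize=True, quality=90)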
from Metric import Metric class RPM(Metric): def __init__(self, name, value): Metric.__init__(self, name) self.value = value
{ "content_hash": "27efb43b9d03818b69385a84ae2dc98c", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 36, "avg_line_length": 29, "alnum_prop": 0.6068965517241379, "repo_name": "USCSolarCarStrategyTeam/telemetry-pi", "id": "5b341fe9e6cef53f539a08f901ade9c9cb0d5e88", "size": "145", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "telemetry-pi/model/RPM.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "10769" } ], "symlink_target": "" }
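A usage sketch for the metric above; the Metric base class lives elsewhere in the package and is assumed here to do nothing beyond storing the name:

rpm = RPM('engine_rpm', 4500)
print(rpm.name, rpm.value)  # engine_rpm 4500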
import sys import os from datetime import datetime as dt import re from glob import glob import numpy as np import json import matplotlib as mpl mpl.use('Agg') from matplotlib import pyplot as plt ##------------------------------------------------------------------------- ## Main Program ##------------------------------------------------------------------------- def main(): bars = [i for i in range(1,93)] log_files = sorted(glob('/h/instrlogs/mosfire/*/CSU.log*')) log_files.append('/s/sdata1300/syslogs/CSU.log') filedates = {} odometers = {} moves = {} for i,log_file in enumerate(log_files): log_path, log_filename = os.path.split(log_file) if os.path.exists(os.path.join(log_path, 'odometer.json')): print(f"Loading results from {os.path.join(log_path, 'odometer.json')}") with open(os.path.join(log_path, 'odometer.json'), 'r') as FO: result = json.load(FO) odometers[log_file] = np.array(result['odometer'], dtype=np.float64) moves[log_file] = np.array(result['nmoves'], dtype=np.int) filedates[log_file] = [dt.strptime(result['dates'][0], '%Y-%m-%dT%H:%M:%S'), dt.strptime(result['dates'][1], '%Y-%m-%dT%H:%M:%S')] last_file = log_file else: nmoves = [0 for i in range(1,93)] mileage = [0. for i in range(1,93)] month_decimal = 0 year_iteration = 0 last_pos = {} for bar in bars: last_pos[bar] = 0 try: print(f'Reading {log_file}') with open(log_file, 'r') as FO: contents = FO.readlines() print(f' Analyzing {len(contents):,} lines') file_start = None file_end = None last_line = '' for line in contents: match = re.search(f'Record=<(\d\d),', line) if match is not None: bar = int(match.group(1)) md = re.match('(\w+)\s+(\d+)\s+(\d+:\d+:\d+).+', line) if md is not None: date_string = f"{md.group(1)} {int(md.group(2)):02d} {md.group(3)}" date = dt.strptime(date_string, '%b %d %H:%M:%S') if date.month < month_decimal: print(f' Iterating year: {month_decimal} {date.month}') year_iteration +=1 month_decimal = date.month if file_start is None: file_start = date ## Look for overlap for compfile in filedates.keys(): fdc = filedates[compfile] if (file_start >= fdc[0]) and (file_start <= fdc[1]): print(f'WARNING: Found Overlap with {compfile}') print(file_start) print(compfile, filedates[compfile]) sys.exit(0) else: file_end = date barpos = float(line.strip('\n').split(',')[2]) if last_pos[bar] != 0: delta = last_pos[bar] - barpos if abs(delta) > 0: nmoves[bar-1] += 1 mileage[bar-1] += abs(delta) last_pos[bar] = barpos last_line = line else: print(line) assert file_start is not None assert file_end is not None folder = os.path.split(os.path.split(log_file)[0])[1] try: folderdt = dt.strptime(folder, '%Y%m%d') year = folderdt.year except ValueError: year = dt.utcnow().year file_end = dt.strptime(file_end.strftime(f'{year} %m %d %H:%M:%S'), '%Y %m %d %H:%M:%S') file_start = dt.strptime(file_start.strftime(f'{year-year_iteration} %m %d %H:%M:%S'), '%Y %m %d %H:%M:%S') timespan = file_end - file_start filedates[log_file] = [file_start, file_end] if i > 0: gap = file_start - filedates[last_file][1] print(f' Gap of {gap} between start of this and end of last log file') last_file = log_file print(f' File covers: {file_start} to {file_end} ({timespan})') odometers[log_file] = np.array(mileage, dtype=np.float64) moves[log_file] = np.array(nmoves, dtype=np.int) ## Save Result if log_file != '/s/sdata1300/syslogs/CSU.log': result = {'odometer': mileage, 'nmoves': nmoves, 'dates': [file_start.isoformat(timespec='seconds'), file_end.isoformat(timespec='seconds')]} with open(os.path.join(log_path, 'odometer.json'), 'w') as FO: 
json.dump(result, FO) except UnicodeDecodeError: print(f' Unable to read {log_file}') ## Sum All log file results nmoves = np.array([0 for i in range(1,93)], dtype=np.int) mileage = np.array([0 for i in range(1,93)], dtype=np.float64) for i,log_file in enumerate(log_files): if log_file in odometers.keys(): nmoves += np.array(moves[log_file]) mileage += np.array(odometers[log_file]) bars = np.array(bars) mileage /= 1000 slits = np.array([int((b+1)/2) for b in bars]) left = np.array([(b%2 != 0) for b in bars]) right = np.array([(b%2 == 0) for b in bars]) maxmileage = 0 maxmoves = 0 for s in set(slits): ids = np.where(slits == s) slitmileage = np.sum(mileage[ids]) maxmileage = max([maxmileage, slitmileage]) slitmoves = np.sum(nmoves[ids]) maxmoves = max([maxmoves, slitmoves]) plt.ioff() plt.figure(figsize=(16,10)) plt.title('CSU Bar Mileage', fontsize=18) plt.bar(slits[left], mileage[left], width=0.9, align='center', color='b') plt.bar(slits[right], -mileage[right], bottom=maxmileage, width=0.9, align='center', color='g') plt.xlim(0,47) plt.xlabel("Slit Number", fontsize=18) plt.ylabel("Mileage (m)", fontsize=18) plt.grid() plt.savefig('CSU_Bar_Mileage.png', bbox_inches='tight', pad_inches=0.10) plt.figure(figsize=(16,10)) plt.title('Number of CSU Bar Moves', fontsize=18) plt.bar(slits[left], nmoves[left], width=0.9, align='center', color='b') plt.bar(slits[right], -nmoves[right], bottom=maxmoves, width=0.9, align='center', color='g') plt.xlim(0,47) plt.xlabel("Slit Number", fontsize=18) plt.ylabel("N Moves", fontsize=18) plt.grid() plt.savefig('CSU_Bar_Moves.png', bbox_inches='tight', pad_inches=0.10) if __name__ == '__main__': main()
{ "content_hash": "6b9bd6bfac2e5ffad53e86a2b5f3dde8", "timestamp": "", "source": "github", "line_count": 177, "max_line_length": 123, "avg_line_length": 42.07909604519774, "alnum_prop": 0.46415145005370567, "repo_name": "joshwalawender/KeckUtilities", "id": "d4efcf3ab215dbb474212af2764a0bfd8e3a0cc1", "size": "7491", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "MOSFIRE/CSU_Odometer/odometer.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Jupyter Notebook", "bytes": "499302" }, { "name": "Python", "bytes": "134339" } ], "symlink_target": "" }
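Syslog timestamps carry no year, which is why the loop above tracks month rollovers with month_decimal and year_iteration. A stripped-down sketch of that inference, assuming the stamps arrive in chronological order:

from datetime import datetime as dt

def assign_years(date_strings, start_year):
    """Attach years to syslog '%b %d %H:%M:%S' stamps, bumping the year on each wrap."""
    year = start_year
    last_month = 0
    stamped = []
    for s in date_strings:
        d = dt.strptime(s, '%b %d %H:%M:%S')
        if d.month < last_month:  # e.g. Dec -> Jan means the year rolled over
            year += 1
        last_month = d.month
        stamped.append(d.replace(year=year))
    return stamped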
from rest_framework import viewsets from .serializers import WineSerializer from .models import Wine class WineViewSet(viewsets.ReadOnlyModelViewSet): queryset = Wine.objects.all() serializer_class = WineSerializer
{ "content_hash": "681d41464e36a760e851958940b1ea8a", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 49, "avg_line_length": 25.11111111111111, "alnum_prop": 0.8008849557522124, "repo_name": "ctbarna/cellar", "id": "ae69ea2d5f35ab3ee596aca98db999cfe124e9ab", "size": "226", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/wine/api.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "185" }, { "name": "Python", "bytes": "22892" } ], "symlink_target": "" }
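Wiring the viewset above into URLs typically goes through a DRF router. A sketch assuming it lives in the app's urls module and uses the conventional 'wines' prefix:

from rest_framework import routers
from .api import WineViewSet

router = routers.DefaultRouter()
router.register(r'wines', WineViewSet)
urlpatterns = router.urls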
import os
import mysql.connector as mysql
import time
import datetime

metrics_mysql_password = os.environ["METRICS_MYSQL_PWD"]
sql_host = os.environ["SQL_HOST"]
query_on = os.environ["QUERY_ON"]


def upload_user_orcid_count():
    """
    Populates metrics.user_orcid_count with the number of
    non-internal users that have an ORCID on record.
    """
    # connect to mysql
    db_connection = mysql.connect(
        host=sql_host, user="metrics", passwd=metrics_mysql_password, database="metrics"
    )
    cursor = db_connection.cursor()
    query = "use " + query_on
    cursor.execute(query)

    user_orcid_count = None
    select_query = (
        "select count(*) from metrics_reporting.user_info_summary_stats "
        "where kb_internal_user = 0 and orcid is not null"
    )
    cursor.execute(select_query)
    for row in cursor:
        user_orcid_count = row[0]
    print("COUNT RETRIEVED: " + str(user_orcid_count))

    insert_cursor = db_connection.cursor(prepared=True)
    orcid_insert_statement = (
        "insert into metrics.user_orcid_count "
        "(user_orcid_count) values(%s)"
    )
    input_vals = (user_orcid_count,)
    insert_cursor.execute(orcid_insert_statement, input_vals)
    db_connection.commit()
    print("User ORCID Count : " + str(user_orcid_count))
    return


print("############################################")
print("USER ORCID count Upload (UTC): " + str(datetime.datetime.utcnow()))
start_time = time.time()
upload_user_orcid_count()
print("--- USER ORCID count time : %s seconds ---" % (time.time() - start_time))
{ "content_hash": "87bf7b2d6f6af2c73a423e6380f7a41e", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 88, "avg_line_length": 30.692307692307693, "alnum_prop": 0.6441102756892231, "repo_name": "kbase/metrics", "id": "b801eefd6d45ec7437d7713168bbd8dc7bfd9aec", "size": "1596", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "source/daily_cron_jobs/upload_user_orcid_count.py", "mode": "33188", "license": "mit", "language": [ { "name": "Dockerfile", "bytes": "2153" }, { "name": "Python", "bytes": "583584" }, { "name": "Shell", "bytes": "3453" } ], "symlink_target": "" }
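The row loop above reads one scalar from the cursor; the same read can be expressed more directly with fetchone(). A small helper sketch against any DB-API cursor, offered as an alternative rather than part of the original script:

def fetch_scalar(cursor, query):
    """Run a single-value query and return the first column of the first row."""
    cursor.execute(query)
    row = cursor.fetchone()
    return row[0] if row else None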
""" Miscellaneous utility functions shared across all games. """ import random import readline from lib import term def Delay(): """Used as a delay instead of the traditional BASIC busy-wait.""" term.Write(term.RESET, '\n[Press Enter to continue.]') raw_input() def Hello(title, version): """Displays a standard welcome banner common to all games.""" term.Write(term.CLEAR) term.WriteLn(term.BOLD_WHITE, title) term.WriteLn(term.BOLD_GREEN, 'Inspired by ', term.BOLD_RED, '101 BASIC Computer Games') term.WriteLn(term.BOLD_GREEN, 'Python Version ', version, ' by ', term.BOLD_YELLOW, 'Chaosotter') term.WriteLn(term.RESET) term.WriteLn() random.seed() def Input(prompt): """Standard input function; returns user input as a string.""" return raw_input(term.RESET + prompt + ' ' + term.BOLD_YELLOW) def InputYN(prompt): """Standard yes or no input function; returns a boolean.""" value = '' while value not in ('y', 'n'): value = Input(prompt) if len(value) > 1: value = value[0] return (value == 'y') def InputInt(prompt, min, max): """Inputs an integer in the range [min, max] inclusive.""" value = min - 1 while (value < min) or (value > max): try: value = int(Input(prompt)) except ValueError: value = min - 1 return value
{ "content_hash": "6787bd03a08028d8839e66d6c758a9e8", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 69, "avg_line_length": 29.541666666666668, "alnum_prop": 0.614950634696756, "repo_name": "chaosotter/python-101-games", "id": "492a8cf63da835d1ee0083de0cdfe2e4b507013d", "size": "1418", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/common.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "48278" } ], "symlink_target": "" }
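A usage sketch showing how a game might call the helpers above; the title and limits are made up, and it must run inside the same package so lib.term resolves:

Hello('Lunar Lander', '1.0')
turns = InputInt('How many rounds (1-10)?', 1, 10)
if InputYN('Enable sound (y/n)?'):
    print('Sound on for %d rounds.' % turns)
Delay()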
import os import sys import json import SocketServer import socket import time from datetime import datetime, timedelta from logsparser.lognormalizer import LogNormalizer as LN from threading import Thread, Lock from collectornetflow import find_flow, print_flow import collector_config import Queue from correlator import syslogQueue messages = [] messages_lock = Lock() debug_log = open("syslog.log", "w+") debug_log.write("Log started at {0}\n{1}\n".format(datetime.now(), '-'*150)) do_flush = True normalizer = LN('/usr/local/share/logsparser/normalizers') """ TODO: flush all unsaved messages when exit thread def flushSyslog() : while do_flush : messages_lock.acquire() for msg in messages : log = msg['msg'] if msg['flow'] == None : a = None b = None c = None d = None if 'source_ip' in log: a = log['source_ip'] if 'source_port' in log: b = log['source_port'] if 'dest_ip' in log: c = log['dest_ip'] if 'dest_port' in log: d = log['dest_port'] msg['flow'] = find_flow(a,b,c,d,log['date']) flush_time = datetime.now() if flush_time - msg['recieved_at'] < timedelta(seconds=15) : continue # flush only records, that are 15 seconds old debug_log.write("Message from {0} at {1}\n".format(msg['source'], msg['recieved_at'])) for key in log.keys() : if key != 'raw' : debug_log.write("{0} => {1}\n".format(key, str(log[key]))) if len(log.keys()) < 2 : debug_log.write("{0} => {1}\n".format(log.keys()[0], str(log[key]))) debug_log.write("tags => parsefailure") else : debug_log.write("raw => {0}\n".format(str(log['raw']))) if msg['flow'] != None : debug_log.write("associated flow => {0}\n".format(print_flow(msg['flow']))) debug_log.write("{0}\n".format('-'*150)) messages.remove(msg) messages_lock.release() os.fsync(debug_log.fileno()) time.sleep(1) # Sleep for 5 seconds print("messages len {0}".format(len(messages))) """ def _parse_logdata(l, t='syslog', p=None) : log = {} if p : log = {'raw' : l[:], 'program': p, 'body': l[:] } # a LogNormalizer expects input as a dictionary, grab log line, remove the trailing \n else : log = {'raw' : l[:], 'logtype' : t } # a LogNormalizer expects input as a dictionary, grab log line, remove the trailing \n try : normalizer.normalize(log) except : print(sys.exc_info()) if 'tags' in log.keys() : log['tags'] = log['tags'] + ', parsefailure' else : log['tags'] = 'parsefailure' return log class CollectorSyslogHandler(SocketServer.DatagramRequestHandler): def handle(self): now = datetime.now() #debug_log.write("SYSLOG connection from {0}\n".format(str(self.client_address))) if collector_config.be_verbose : print("SYSLOG connection from {0}".format(str(self.client_address))) data = bytes.decode(self.request[0].strip()) #socket = self.request[1] l = str(data) #debug_log.write("{0} : {1}\n".format(self.client_address[0], l)) log = _parse_logdata(l) #log = _parse_logdata(l, p="mikrotik") try : log["dest_ip"] = socket.gethostbyname_ex(log["source"]) except : #log["dest_host_ex"] = "{0} log source".format(log["source"]) log["dest_ip"] = "{0} log source".format(self.client_address[0]) msg = {} msg['recieved_at'] = now msg['source'] = str(self.client_address) msg['msg'] = log msg['flow'] = None #messages_lock.acquire() syslogQueue.put(msg) #messages_lock.release() # logging.info(str(data)) if __name__ == "__main__": if len(sys.argv) != 2 : print('{0} used to parse provided logfile. 
have some tweaks to handle standard syslog and web-server formats'.format(sys.argv[0]))
        print("Need to give log file name to parse")
        quit()
    if os.path.isfile(sys.argv[1]):
        log_path = sys.argv[1]
        #print(normalizer.dtd.error_log.filter_from_errors()[0])
        for l in open(log_path, 'r') :
            print('-'*25)
            # print logline[:-1] # grab log line, remove the trailing \n
            if l[0:3] == "May" or l[0] == '<':
                log = _parse_logdata(l)
            else :
                log = _parse_logdata(l, p='nginx')
            for key in log.keys() :
                if key != 'raw' :
                    print(key + " => " + str(log[key]))
                    if len(log.keys()) < 2 :
                        print(log.keys()[0] + " => " + str(log[key]))
                        print("tags => parsefailure")
                else :
                    print("raw => " + str(log['raw']))
            # print log
{ "content_hash": "7fef2090b194c2bccf38a61f93709e59", "timestamp": "", "source": "github", "line_count": 145, "max_line_length": 134, "avg_line_length": 34.241379310344826, "alnum_prop": 0.5460221550855991, "repo_name": "ilmarh/collectors", "id": "af46b93ff2f716bf3e46cd68a3a72324462fe6e0", "size": "4991", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "collectorsyslog.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "29967" } ], "symlink_target": "" }
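The module's own __main__ only replays a log file; running the handler above as a live collector takes a small UDP server. A minimal bootstrap sketch -- the bind address and port are assumptions (port 514 needs root, and a real deployment would likely take these from collector_config):

if __name__ == '__main__':
    server = SocketServer.UDPServer(('0.0.0.0', 514), CollectorSyslogHandler)
    server.serve_forever()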
""" Cloudbreak API Cloudbreak is a powerful left surf that breaks over a coral reef, a mile off southwest the island of Tavarua, Fiji. Cloudbreak is a cloud agnostic Hadoop as a Service API. Abstracts the provisioning and ease management and monitoring of on-demand clusters. SequenceIQ's Cloudbreak is a RESTful application development platform with the goal of helping developers to build solutions for deploying Hadoop YARN clusters in different environments. Once it is deployed in your favourite servlet container it exposes a REST API allowing to span up Hadoop clusters of arbitary sizes and cloud providers. Provisioning Hadoop has never been easier. Cloudbreak is built on the foundation of cloud providers API (Amazon AWS, Microsoft Azure, Google Cloud Platform, Openstack), Apache Ambari, Docker lightweight containers, Swarm and Consul. For further product documentation follow the link: <a href=\"http://hortonworks.com/apache/cloudbreak/\">http://hortonworks.com/apache/cloudbreak/</a> OpenAPI spec version: 2.9.0 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from pprint import pformat from six import iteritems import re class ExposedServiceResponse(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'service_name': 'str', 'display_name': 'str', 'knox_service': 'str', 'knox_url': 'str' } attribute_map = { 'service_name': 'serviceName', 'display_name': 'displayName', 'knox_service': 'knoxService', 'knox_url': 'knoxUrl' } def __init__(self, service_name=None, display_name=None, knox_service=None, knox_url=None): """ ExposedServiceResponse - a model defined in Swagger """ self._service_name = None self._display_name = None self._knox_service = None self._knox_url = None if service_name is not None: self.service_name = service_name if display_name is not None: self.display_name = display_name if knox_service is not None: self.knox_service = knox_service if knox_url is not None: self.knox_url = knox_url @property def service_name(self): """ Gets the service_name of this ExposedServiceResponse. :return: The service_name of this ExposedServiceResponse. :rtype: str """ return self._service_name @service_name.setter def service_name(self, service_name): """ Sets the service_name of this ExposedServiceResponse. :param service_name: The service_name of this ExposedServiceResponse. :type: str """ self._service_name = service_name @property def display_name(self): """ Gets the display_name of this ExposedServiceResponse. :return: The display_name of this ExposedServiceResponse. :rtype: str """ return self._display_name @display_name.setter def display_name(self, display_name): """ Sets the display_name of this ExposedServiceResponse. :param display_name: The display_name of this ExposedServiceResponse. :type: str """ self._display_name = display_name @property def knox_service(self): """ Gets the knox_service of this ExposedServiceResponse. :return: The knox_service of this ExposedServiceResponse. :rtype: str """ return self._knox_service @knox_service.setter def knox_service(self, knox_service): """ Sets the knox_service of this ExposedServiceResponse. :param knox_service: The knox_service of this ExposedServiceResponse. 
:type: str """ self._knox_service = knox_service @property def knox_url(self): """ Gets the knox_url of this ExposedServiceResponse. :return: The knox_url of this ExposedServiceResponse. :rtype: str """ return self._knox_url @knox_url.setter def knox_url(self, knox_url): """ Sets the knox_url of this ExposedServiceResponse. :param knox_url: The knox_url of this ExposedServiceResponse. :type: str """ self._knox_url = knox_url def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, ExposedServiceResponse): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
{ "content_hash": "243302f5c850c5cb31fb618dec4ebfad", "timestamp": "", "source": "github", "line_count": 199, "max_line_length": 984, "avg_line_length": 30.98994974874372, "alnum_prop": 0.5949408140100535, "repo_name": "Chaffelson/whoville", "id": "e70d5a24680fcb1d36e4e008ba535550e239bddb", "size": "6184", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "whoville/cloudbreak/models/exposed_service_response.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "6961" }, { "name": "HTML", "bytes": "72038" }, { "name": "Python", "bytes": "3729355" }, { "name": "Shell", "bytes": "95963" }, { "name": "TSQL", "bytes": "345" } ], "symlink_target": "" }
import os import xmltodict # sudo easy_install xmltodict import subprocess import zipfile class PackAndroid(object): def __init__(self, root, project_folder, project, input_apk, destination, keystore, keystore_alias, apk_name=None, zipalign=None, jarsigner=None, configuration='Release', keystore_password=None): self.name = project_folder self.proj_folder = project_folder self.project = project self.input_apk = input_apk self.destination = os.path.expanduser(destination) self.configuration = configuration self.keystore = keystore self.keystore_alias = keystore_alias self.keystore_password = keystore_password # Name of the final apk self.apk_name = apk_name if self.apk_name is None and self.keystore_alias is not None: self.apk_name = self.keystore_alias.lower() if self.apk_name is None: projf = os.path.basename(project) self.apk_name = projf.replace('.csproj', '') self.final_apk = os.path.join(self.destination, "%s-" % self.apk_name) self.signed_apk = os.path.join(self.destination, "%s-signed.apk" % self.apk_name) self.zipalign = zipalign if self.zipalign is None: self.zipalign = '/usr/bin/zipalign' self.jarsigner = jarsigner if self.jarsigner is None: self.jarsigner = "/usr/bin/jarsigner" self.keystore = os.path.join(root, self.keystore) self.project = os.path.join(root, self.project) self.proj_folder = os.path.join(root, self.proj_folder) self.input_apk = os.path.join(self.proj_folder, self.input_apk) if not os.path.exists(self.keystore): exit("Failed to locate keystore - " + self.keystore) if not os.path.exists(self.zipalign): exit("Failed to locate zipalign - " + self.zipalign) if not os.path.exists(self.jarsigner): exit("Failed to locate jarsigner - " + self.jarsigner) def clean(self): bin_folder = os.path.join(self.proj_folder, 'bin') obj_folder = os.path.join(self.proj_folder, 'obj') if os.path.exists(bin_folder): print 'Clearing away ' + bin_folder os.system('rm -fdr ' + bin_folder) if os.path.exists(obj_folder): print 'Clearing away ' + obj_folder os.system('rm -fdr ' + obj_folder) def get_manifest_dictionary(self): manifest = os.path.join(self.proj_folder, 'Properties/AndroidManifest.xml') if not os.path.exists(manifest): exit("Failed to locate AndroidManifest.xml - " + manifest) f = file(manifest) xml = f.read() f.close() doc = xmltodict.parse(xml) return doc def get_build_number(self): doc = self.get_manifest_dictionary() return doc['manifest']['@android:versionCode'] def get_version_number(self): doc = self.get_manifest_dictionary() return doc['manifest']['@android:versionName'] def set_build_number(self, build_num): doc = self.get_manifest_dictionary() doc['manifest']['@android:versionCode'] = build_num xml = xmltodict.unparse(doc, pretty=True) manifest = os.path.join(self.proj_folder, 'Properties/AndroidManifest.xml') if not os.path.exists(manifest): exit("Failed to locate AndroidManifest.xml - " + manifest) f = file(manifest, 'w') f.write(xml) f.close() def increment_build_number(self): build_number = self.get_build_number() if build_number is None: build_number = "1" else: build_number = str(int(build_number)+1) self.set_build_number(build_number) def decrement_build_number(self): build_number = self.get_build_number() if build_number is None: build_number = "1" else: build_number = str(int(build_number)-1) self.set_build_number(build_number) def set_version_number(self, version): doc = self.get_manifest_dictionary() doc['manifest']['@android:versionName'] = version xml = xmltodict.unparse(doc, pretty=True) manifest = os.path.join(self.proj_folder, 
'Properties/AndroidManifest.xml') if not os.path.exists(manifest): exit("Failed to locate AndroidManifest.xml - " + manifest) f = file(manifest, 'w') f.write(xml) f.close() def build(self): cmd_update = "msbuild %s /t:UpdateAndroidResources /p:Configuration=%s" % (self.project, self.configuration) os.system(cmd_update) cmd = "msbuild %s /t:SignAndroidPackage /p:Configuration=%s" % (self.project, self.configuration) os.system(cmd) if not os.path.exists(self.input_apk): exit("Failed to build raw apk, i.e. its missing - " + self.input_apk) @staticmethod def convert_windows_path(any_path): chars = [] for i in range(len(any_path)): char = any_path[i] if char == '\\': chars.append('/') else: chars.append(char) return ''.join(chars) @staticmethod def update_solution_resources(solution,configuration): if not os.path.exists(solution): exit("Failed to locate %s - " % os.path.basename(solution)) f = file(solution) sln = f.read() f.close() projects = [] lines = sln.split('\n') for line in lines: if line.startswith("Project("): start = line.find(",") rest = line[start+3:len(line)] end = rest.find(",") projects.append(os.path.abspath(os.path.join(os.path.dirname(solution),PackAndroid.convert_windows_path(rest[0:end-1])))) # print projects for project in projects: cmd_update = "msbuild %s /t:UpdateAndroidResources /p:Configuration=%s" % (project, configuration) os.system(cmd_update) def sign(self): sign_cmd = [self.jarsigner, "-verbose", "-sigalg", "MD5withRSA", "-digestalg", "SHA1", "-keystore", self.keystore] if not self.keystore_password is None: sign_cmd.extend(["-storepass",self.keystore_password]) sign_cmd.extend(["-signedjar", self.signed_apk, self.input_apk, self.keystore_alias]) subprocess.call(sign_cmd) subprocess.call([self.zipalign, "-f", "-v", "4", self.signed_apk, self.final_apk]) if os.path.exists(self.final_apk): if os.path.exists(self.signed_apk): os.system('rm ' + self.signed_apk) def update_version(self): build_number = self.get_build_number() print build_number q = raw_input("Would you like to increment the build number for %s? y/n\n> " % self.apk_name) if q == "y": build_number = str(int(build_number)+1) self.set_build_number(build_number) version_number = self.get_version_number() print version_number q = raw_input("Would you like to change the version number for %s? y/n\n> " % self.apk_name) if q == "y": version_number = raw_input("What to?> ") self.set_version_number(version_number) def copy_symbols(self): artifacts_folder = os.path.join(self.proj_folder, 'bin', 'Release') stuff = os.listdir(artifacts_folder) msym_folder = None for name in stuff: if name.endswith(".mSYM"): msym_folder = os.path.join(artifacts_folder, name) break if msym_folder is not None: def zipdir(path, ziph): for root, dirs, files in os.walk(path): for file in files: ziph.write(os.path.join(root, file),os.path.relpath(os.path.join(root, file), os.path.join(path, '..'))) msym_destination = os.path.join(os.path.expanduser("~/Desktop/"), os.path.basename(self.final_apk)) + ".mSYM.zip" zipf = zipfile.ZipFile(msym_destination, 'w', zipfile.ZIP_DEFLATED) zipdir(msym_folder, zipf) zipf.close() def run(self, update_versions=True, confirm_build=True): self.clean() self.final_apk = os.path.join(self.destination, "%s-" % self.apk_name) if update_versions: self.update_version() build_number = self.get_build_number() version_number = self.get_version_number() if confirm_build: print 'So thats version ' + version_number + " build " + build_number q = raw_input("Would you like to continue? 
y/n\n> ") if q != "y": print "Ok, not doing the build, suit yourself..." return None self.final_apk = self.final_apk + build_number + '-' + version_number + '.apk' print self.final_apk self.build() self.sign() self.copy_symbols() return self.final_apk
{ "content_hash": "81c28c533207407fb592f3388734fc87", "timestamp": "", "source": "github", "line_count": 234, "max_line_length": 199, "avg_line_length": 38.837606837606835, "alnum_prop": 0.589018485915493, "repo_name": "skela/r", "id": "7909e7a1e55e682a2b4294c7b151519f39f02483", "size": "9111", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packandroid.py", "mode": "33261", "license": "mit", "language": [ { "name": "Dockerfile", "bytes": "585" }, { "name": "Makefile", "bytes": "376" }, { "name": "Python", "bytes": "134353" } ], "symlink_target": "" }
import itertools import logging import numpy import operator import pprint import theano import theano.tensor as TT from theano.ifelse import ifelse import groundhog.utils as utils from groundhog.datasets import PytablesBitextIterator from groundhog.layers import \ Layer, \ MultiLayer, \ SoftmaxLayer, \ HierarchicalSoftmaxLayer, \ LSTMLayer, \ RecurrentLayer, \ RecursiveConvolutionalLayer, \ UnaryOp, \ Shift, \ LastState, \ DropOp, \ Concatenate from groundhog.models import LM_Model from groundhog.utils import sample_zeros, sample_weights_orth, init_bias, sample_weights_classic logger = logging.getLogger(__name__) def create_padded_batch(state, x, y, return_dict=False): """A callback given to the iterator to transform data in suitable format :type x: list :param x: list of numpy.array's, each array is a batch of phrases in some of source languages :type y: list :param y: same as x but for target languages :param new_format: a wrapper to be applied on top of returned value :returns: a tuple (X, Xmask, Y, Ymask) where - X is a matrix, each column contains a source sequence - Xmask is 0-1 matrix, each column marks the sequence positions in X - Y and Ymask are matrices of the same format for target sequences OR new_format applied to the tuple Notes: * actually works only with x[0] and y[0] * len(x[0]) thus is just the minibatch size * len(x[0][idx]) is the size of sequence idx """ mx = state['seqlen'] my = state['seqlen'] if state['trim_batches']: # Similar length for all source sequences mx = numpy.minimum(state['seqlen'], max([len(xx) for xx in x[0]])) + 1 # Similar length for all target sequences my = numpy.minimum(state['seqlen'], max([len(xx) for xx in y[0]])) + 1 # Batch size n = x[0].shape[0] X = numpy.zeros((mx, n), dtype='int64') Y = numpy.zeros((my, n), dtype='int64') Xmask = numpy.zeros((mx, n), dtype='float32') Ymask = numpy.zeros((my, n), dtype='float32') # Fill X and Xmask for idx in xrange(len(x[0])): # Insert sequence idx in a column of matrix X if mx < len(x[0][idx]): X[:mx, idx] = x[0][idx][:mx] else: X[:len(x[0][idx]), idx] = x[0][idx][:mx] # Mark the end of phrase if len(x[0][idx]) < mx: X[len(x[0][idx]):, idx] = state['null_sym_source'] # Initialize Xmask column with ones in all positions that # were just set in X Xmask[:len(x[0][idx]), idx] = 1. if len(x[0][idx]) < mx: Xmask[len(x[0][idx]), idx] = 1. # Fill Y and Ymask in the same way as X and Xmask in the previous loop for idx in xrange(len(y[0])): Y[:len(y[0][idx]), idx] = y[0][idx][:my] if len(y[0][idx]) < my: Y[len(y[0][idx]):, idx] = state['null_sym_target'] Ymask[:len(y[0][idx]), idx] = 1. if len(y[0][idx]) < my: Ymask[len(y[0][idx]), idx] = 1. null_inputs = numpy.zeros(X.shape[1]) # We say that an input pair is valid if both: # - either source sequence or target sequence is non-empty # - source sequence and target sequence have null_sym ending # Why did not we filter them earlier? for idx in xrange(X.shape[1]): if numpy.sum(Xmask[:, idx]) == 0 and numpy.sum(Ymask[:, idx]) == 0: null_inputs[idx] = 1 if Xmask[-1, idx] and X[-1, idx] != state['null_sym_source']: null_inputs[idx] = 1 if Ymask[-1, idx] and Y[-1, idx] != state['null_sym_target']: null_inputs[idx] = 1 valid_inputs = 1. 
- null_inputs # Leave only valid inputs X = X[:, valid_inputs.nonzero()[0]] Y = Y[:, valid_inputs.nonzero()[0]] Xmask = Xmask[:, valid_inputs.nonzero()[0]] Ymask = Ymask[:, valid_inputs.nonzero()[0]] if len(valid_inputs.nonzero()[0]) <= 0: return None # Unknown words X[X >= state['n_sym_source']] = state['unk_sym_source'] Y[Y >= state['n_sym_target']] = state['unk_sym_target'] if return_dict: return {'x': X, 'x_mask': Xmask, 'y': Y, 'y_mask': Ymask} else: return X, Xmask, Y, Ymask def get_batch_iterator(state): class Iterator(PytablesBitextIterator): def __init__(self, *args, **kwargs): PytablesBitextIterator.__init__(self, *args, **kwargs) self.batch_iter = None self.peeked_batch = None def get_homogenous_batch_iter(self): while True: k_batches = state['sort_k_batches'] batch_size = state['bs'] data = [PytablesBitextIterator.next(self) for k in range(k_batches)] x = numpy.asarray(list(itertools.chain(*map(operator.itemgetter(0), data)))) y = numpy.asarray(list(itertools.chain(*map(operator.itemgetter(1), data)))) lens = numpy.asarray([map(len, x), map(len, y)]) order = numpy.argsort(lens.max(axis=0)) if state['sort_k_batches'] > 1 \ else numpy.arange(len(x)) for k in range(k_batches): indices = order[k * batch_size:(k + 1) * batch_size] batch = create_padded_batch(state, [x[indices]], [y[indices]], return_dict=True) if batch: yield batch def next(self, peek=False): if not self.batch_iter: self.batch_iter = self.get_homogenous_batch_iter() if self.peeked_batch: # Only allow to peek one batch assert not peek logger.debug("Use peeked batch") batch = self.peeked_batch self.peeked_batch = None return batch if not self.batch_iter: raise StopIteration batch = next(self.batch_iter) if peek: self.peeked_batch = batch return batch train_data = Iterator( batch_size=int(state['bs']), target_file=state['target'][0], source_file=state['source'][0], can_fit=False, queue_size=1000, shuffle=state['shuffle'], use_infinite_loop=state['use_infinite_loop'], max_len=state['seqlen']) valid_data = Iterator( batch_size=int(state['bs']), target_file=state['valid_target'][0], source_file=state['valid_source'][0], can_fit=False, queue_size=1000, shuffle=state['shuffle'], use_infinite_loop=state['use_infinite_loop'], max_len=state['seqlen']) return train_data, valid_data class RecurrentLayerWithSearch(Layer): """A copy of RecurrentLayer from groundhog""" def __init__(self, rng, n_hids, c_dim=None, scale=.01, activation=TT.tanh, bias_fn='init_bias', bias_scale=0., init_fn='sample_weights', gating=False, reseting=False, dropout=1., gater_activation=TT.nnet.sigmoid, reseter_activation=TT.nnet.sigmoid, weight_noise=False, name=None): logger.debug("RecurrentLayerWithSearch is used") self.grad_scale = 1 assert gating == True assert reseting == True assert dropout == 1. 
assert weight_noise == False updater_activation = gater_activation if type(init_fn) is str or type(init_fn) is unicode: init_fn = eval(init_fn) if type(bias_fn) is str or type(bias_fn) is unicode: bias_fn = eval(bias_fn) if type(activation) is str or type(activation) is unicode: activation = eval(activation) if type(updater_activation) is str or type(updater_activation) is unicode: updater_activation = eval(updater_activation) if type(reseter_activation) is str or type(reseter_activation) is unicode: reseter_activation = eval(reseter_activation) self.scale = scale self.activation = activation self.n_hids = n_hids self.bias_scale = bias_scale self.bias_fn = bias_fn self.init_fn = init_fn self.updater_activation = updater_activation self.reseter_activation = reseter_activation self.c_dim = c_dim assert rng is not None, "random number generator should not be empty!" super(RecurrentLayerWithSearch, self).__init__(self.n_hids, self.n_hids, rng, name) self.params = [] self._init_params() def _init_params(self): self.W_hh = theano.shared( self.init_fn(self.n_hids, self.n_hids, -1, self.scale, rng=self.rng), name="W_%s" % self.name) self.params = [self.W_hh] self.G_hh = theano.shared( self.init_fn(self.n_hids, self.n_hids, -1, self.scale, rng=self.rng), name="G_%s" % self.name) self.params.append(self.G_hh) self.R_hh = theano.shared( self.init_fn(self.n_hids, self.n_hids, -1, self.scale, rng=self.rng), name="R_%s" % self.name) self.params.append(self.R_hh) self.A_cp = theano.shared( sample_weights_classic(self.c_dim, self.n_hids, -1, 10 ** (-3), rng=self.rng), name="A_%s" % self.name) self.params.append(self.A_cp) self.B_hp = theano.shared( sample_weights_classic(self.n_hids, self.n_hids, -1, 10 ** (-3), rng=self.rng), name="B_%s" % self.name) self.params.append(self.B_hp) self.D_pe = theano.shared( numpy.zeros((self.n_hids, 1), dtype="float32"), name="D_%s" % self.name) self.params.append(self.D_pe) self.params_grad_scale = [self.grad_scale for x in self.params] def set_decoding_layers(self, c_inputer, c_reseter, c_updater): self.c_inputer = c_inputer self.c_reseter = c_reseter self.c_updater = c_updater for layer in [c_inputer, c_reseter, c_updater]: self.params += layer.params self.params_grad_scale += layer.params_grad_scale def step_fprop(self, state_below, state_before, gater_below=None, reseter_below=None, mask=None, c=None, c_mask=None, p_from_c=None, use_noise=True, no_noise_bias=False, step_num=None, return_alignment=False): """ Constructs the computational graph of this layer. 
        :type state_below: theano variable
        :param state_below: the input to the layer

        :type mask: None or theano variable
        :param mask: mask describing the length of each sequence in a
            minibatch

        :type state_before: theano variable
        :param state_before: the previous value of the hidden state of the
            layer

        :type updater_below: theano variable
        :param updater_below: the input to the update gate

        :type reseter_below: theano variable
        :param reseter_below: the input to the reset gate

        :type use_noise: bool
        :param use_noise: flag saying if weight noise should be used in
            computing the output of this layer

        :type no_noise_bias: bool
        :param no_noise_bias: flag saying if weight noise should be added to
            the bias as well
        """

        updater_below = gater_below

        W_hh = self.W_hh
        G_hh = self.G_hh
        R_hh = self.R_hh
        A_cp = self.A_cp
        B_hp = self.B_hp
        D_pe = self.D_pe

        # The code works only with 3D tensors
        cndim = c.ndim
        if cndim == 2:
            c = c[:, None, :]

        # Warning: source_num and target_num should be equal,
        #   or one of them should be 1 (they have to broadcast)
        #   for the following code to make any sense.
        source_len = c.shape[0]
        source_num = c.shape[1]
        target_num = state_before.shape[0]
        dim = self.n_hids

        # Form projection to the tanh layer from the previous hidden state
        # Shape: (source_len, target_num, dim)
        p_from_h = ReplicateLayer(source_len)(utils.dot(state_before, B_hp)).out

        # Form projection to the tanh layer from the source annotation.
        if not p_from_c:
            p_from_c = utils.dot(c, A_cp).reshape((source_len, source_num, dim))

        # Sum projections - broadcasting happens at the dimension 1.
        p = p_from_h + p_from_c

        # Apply non-linearity and project to energy.
        energy = TT.exp(utils.dot(TT.tanh(p), D_pe)).reshape((source_len, target_num))
        if c_mask:
            # This is used for batches only, that is target_num == source_num
            energy *= c_mask

        # Calculate energy sums.
        normalizer = energy.sum(axis=0)

        # Get probabilities.
        probs = energy / normalizer

        # Calculate weighted sums of source annotations.
        # If target_num == 1, c should be broadcast at the 1st dimension.
        # Probabilities are broadcasted at the 2nd dimension.
        ctx = (c * probs.dimshuffle(0, 1, 'x')).sum(axis=0)

        state_below += self.c_inputer(ctx).out
        reseter_below += self.c_reseter(ctx).out
        updater_below += self.c_updater(ctx).out

        # Reset gate:
        # optionally reset the hidden state.
        reseter = self.reseter_activation(TT.dot(state_before, R_hh) +
                reseter_below)
        reseted_state_before = reseter * state_before

        # Feed the input to obtain potential new state.
        preactiv = TT.dot(reseted_state_before, W_hh) + state_below
        h = self.activation(preactiv)

        # Update gate:
        # optionally reject the potential new state and use the new one.
updater = self.updater_activation(TT.dot(state_before, G_hh) + updater_below) h = updater * h + (1 - updater) * state_before if mask is not None: if h.ndim == 2 and mask.ndim == 1: mask = mask.dimshuffle(0, 'x') h = mask * h + (1 - mask) * state_before results = [h, ctx] if return_alignment: results += [probs] return results def fprop(self, state_below, mask=None, init_state=None, gater_below=None, reseter_below=None, c=None, c_mask=None, nsteps=None, batch_size=None, use_noise=True, truncate_gradient=-1, no_noise_bias=False, return_alignment=False): updater_below = gater_below if theano.config.floatX == 'float32': floatX = numpy.float32 else: floatX = numpy.float64 if nsteps is None: nsteps = state_below.shape[0] if batch_size and batch_size != 1: nsteps = nsteps / batch_size if batch_size is None and state_below.ndim == 3: batch_size = state_below.shape[1] if state_below.ndim == 2 and \ (not isinstance(batch_size, int) or batch_size > 1): state_below = state_below.reshape((nsteps, batch_size, self.n_in)) if updater_below: updater_below = updater_below.reshape((nsteps, batch_size, self.n_in)) if reseter_below: reseter_below = reseter_below.reshape((nsteps, batch_size, self.n_in)) if not init_state: if not isinstance(batch_size, int) or batch_size != 1: init_state = TT.alloc(floatX(0), batch_size, self.n_hids) else: init_state = TT.alloc(floatX(0), self.n_hids) p_from_c = utils.dot(c, self.A_cp).reshape( (c.shape[0], c.shape[1], self.n_hids)) if mask: sequences = [state_below, mask, updater_below, reseter_below] non_sequences = [c, c_mask, p_from_c] # seqs | out | non_seqs fn = lambda x, m, g, r, h, c1, cm, pc: self.step_fprop(x, h, mask=m, gater_below=g, reseter_below=r, c=c1, p_from_c=pc, c_mask=cm, use_noise=use_noise, no_noise_bias=no_noise_bias, return_alignment=return_alignment) else: sequences = [state_below, updater_below, reseter_below] non_sequences = [c, p_from_c] # seqs | out | non_seqs fn = lambda x, g, r, h, c1, pc: self.step_fprop(x, h, gater_below=g, reseter_below=r, c=c1, p_from_c=pc, use_noise=use_noise, no_noise_bias=no_noise_bias, return_alignment=return_alignment) outputs_info = [init_state, None] if return_alignment: outputs_info.append(None) rval, updates = theano.scan(fn, sequences=sequences, non_sequences=non_sequences, outputs_info=outputs_info, name='layer_%s' % self.name, truncate_gradient=truncate_gradient, n_steps=nsteps) self.out = rval self.rval = rval self.updates = updates return self.out class ReplicateLayer(Layer): def __init__(self, n_times): self.n_times = n_times super(ReplicateLayer, self).__init__(0, 0, None) def fprop(self, x): # This is black magic based on broadcasting, # that's why variable names don't make any sense. 
# copy x n_times times a = TT.shape_padleft(x) padding = [1] * x.ndim b = TT.alloc(numpy.float32(1), self.n_times, *padding) self.out = a * b return self.out class PadLayer(Layer): def __init__(self, required): self.required = required Layer.__init__(self, 0, 0, None) def fprop(self, x): if_longer = x[:self.required] padding = ReplicateLayer(TT.max([1, self.required - x.shape[0]]))(x[-1]).out if_shorter = TT.concatenate([x, padding]) diff = x.shape[0] - self.required self.out = ifelse(diff < 0, if_shorter, if_longer) return self.out class ZeroLayer(Layer): def fprop(self, x): self.out = TT.zeros(x.shape) return self.out def none_if_zero(x): if x == 0: return None return x class Maxout(object): def __init__(self, maxout_part): self.maxout_part = maxout_part def __call__(self, x): shape = x.shape if x.ndim == 1: shape1 = TT.cast(shape[0] / self.maxout_part, 'int64') shape2 = TT.cast(self.maxout_part, 'int64') x = x.reshape([shape1, shape2]) x = x.max(1) else: shape1 = TT.cast(shape[1] / self.maxout_part, 'int64') shape2 = TT.cast(self.maxout_part, 'int64') x = x.reshape([shape[0], shape1, shape2]) x = x.max(2) return x def prefix_lookup(state, p, s): if '%s_%s' % (p, s) in state: return state['%s_%s' % (p, s)] return state[s] class EncoderDecoderBase(object): def _create_embedding_layers(self): logger.debug("_create_embedding_layers") self.approx_embedder = MultiLayer( self.rng, n_in=self.state['n_sym_source'] if self.prefix.find("enc") >= 0 else self.state['n_sym_target'], n_hids=[self.state['rank_n_approx']], activation=[self.state['rank_n_activ']], name='{}_approx_embdr'.format(self.prefix), **self.default_kwargs) # We have 3 embeddings for each word in each level, # the one used as input, # the one used to control resetting gate, # the one used to control update gate. 
self.input_embedders = [lambda x: 0] * self.num_levels self.reset_embedders = [lambda x: 0] * self.num_levels self.update_embedders = [lambda x: 0] * self.num_levels embedder_kwargs = dict(self.default_kwargs) embedder_kwargs.update(dict( n_in=self.state['rank_n_approx'], n_hids=[self.state['dim'] * self.state['dim_mult']], activation=['lambda x:x'])) for level in range(self.num_levels): self.input_embedders[level] = MultiLayer( self.rng, name='{}_input_embdr_{}'.format(self.prefix, level), **embedder_kwargs) if prefix_lookup(self.state, self.prefix, 'rec_gating'): self.update_embedders[level] = MultiLayer( self.rng, learn_bias=False, name='{}_update_embdr_{}'.format(self.prefix, level), **embedder_kwargs) if prefix_lookup(self.state, self.prefix, 'rec_reseting'): self.reset_embedders[level] = MultiLayer( self.rng, learn_bias=False, name='{}_reset_embdr_{}'.format(self.prefix, level), **embedder_kwargs) def _create_inter_level_layers(self): logger.debug("_create_inter_level_layers") inter_level_kwargs = dict(self.default_kwargs) inter_level_kwargs.update( n_in=self.state['dim'], n_hids=self.state['dim'] * self.state['dim_mult'], activation=['lambda x:x']) self.inputers = [0] * self.num_levels self.reseters = [0] * self.num_levels self.updaters = [0] * self.num_levels for level in range(1, self.num_levels): self.inputers[level] = MultiLayer(self.rng, name="{}_inputer_{}".format(self.prefix, level), **inter_level_kwargs) if prefix_lookup(self.state, self.prefix, 'rec_reseting'): self.reseters[level] = MultiLayer(self.rng, name="{}_reseter_{}".format(self.prefix, level), **inter_level_kwargs) if prefix_lookup(self.state, self.prefix, 'rec_gating'): self.updaters[level] = MultiLayer(self.rng, name="{}_updater_{}".format(self.prefix, level), **inter_level_kwargs) def _create_transition_layers(self): logger.debug("_create_transition_layers") self.transitions = [] rec_layer = eval(prefix_lookup(self.state, self.prefix, 'rec_layer')) add_args = dict() if rec_layer == RecurrentLayerWithSearch: add_args = dict(c_dim=self.state['c_dim']) for level in range(self.num_levels): self.transitions.append(rec_layer( self.rng, n_hids=self.state['dim'], activation=prefix_lookup(self.state, self.prefix, 'activ'), bias_scale=self.state['bias'], init_fn=(self.state['rec_weight_init_fn'] if not self.skip_init else "sample_zeros"), scale=prefix_lookup(self.state, self.prefix, 'rec_weight_scale'), weight_noise=self.state['weight_noise_rec'], dropout=self.state['dropout_rec'], gating=prefix_lookup(self.state, self.prefix, 'rec_gating'), gater_activation=prefix_lookup(self.state, self.prefix, 'rec_gater'), reseting=prefix_lookup(self.state, self.prefix, 'rec_reseting'), reseter_activation=prefix_lookup(self.state, self.prefix, 'rec_reseter'), name='{}_transition_{}'.format(self.prefix, level), **add_args)) class Encoder(EncoderDecoderBase): def __init__(self, state, rng, prefix='enc', skip_init=False): self.state = state self.rng = rng self.prefix = prefix self.skip_init = skip_init self.num_levels = self.state['encoder_stack'] # support multiple gating/memory units if 'dim_mult' not in self.state: self.state['dim_mult'] = 1. if 'hid_mult' not in self.state: self.state['hid_mult'] = 1. 
def create_layers(self): """ Create all elements of Encoder's computation graph""" self.default_kwargs = dict( init_fn=self.state['weight_init_fn'] if not self.skip_init else "sample_zeros", weight_noise=self.state['weight_noise'], scale=self.state['weight_scale']) self._create_embedding_layers() self._create_transition_layers() self._create_inter_level_layers() self._create_representation_layers() def _create_representation_layers(self): logger.debug("_create_representation_layers") # If we have a stack of RNN, then their last hidden states # are combined with a maxout layer. self.repr_contributors = [None] * self.num_levels for level in range(self.num_levels): self.repr_contributors[level] = MultiLayer( self.rng, n_in=self.state['dim'], n_hids=[self.state['dim'] * self.state['maxout_part']], activation=['lambda x: x'], name="{}_repr_contrib_{}".format(self.prefix, level), **self.default_kwargs) self.repr_calculator = UnaryOp( activation=eval(self.state['unary_activ']), name="{}_repr_calc".format(self.prefix)) def build_encoder(self, x, x_mask=None, use_noise=False, approx_embeddings=None, return_hidden_layers=False): """Create the computational graph of the RNN Encoder :param x: input variable, either vector of word indices or matrix of word indices, where each column is a sentence :param x_mask: when x is a matrix and input sequences are of variable length, this 1/0 matrix is used to specify the matrix positions where the input actually is :param use_noise: turns on addition of noise to weights (UNTESTED) :param approx_embeddings: forces encoder to use given embeddings instead of its own :param return_hidden_layers: if True, encoder returns all the activations of the hidden layer (WORKS ONLY IN NON-HIERARCHICAL CASE) """ # Low rank embeddings of all the input words. # Shape in case of matrix input: # (max_seq_len * batch_size, rank_n_approx), # where max_seq_len is the maximum length of batch sequences. # Here and later n_words = max_seq_len * batch_size. # Shape in case of vector input: # (seq_len, rank_n_approx) if not approx_embeddings: approx_embeddings = self.approx_embedder(x) # Low rank embeddings are projected to contribute # to input, reset and update signals. # All the shapes: (n_words, dim) input_signals = [] reset_signals = [] update_signals = [] for level in range(self.num_levels): input_signals.append(self.input_embedders[level](approx_embeddings)) update_signals.append(self.update_embedders[level](approx_embeddings)) reset_signals.append(self.reset_embedders[level](approx_embeddings)) # Hidden layers. # Shape in case of matrix input: (max_seq_len, batch_size, dim) # Shape in case of vector input: (seq_len, dim) hidden_layers = [] for level in range(self.num_levels): # Each hidden layer (except the bottom one) receives # input, reset and update signals from below. 
            # FIXME above statement is not correct, should be:
            #   each input, reset and update gate,
            #   except for those at time 0,
            #   takes previous hidden layer as input
            # All the shapes: (n_words, dim)
            if level > 0:
                # encoder
                input_signals[level] += self.inputers[level](hidden_layers[-1])
                update_signals[level] += self.updaters[level](hidden_layers[-1])
                reset_signals[level] += self.reseters[level](hidden_layers[-1])

            # transitions are RecurrentLayers
            hidden_layers.append(self.transitions[level](
                input_signals[level],
                nsteps=x.shape[0],
                batch_size=x.shape[1] if x.ndim == 2 else 1,
                mask=x_mask,
                gater_below=none_if_zero(update_signals[level]),
                reseter_below=none_if_zero(reset_signals[level]),
                use_noise=use_noise))

        if return_hidden_layers:
            assert self.state['encoder_stack'] == 1
            return hidden_layers[0]

        # If we have no stack of RNNs but only a single one,
        # then its last hidden state is used as the representation.
        # Return value shape in case of matrix input:
        #   (batch_size, dim)
        # Return value shape in case of vector input:
        #   (dim,)
        if self.num_levels == 1 or self.state['take_top']:
            c = LastState()(hidden_layers[-1])
            if c.out.ndim == 2:
                c.out = c.out[:, :self.state['dim']]
            else:
                c.out = c.out[:self.state['dim']]
            return c

        # If we have a stack of RNN, then their last hidden states
        # are combined with a maxout layer.
        # Return value however has the same shape.
        contributions = []
        for level in range(self.num_levels):
            contributions.append(self.repr_contributors[level](
                LastState()(hidden_layers[level])))
        # I do not know a good starting value for sum
        # concat all num_levels last hidden layers (after repr_contributors)
        c = self.repr_calculator(sum(contributions[1:], contributions[0]))
        return c


class Decoder(EncoderDecoderBase):

    EVALUATION = 0
    SAMPLING = 1
    BEAM_SEARCH = 2

    def __init__(self, state, rng, prefix='dec',
                 skip_init=False, compute_alignment=False):
        self.state = state
        self.rng = rng
        self.prefix = prefix
        self.skip_init = skip_init
        self.compute_alignment = compute_alignment

        # Actually there is a problem here -
        # we do not distinguish between the number of input layers
        # and the number of output layers.
        self.num_levels = self.state['decoder_stack']

        if 'dim_mult' not in self.state:
            self.state['dim_mult'] = 1.
def create_layers(self): """ Create all elements of Decoder's computation graph""" self.default_kwargs = dict( init_fn=self.state['weight_init_fn'] if not self.skip_init else "sample_zeros", weight_noise=self.state['weight_noise'], scale=self.state['weight_scale']) self._create_embedding_layers() self._create_transition_layers() self._create_inter_level_layers() self._create_initialization_layers() self._create_decoding_layers() self._create_readout_layers() if self.state['search']: assert self.num_levels == 1 self.transitions[0].set_decoding_layers( self.decode_inputers[0], self.decode_reseters[0], self.decode_updaters[0]) def _create_initialization_layers(self): logger.debug("_create_initialization_layers") self.initializers = [ZeroLayer()] * self.num_levels if self.state['bias_code']: for level in range(self.num_levels): self.initializers[level] = MultiLayer( self.rng, n_in=self.state['dim'], n_hids=[self.state['dim'] * self.state['hid_mult']], activation=[prefix_lookup(self.state, 'dec', 'activ')], bias_scale=[self.state['bias']], name='{}_initializer_{}'.format(self.prefix, level), **self.default_kwargs) def _create_decoding_layers(self): logger.debug("_create_decoding_layers") self.decode_inputers = [lambda x: 0] * self.num_levels self.decode_reseters = [lambda x: 0] * self.num_levels self.decode_updaters = [lambda x: 0] * self.num_levels # self.back_decode_inputers = [lambda x : 0] * self.num_levels # self.back_decode_reseters = [lambda x : 0] * self.num_levels # self.back_decode_updaters = [lambda x : 0] * self.num_levels decoding_kwargs = dict(self.default_kwargs) decoding_kwargs.update(dict( n_in=self.state['c_dim'], n_hids=self.state['dim'] * self.state['dim_mult'], activation=['lambda x:x'], learn_bias=False)) if self.state['decoding_inputs']: # use context from encoder for level in range(self.num_levels): # Input contributions self.decode_inputers[level] = MultiLayer( self.rng, name='{}_dec_inputter_{}'.format(self.prefix, level), **decoding_kwargs) # Update gate contributions if prefix_lookup(self.state, 'dec', 'rec_gating'): self.decode_updaters[level] = MultiLayer( self.rng, name='{}_dec_updater_{}'.format(self.prefix, level), **decoding_kwargs) # Reset gate contributions if prefix_lookup(self.state, 'dec', 'rec_reseting'): self.decode_reseters[level] = MultiLayer( self.rng, name='{}_dec_reseter_{}'.format(self.prefix, level), **decoding_kwargs) def _create_readout_layers(self): softmax_layer = self.state['softmax_layer'] if 'softmax_layer' in self.state \ else 'SoftmaxLayer' logger.debug("_create_readout_layers") readout_kwargs = dict(self.default_kwargs) readout_kwargs.update(dict( n_hids=self.state['dim'], activation='lambda x: x', )) self.repr_readout = MultiLayer( self.rng, n_in=self.state['c_dim'], learn_bias=False, name='{}_repr_readout'.format(self.prefix), **readout_kwargs) # Attention - this is the only readout layer # with trainable bias. Should be careful with that. 
self.hidden_readouts = [None] * self.num_levels for level in range(self.num_levels): self.hidden_readouts[level] = MultiLayer( self.rng, n_in=self.state['dim'], name='{}_hid_readout_{}'.format(self.prefix, level), **readout_kwargs) self.prev_word_readout = 0 if self.state['bigram']: self.prev_word_readout = MultiLayer( self.rng, n_in=self.state['rank_n_approx'], n_hids=self.state['dim'], activation=['lambda x:x'], learn_bias=False, name='{}_prev_readout_{}'.format(self.prefix, level), **self.default_kwargs) if self.state['deep_out']: act_layer = UnaryOp(activation=eval(self.state['unary_activ'])) drop_layer = DropOp(rng=self.rng, dropout=self.state['dropout']) self.output_nonlinearities = [act_layer, drop_layer] self.output_layer = eval(softmax_layer)( self.rng, n_in=self.state['dim'] / self.state['maxout_part'], n_out=self.state['n_sym_target'], sparsity=-1, rank_n_approx=self.state['rank_n_approx'], name='{}_deep_softmax'.format(self.prefix), use_nce=self.state['use_nce'] if 'use_nce' in self.state else False, **self.default_kwargs) else: self.output_nonlinearities = [] self.output_layer = eval(softmax_layer)( self.rng, n_in=self.state['dim'], n_out=self.state['n_sym_target'], sparsity=-1, rank_n_approx=self.state['rank_n_approx'], name='dec_softmax', sum_over_time=True, use_nce=self.state['use_nce'] if 'use_nce' in self.state else False, **self.default_kwargs) def build_decoder(self, c, y, c_mask=None, y_mask=None, step_num=None, mode=EVALUATION, given_init_states=None, T=1): """Create the computational graph of the RNN Decoder. :param c: representations produced by an encoder. (n_samples, dim) matrix if mode == sampling or (max_seq_len, batch_size, dim) matrix if mode == evaluation :param c_mask: if mode == evaluation a 0/1 matrix identifying valid positions in c :param y: if mode == evaluation target sequences, matrix of word indices of shape (max_seq_len, batch_size), where each column is a sequence if mode != evaluation a vector of previous words of shape (n_samples,) :param y_mask: if mode == evaluation a 0/1 matrix determining lengths of the target sequences, must be None otherwise :param mode: chooses on of three modes: evaluation, sampling and beam_search :param given_init_states: for sampling and beam_search. A list of hidden states matrices for each layer, each matrix is (n_samples, dim) :param T: sampling temperature """ # Check parameter consistency if mode == Decoder.EVALUATION: assert not given_init_states else: assert not y_mask assert given_init_states if mode == Decoder.BEAM_SEARCH: assert T == 1 # For log-likelihood evaluation the representation is replicated for conveniency # not when backward RNN is used # Shape if mode == evaluation # (max_seq_len, batch_size, dim) # Shape if mode != evaluation # (n_samples, dim) if not self.state['search']: if mode == Decoder.EVALUATION: c = PadLayer(y.shape[0])(c) else: assert step_num c_pos = TT.minimum(step_num, c.shape[0] - 1) # Low rank embeddings of all the input words. # Shape if mode == evaluation # (n_words, rank_n_approx), # Shape if mode != evaluation # (n_samples, rank_n_approx) approx_embeddings = self.approx_embedder(y) # Low rank embeddings are projected to contribute # to input, reset and update signals. # All the shapes if mode == evaluation: # (n_words, dim) # where: n_words = max_seq_len * batch_size # All the shape if mode != evaluation: # (n_samples, dim) input_signals = [] reset_signals = [] update_signals = [] for level in range(self.num_levels): # Contributions directly from input words. 
input_signals.append(self.input_embedders[level](approx_embeddings)) update_signals.append(self.update_embedders[level](approx_embeddings)) reset_signals.append(self.reset_embedders[level](approx_embeddings)) # Contributions from the encoded source sentence. if not self.state['search']: current_c = c if mode == Decoder.EVALUATION else c[c_pos] input_signals[level] += self.decode_inputers[level](current_c) update_signals[level] += self.decode_updaters[level](current_c) reset_signals[level] += self.decode_reseters[level](current_c) # Hidden layers' initial states. # Shapes if mode == evaluation: # (batch_size, dim) # Shape if mode != evaluation: # (n_samples, dim) init_states = given_init_states if not init_states: init_states = [] for level in range(self.num_levels): init_c = c[0, :, -self.state['dim']:] init_states.append(self.initializers[level](init_c)) # Hidden layers' states. # Shapes if mode == evaluation: # (seq_len, batch_size, dim) # Shapes if mode != evaluation: # (n_samples, dim) hidden_layers = [] contexts = [] # Default value for alignment must be smth computable alignment = TT.zeros((1,)) for level in range(self.num_levels): if level > 0: # decoder input_signals[level] += self.inputers[level](hidden_layers[level - 1]) update_signals[level] += self.updaters[level](hidden_layers[level - 1]) reset_signals[level] += self.reseters[level](hidden_layers[level - 1]) add_kwargs = (dict(state_before=init_states[level]) if mode != Decoder.EVALUATION else dict(init_state=init_states[level], batch_size=y.shape[1] if y.ndim == 2 else 1, nsteps=y.shape[0])) if self.state['search']: add_kwargs['c'] = c add_kwargs['c_mask'] = c_mask add_kwargs['return_alignment'] = self.compute_alignment if mode != Decoder.EVALUATION: add_kwargs['step_num'] = step_num result = self.transitions[level]( input_signals[level], mask=y_mask, gater_below=none_if_zero(update_signals[level]), reseter_below=none_if_zero(reset_signals[level]), one_step=mode != Decoder.EVALUATION, use_noise=mode == Decoder.EVALUATION, **add_kwargs) if self.state['search']: if self.compute_alignment: # This implicitly wraps each element of result.out with a Layer to keep track of the parameters. # It is equivalent to h=result[0], ctx=result[1] etc. h, ctx, alignment = result if mode == Decoder.EVALUATION: alignment = alignment.out else: # This implicitly wraps each element of result.out with a Layer to keep track of the parameters. # It is equivalent to h=result[0], ctx=result[1] h, ctx = result else: h = result if mode == Decoder.EVALUATION: ctx = c else: ctx = ReplicateLayer(given_init_states[0].shape[0])(c[c_pos]).out hidden_layers.append(h) contexts.append(ctx) # In hidden_layers we do no have the initial state, but we need it. # Instead of it we have the last one, which we do not need. # So what we do is discard the last one and prepend the initial one. if mode == Decoder.EVALUATION: for level in range(self.num_levels): hidden_layers[level].out = TT.concatenate([ TT.shape_padleft(init_states[level].out), hidden_layers[level].out])[:-1] # The output representation to be fed in softmax. # Shape if mode == evaluation # (n_words, dim_r) # Shape if mode != evaluation # (n_samples, dim_r) # ... where dim_r depends on 'deep_out' option. 
        readout = self.repr_readout(contexts[0])
        for level in range(self.num_levels):
            if mode != Decoder.EVALUATION:
                read_from = init_states[level]
            else:
                read_from = hidden_layers[level]
            read_from_var = read_from if type(read_from) == theano.tensor.TensorVariable else read_from.out
            if read_from_var.ndim == 3:
                read_from_var = read_from_var[:, :, :self.state['dim']]
            else:
                read_from_var = read_from_var[:, :self.state['dim']]
            if type(read_from) != theano.tensor.TensorVariable:
                read_from.out = read_from_var
            else:
                read_from = read_from_var
            readout += self.hidden_readouts[level](read_from)
        if self.state['bigram']:
            if mode != Decoder.EVALUATION:
                # state['check_first_word'] should always be true
                check_first_word = (y > 0
                    if self.state['check_first_word']
                    else TT.ones((y.shape[0]), dtype="float32"))
                # padright is necessary as we want to multiply each row with a certain scalar
                readout += TT.shape_padright(check_first_word) * self.prev_word_readout(approx_embeddings).out
            else:
                if y.ndim == 1:
                    readout += Shift()(self.prev_word_readout(approx_embeddings).reshape(
                        (y.shape[0], 1, self.state['dim'])))
                else:
                    # This place needs explanation. When prev_word_readout is applied to
                    # approx_embeddings the resulting shape is
                    # (n_batches * sequence_length, repr_dimensionality). We first
                    # transform it into a 3D tensor to shift forward in time. Then we
                    # reshape it back.
                    readout += Shift()(self.prev_word_readout(approx_embeddings).reshape(
                        (y.shape[0], y.shape[1], self.state['dim']))).reshape(
                            readout.out.shape)
        for fun in self.output_nonlinearities:
            readout = fun(readout)

        if mode == Decoder.SAMPLING:
            sample = self.output_layer.get_sample(
                state_below=readout,
                temp=T)
            # Current SoftmaxLayer.get_cost is stupid,
            # that's why we have to reshape a lot.
            self.output_layer.get_cost(
                state_below=readout.out,
                temp=T,
                target=sample)
            log_prob = self.output_layer.cost_per_sample
            return [sample] + [log_prob] + hidden_layers
        elif mode == Decoder.BEAM_SEARCH:
            return self.output_layer(
                state_below=readout.out,
                temp=T).out
        elif mode == Decoder.EVALUATION:
            return (self.output_layer.train(
                state_below=readout,
                target=y,
                mask=y_mask,
                reg=None),
                alignment)
        else:
            raise Exception("Unknown mode for build_decoder")

    def sampling_step(self, *args):
        """Implements one step of sampling.

        *args is used because the number (and the order) of the arguments
        passed in by theano.scan can vary.
        """

        args = iter(args)

        # Arguments that correspond to scan's "sequences" parameter:
        step_num = next(args)
        assert step_num.ndim == 0

        # Arguments that correspond to scan's "outputs" parameter:
        prev_word = next(args)
        assert prev_word.ndim == 1
        # skip the previous word log probability
        assert next(args).ndim == 1
        prev_hidden_states = [next(args) for k in range(self.num_levels)]
        assert prev_hidden_states[0].ndim == 2

        # Arguments that correspond to scan's "non_sequences":
        c = next(args)
        assert c.ndim == 2
        T = next(args)
        assert T.ndim == 0

        decoder_args = dict(given_init_states=prev_hidden_states, T=T, c=c)

        sample, log_prob = self.build_decoder(y=prev_word, step_num=step_num,
                mode=Decoder.SAMPLING, **decoder_args)[:2]

        hidden_states = self.build_decoder(y=sample, step_num=step_num,
                mode=Decoder.SAMPLING, **decoder_args)[2:]

        return [sample, log_prob] + hidden_states

    def build_initializers(self, c):
        return [init(c).out for init in self.initializers]

    def build_sampler(self, n_samples, n_steps, T, c):
        states = [TT.zeros(shape=(n_samples,), dtype='int64'),
                TT.zeros(shape=(n_samples,), dtype='float32')]
        # Initialize from the backward part of the first annotation,
        # consistent with create_initializers().
        init_c = c[0, -self.state['dim']:]
        states += [ReplicateLayer(n_samples)(init(init_c).out).out
                for init in self.initializers]

        if not self.state['search']:
            c = PadLayer(n_steps)(c).out  # Pad with final states

        non_sequences = [c, T]

        outputs, updates = theano.scan(self.sampling_step,
                outputs_info=states,
                non_sequences=non_sequences,
                sequences=[TT.arange(n_steps, dtype="int64")],
                n_steps=n_steps,
                name="{}_sampler_scan".format(self.prefix))
        return (outputs[0], outputs[1]), updates

    def build_next_probs_predictor(self, c, step_num, y, init_states):
        return self.build_decoder(c, y, mode=Decoder.BEAM_SEARCH,
                given_init_states=init_states, step_num=step_num)

    def build_next_states_computer(self, c, step_num, y, init_states):
        return self.build_decoder(c, y, mode=Decoder.SAMPLING,
                given_init_states=init_states, step_num=step_num)[2:]


class RNNEncoderDecoder(object):
    """This class encapsulates the translation model.

    The expected usage pattern is:
    >>> encdec = RNNEncoderDecoder(...)
    >>> encdec.build(...)
    >>> useful_smth = encdec.create_useful_smth(...)

    Functions from the create_smth family (except create_lm_model),
    when called, compile and return functions that do useful stuff.
    """

    def __init__(self, state, rng,
            skip_init=False,
            compute_alignment=False):
        """Constructor.

        :param state:
            A state in the usual groundhog sense.
        :param rng:
            Random number generator. Something like numpy.random.RandomState(seed).
        :param skip_init:
            If True, all the layers are initialized with zeros. Saves time
            spent on parameter initialization if they are loaded later anyway.
        :param compute_alignment:
            If True, the alignment is returned by the decoder.
        """

        self.state = state
        self.rng = rng
        self.skip_init = skip_init
        self.compute_alignment = compute_alignment

    def build(self):
        logger.debug("Create input variables")
        self.x = TT.lmatrix('x')
        self.x_mask = TT.matrix('x_mask')
        self.y = TT.lmatrix('y')
        self.y_mask = TT.matrix('y_mask')
        self.inputs = [self.x, self.y, self.x_mask, self.y_mask]

        # Annotation for the log-likelihood computation
        training_c_components = []

        logger.debug("Create forward encoder")
        self.encoder = Encoder(self.state, self.rng,
                prefix="enc",
                skip_init=self.skip_init)
        self.encoder.create_layers()

        logger.debug("Build forward encoding computation graph")
        forward_training_c = self.encoder.build_encoder(
                self.x, self.x_mask,
                use_noise=True,
                return_hidden_layers=True)

        logger.debug("Create backward encoder")
        self.backward_encoder = Encoder(self.state, self.rng,
                prefix="back_enc",
                skip_init=self.skip_init)
        self.backward_encoder.create_layers()

        logger.debug("Build backward encoding computation graph")
        backward_training_c = self.backward_encoder.build_encoder(
                self.x[::-1],
                self.x_mask[::-1],
                use_noise=True,
                approx_embeddings=self.encoder.approx_embedder(self.x[::-1]),
                return_hidden_layers=True)
        # Reverse time for backward representations.
backward_training_c.out = backward_training_c.out[::-1] if self.state['forward']: training_c_components.append(forward_training_c) if self.state['last_forward']: training_c_components.append( ReplicateLayer(self.x.shape[0])(forward_training_c[-1])) if self.state['backward']: training_c_components.append(backward_training_c) if self.state['last_backward']: training_c_components.append(ReplicateLayer(self.x.shape[0]) (backward_training_c[0])) self.state['c_dim'] = len(training_c_components) * self.state['dim'] logger.debug("Create decoder") self.decoder = Decoder(self.state, self.rng, skip_init=self.skip_init, compute_alignment=self.compute_alignment) self.decoder.create_layers() logger.debug("Build log-likelihood computation graph") self.predictions, self.alignment = self.decoder.build_decoder( c=Concatenate(axis=2)(*training_c_components), c_mask=self.x_mask, y=self.y, y_mask=self.y_mask) # Annotation for sampling sampling_c_components = [] logger.debug("Build sampling computation graph") self.sampling_x = TT.lvector("sampling_x") self.n_samples = TT.lscalar("n_samples") self.n_steps = TT.lscalar("n_steps") self.T = TT.scalar("T") self.forward_sampling_c = self.encoder.build_encoder( self.sampling_x, return_hidden_layers=True).out self.backward_sampling_c = self.backward_encoder.build_encoder( self.sampling_x[::-1], approx_embeddings=self.encoder.approx_embedder(self.sampling_x[::-1]), return_hidden_layers=True).out[::-1] if self.state['forward']: sampling_c_components.append(self.forward_sampling_c) if self.state['last_forward']: sampling_c_components.append(ReplicateLayer(self.sampling_x.shape[0]) (self.forward_sampling_c[-1])) if self.state['backward']: sampling_c_components.append(self.backward_sampling_c) if self.state['last_backward']: sampling_c_components.append(ReplicateLayer(self.sampling_x.shape[0]) (self.backward_sampling_c[0])) self.sampling_c = Concatenate(axis=1)(*sampling_c_components).out (self.sample, self.sample_log_prob), self.sampling_updates = \ self.decoder.build_sampler(self.n_samples, self.n_steps, self.T, c=self.sampling_c) logger.debug("Create auxiliary variables") self.c = TT.matrix("c") self.step_num = TT.lscalar("step_num") self.current_states = [TT.matrix("cur_{}".format(i)) for i in range(self.decoder.num_levels)] self.gen_y = TT.lvector("gen_y") def create_lm_model(self): if hasattr(self, 'lm_model'): return self.lm_model self.lm_model = LM_Model( cost_layer=self.predictions, sample_fn=self.create_sampler(), weight_noise_amount=self.state['weight_noise_amount'], indx_word=self.state['indx_word_target'], indx_word_src=self.state['indx_word'], rng=self.rng) self.lm_model.load_dict(self.state) logger.debug("Model params:\n{}".format( pprint.pformat(sorted([p.name for p in self.lm_model.params])))) return self.lm_model def create_representation_computer(self): if not hasattr(self, "repr_fn"): self.repr_fn = theano.function( inputs=[self.sampling_x], outputs=[self.sampling_c], name="repr_fn") return self.repr_fn def create_initializers(self): if not hasattr(self, "init_fn"): init_c = self.sampling_c[0, -self.state['dim']:] self.init_fn = theano.function( inputs=[self.sampling_c], outputs=self.decoder.build_initializers(init_c), name="init_fn") return self.init_fn def create_sampler(self, many_samples=False): if hasattr(self, 'sample_fn'): return self.sample_fn logger.debug("Compile sampler") self.sample_fn = theano.function( inputs=[self.n_samples, self.n_steps, self.T, self.sampling_x], outputs=[self.sample, self.sample_log_prob], updates=self.sampling_updates, 
name="sample_fn") if not many_samples: def sampler(*args): return map(lambda x: x.squeeze(), self.sample_fn(1, *args)) return sampler return self.sample_fn def create_scorer(self, batch=False): if not hasattr(self, 'score_fn'): logger.debug("Compile scorer") self.score_fn = theano.function( inputs=self.inputs, outputs=[-self.predictions.cost_per_sample], name="score_fn") if batch: return self.score_fn def scorer(x, y): x_mask = numpy.ones(x.shape[0], dtype="float32") y_mask = numpy.ones(y.shape[0], dtype="float32") return self.score_fn(x[:, None], y[:, None], x_mask[:, None], y_mask[:, None]) return scorer def create_next_probs_computer(self): if not hasattr(self, 'next_probs_fn'): self.next_probs_fn = theano.function( inputs=[self.c, self.step_num, self.gen_y] + self.current_states, outputs=[self.decoder.build_next_probs_predictor( self.c, self.step_num, self.gen_y, self.current_states)], name="next_probs_fn", on_unused_input='warn') return self.next_probs_fn def create_next_states_computer(self): if not hasattr(self, 'next_states_fn'): self.next_states_fn = theano.function( inputs=[self.c, self.step_num, self.gen_y] + self.current_states, outputs=self.decoder.build_next_states_computer( self.c, self.step_num, self.gen_y, self.current_states), name="next_states_fn", on_unused_input='warn') return self.next_states_fn def create_probs_computer(self, return_alignment=False): if not hasattr(self, 'probs_fn'): logger.debug("Compile probs computer") self.probs_fn = theano.function( inputs=self.inputs, outputs=[self.predictions.word_probs, self.alignment], name="probs_fn") def probs_computer(x, y): x_mask = numpy.ones(x.shape[0], dtype="float32") y_mask = numpy.ones(y.shape[0], dtype="float32") probs, alignment = self.probs_fn(x[:, None], y[:, None], x_mask[:, None], y_mask[:, None]) if return_alignment: return probs, alignment else: return probs return probs_computer def parse_input(state, word2idx, line, raise_unk=False, idx2word=None, unk_sym=-1, null_sym=-1): if unk_sym < 0: unk_sym = state['unk_sym_source'] if null_sym < 0: null_sym = state['null_sym_source'] seqin = list(line.strip().decode('utf-8')) seqlen = len(seqin) seq = numpy.zeros(seqlen + 1, dtype='int64') for idx, sx in enumerate(seqin): seq[idx] = word2idx.get(sx, unk_sym) if seq[idx] >= state['n_sym_source']: seq[idx] = unk_sym if seq[idx] == unk_sym: raise Exception("Unknown word {}".format(sx)) seq[-1] = null_sym if idx2word: idx2word[null_sym] = '<eos>' idx2word[unk_sym] = state['oov'] parsed_in = [idx2word[sx] for sx in seq] return seq, " ".join(parsed_in) return seq, seqin
{ "content_hash": "adc948014319680506ea3020c59c1686", "timestamp": "", "source": "github", "line_count": 1552, "max_line_length": 120, "avg_line_length": 40.22422680412371, "alnum_prop": 0.5380438264881143, "repo_name": "Komzpa/GroundHog", "id": "762b1fec70127219c8e667894370a0efd0086c2a", "size": "62428", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "experiments/nmt/encdec.py", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "HTML", "bytes": "9676" }, { "name": "PHP", "bytes": "372" }, { "name": "Perl", "bytes": "32592" }, { "name": "Python", "bytes": "492146" }, { "name": "Shell", "bytes": "3211" }, { "name": "Smalltalk", "bytes": "1892" } ], "symlink_target": "" }
from rest_framework import response, permissions from rest_framework.renderers import CoreJSONRenderer from rest_framework.views import APIView from drf_openapi.codec import OpenAPIRenderer, SwaggerUIRenderer from drf_openapi.entities import OpenApiSchemaGenerator class SchemaView(APIView): renderer_classes = (CoreJSONRenderer, SwaggerUIRenderer, OpenAPIRenderer) permission_classes = (permissions.IsAdminUser,) url = '' title = 'API Documentation' def get(self, request, version): generator = OpenApiSchemaGenerator( version=version, url=self.url, title=self.title ) return response.Response(generator.get_schema(request))
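
# ---------------------------------------------------------------------------
# Usage sketch: a hypothetical urls.py entry showing how this view is
# typically exposed. The regex, the url/title values and the route name are
# illustrative assumptions, not part of this module.
#
#     from django.conf.urls import url
#     from drf_openapi.views import SchemaView
#
#     urlpatterns = [
#         url(r'^api/(?P<version>[vV][0-9]+)/schema/$',
#             SchemaView.as_view(url='/api/', title='My Service API'),
#             name='api_schema'),
#     ]
#
# The captured `version` kwarg is what get() receives and forwards to
# OpenApiSchemaGenerator, so the rendered schema is version-aware.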
{ "content_hash": "2efbf4af2dd81446a37be4b3c8e07a70", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 77, "avg_line_length": 33.80952380952381, "alnum_prop": 0.7323943661971831, "repo_name": "limdauto/drf_openapi", "id": "c9cf8c04661671227d57d0628e7602d8f8966065", "size": "725", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "drf_openapi/views.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "480" }, { "name": "Makefile", "bytes": "2285" }, { "name": "Python", "bytes": "33403" } ], "symlink_target": "" }
from pymongo import MongoClient
from bson import ObjectId
import os

client = MongoClient('localhost', 27017)
root = client.root
print("Connected to db")
basedir = os.getcwd()
for dir in os.listdir(basedir):
    # Only descend into sub-directories named after ground ObjectIds;
    # skip this script (and any other plain files) in the same folder.
    if dir != 'insert-image-names.py' and os.path.isdir(os.path.join(basedir, dir)):
        print(dir)
        dbQuery1 = root.grounds.find({'_id': ObjectId(dir)})
        grounds = list(dbQuery1)
        print(grounds)
        images = {}
        for subdir in os.listdir(dir):
            if os.path.isdir(os.path.join(dir, subdir)):
                print(subdir)
                images[subdir] = []
                for file in os.listdir(os.path.join(dir, subdir)):
                    print(file)
                    images[subdir].append(file)
        print(images)
        root.grounds.update(
            { "_id": ObjectId(dir) },
            { "$set": { "images": images } }
        )
        print("Updated: " + dir)
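
# ---------------------------------------------------------------------------
# Expected layout (illustrative): the script assumes the working directory
# contains one sub-directory per ground, named with that ground's ObjectId.
# The id and file names below are made up:
#
#     5a1b2c3d4e5f60718293a4b5/
#         gallery/
#             front.jpg
#             pitch.jpg
#         logo/
#             crest.png
#
# Running the script would set that document's `images` field to:
#
#     {"gallery": ["front.jpg", "pitch.jpg"], "logo": ["crest.png"]}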
{ "content_hash": "3b1dde5027334eedf1b9cfacd7e92ef0", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 54, "avg_line_length": 19.973684210526315, "alnum_prop": 0.6337285902503293, "repo_name": "nishank-jain/profile", "id": "fa31c8ac3096c124914e786458742080a5d1df0f", "size": "759", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "migrations/insert-image-names.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "229751" }, { "name": "HTML", "bytes": "110713" }, { "name": "JavaScript", "bytes": "223689" }, { "name": "Python", "bytes": "4656" } ], "symlink_target": "" }
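A small follow-up check for the migration above, reusing the script's client/root handles; it assumes only the images shape the script itself writes ({subdir: [filename, ...]}).

# Verification sketch: list what the migration stored per ground.
for ground in root.grounds.find({'images': {'$exists': True}}):
    for subdir, files in ground['images'].items():
        assert isinstance(files, list)
        print(ground['_id'], subdir, len(files), 'files')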
import unittest from typing import cast from pyspark.rdd import PythonEvalType from pyspark.sql import Row from pyspark.sql.functions import ( array, explode, col, lit, mean, sum, udf, pandas_udf, PandasUDFType, ) from pyspark.sql.types import ArrayType, TimestampType from pyspark.sql.utils import AnalysisException from pyspark.testing.sqlutils import ( ReusedSQLTestCase, have_pandas, have_pyarrow, pandas_requirement_message, pyarrow_requirement_message, ) from pyspark.testing.utils import QuietTest if have_pandas: import pandas as pd from pandas.testing import assert_frame_equal @unittest.skipIf( not have_pandas or not have_pyarrow, cast(str, pandas_requirement_message or pyarrow_requirement_message), ) class GroupedAggPandasUDFTests(ReusedSQLTestCase): @property def data(self): return ( self.spark.range(10) .toDF("id") .withColumn("vs", array([lit(i * 1.0) + col("id") for i in range(20, 30)])) .withColumn("v", explode(col("vs"))) .drop("vs") .withColumn("w", lit(1.0)) ) @property def python_plus_one(self): @udf("double") def plus_one(v): assert isinstance(v, (int, float)) return float(v + 1) return plus_one @property def pandas_scalar_plus_two(self): @pandas_udf("double", PandasUDFType.SCALAR) def plus_two(v): assert isinstance(v, pd.Series) return v + 2 return plus_two @property def pandas_agg_mean_udf(self): @pandas_udf("double", PandasUDFType.GROUPED_AGG) def avg(v): return v.mean() return avg @property def pandas_agg_sum_udf(self): @pandas_udf("double", PandasUDFType.GROUPED_AGG) def sum(v): return v.sum() return sum @property def pandas_agg_weighted_mean_udf(self): import numpy as np @pandas_udf("double", PandasUDFType.GROUPED_AGG) def weighted_mean(v, w): return np.average(v, weights=w) return weighted_mean def test_manual(self): df = self.data sum_udf = self.pandas_agg_sum_udf mean_udf = self.pandas_agg_mean_udf mean_arr_udf = pandas_udf( self.pandas_agg_mean_udf.func, ArrayType(self.pandas_agg_mean_udf.returnType), self.pandas_agg_mean_udf.evalType, ) result1 = ( df.groupby("id") .agg(sum_udf(df.v), mean_udf(df.v), mean_arr_udf(array(df.v))) .sort("id") ) expected1 = self.spark.createDataFrame( [ [0, 245.0, 24.5, [24.5]], [1, 255.0, 25.5, [25.5]], [2, 265.0, 26.5, [26.5]], [3, 275.0, 27.5, [27.5]], [4, 285.0, 28.5, [28.5]], [5, 295.0, 29.5, [29.5]], [6, 305.0, 30.5, [30.5]], [7, 315.0, 31.5, [31.5]], [8, 325.0, 32.5, [32.5]], [9, 335.0, 33.5, [33.5]], ], ["id", "sum(v)", "avg(v)", "avg(array(v))"], ) assert_frame_equal(expected1.toPandas(), result1.toPandas()) def test_basic(self): df = self.data weighted_mean_udf = self.pandas_agg_weighted_mean_udf # Groupby one column and aggregate one UDF with literal result1 = df.groupby("id").agg(weighted_mean_udf(df.v, lit(1.0))).sort("id") expected1 = df.groupby("id").agg(mean(df.v).alias("weighted_mean(v, 1.0)")).sort("id") assert_frame_equal(expected1.toPandas(), result1.toPandas()) # Groupby one expression and aggregate one UDF with literal result2 = df.groupby((col("id") + 1)).agg(weighted_mean_udf(df.v, lit(1.0))).sort(df.id + 1) expected2 = ( df.groupby((col("id") + 1)) .agg(mean(df.v).alias("weighted_mean(v, 1.0)")) .sort(df.id + 1) ) assert_frame_equal(expected2.toPandas(), result2.toPandas()) # Groupby one column and aggregate one UDF without literal result3 = df.groupby("id").agg(weighted_mean_udf(df.v, df.w)).sort("id") expected3 = df.groupby("id").agg(mean(df.v).alias("weighted_mean(v, w)")).sort("id") assert_frame_equal(expected3.toPandas(), result3.toPandas()) # Groupby one expression and aggregate one UDF without literal 
result4 = (
            df.groupby((col("id") + 1).alias("id")).agg(weighted_mean_udf(df.v, df.w)).sort("id")
        )
        expected4 = (
            df.groupby((col("id") + 1).alias("id"))
            .agg(mean(df.v).alias("weighted_mean(v, w)"))
            .sort("id")
        )
        assert_frame_equal(expected4.toPandas(), result4.toPandas())

    def test_unsupported_types(self):
        with QuietTest(self.sc):
            with self.assertRaisesRegex(NotImplementedError, "not supported"):
                pandas_udf(
                    lambda x: x, ArrayType(ArrayType(TimestampType())), PandasUDFType.GROUPED_AGG
                )

        with QuietTest(self.sc):
            with self.assertRaisesRegex(NotImplementedError, "not supported"):

                @pandas_udf("mean double, std double", PandasUDFType.GROUPED_AGG)
                def mean_and_std_udf(v):
                    return v.mean(), v.std()

        with QuietTest(self.sc):
            with self.assertRaisesRegex(NotImplementedError, "not supported"):

                @pandas_udf(ArrayType(TimestampType()), PandasUDFType.GROUPED_AGG)
                def mean_and_std_udf(v):  # noqa: F811
                    return {v.mean(): v.std()}

    def test_alias(self):
        df = self.data
        mean_udf = self.pandas_agg_mean_udf

        result1 = df.groupby("id").agg(mean_udf(df.v).alias("mean_alias"))
        expected1 = df.groupby("id").agg(mean(df.v).alias("mean_alias"))

        assert_frame_equal(expected1.toPandas(), result1.toPandas())

    def test_mixed_sql(self):
        """
        Test mixing group aggregate pandas UDF with sql expression.
        """
        df = self.data
        sum_udf = self.pandas_agg_sum_udf

        # Mix group aggregate pandas UDF with sql expression
        result1 = df.groupby("id").agg(sum_udf(df.v) + 1).sort("id")
        expected1 = df.groupby("id").agg(sum(df.v) + 1).sort("id")

        # Mix group aggregate pandas UDF with sql expression (order swapped)
        result2 = df.groupby("id").agg(sum_udf(df.v + 1)).sort("id")
        expected2 = df.groupby("id").agg(sum(df.v + 1)).sort("id")

        # Wrap group aggregate pandas UDF with two sql expressions
        result3 = df.groupby("id").agg(sum_udf(df.v + 1) + 2).sort("id")
        expected3 = df.groupby("id").agg(sum(df.v + 1) + 2).sort("id")

        assert_frame_equal(expected1.toPandas(), result1.toPandas())
        assert_frame_equal(expected2.toPandas(), result2.toPandas())
        assert_frame_equal(expected3.toPandas(), result3.toPandas())

    def test_mixed_udfs(self):
        """
        Test mixing group aggregate pandas UDF with python UDF and scalar pandas UDF.
        """
        df = self.data
        plus_one = self.python_plus_one
        plus_two = self.pandas_scalar_plus_two
        sum_udf = self.pandas_agg_sum_udf

        # Mix group aggregate pandas UDF and python UDF
        result1 = df.groupby("id").agg(plus_one(sum_udf(df.v))).sort("id")
        expected1 = df.groupby("id").agg(plus_one(sum(df.v))).sort("id")

        # Mix group aggregate pandas UDF and python UDF (order swapped)
        result2 = df.groupby("id").agg(sum_udf(plus_one(df.v))).sort("id")
        expected2 = df.groupby("id").agg(sum(plus_one(df.v))).sort("id")

        # Mix group aggregate pandas UDF and scalar pandas UDF
        result3 = df.groupby("id").agg(sum_udf(plus_two(df.v))).sort("id")
        expected3 = df.groupby("id").agg(sum(plus_two(df.v))).sort("id")

        # Mix group aggregate pandas UDF and scalar pandas UDF (order swapped)
        result4 = df.groupby("id").agg(plus_two(sum_udf(df.v))).sort("id")
        expected4 = df.groupby("id").agg(plus_two(sum(df.v))).sort("id")

        # Wrap group aggregate pandas UDF with two python UDFs and use python UDF in groupby
        result5 = (
            df.groupby(plus_one(df.id)).agg(plus_one(sum_udf(plus_one(df.v)))).sort("plus_one(id)")
        )
        expected5 = (
            df.groupby(plus_one(df.id)).agg(plus_one(sum(plus_one(df.v)))).sort("plus_one(id)")
        )

        # Wrap group aggregate pandas UDF with two scalar pandas UDFs and use
        # scalar pandas UDF in groupby
        result6 = (
            df.groupby(plus_two(df.id)).agg(plus_two(sum_udf(plus_two(df.v)))).sort("plus_two(id)")
        )
        expected6 = (
            df.groupby(plus_two(df.id)).agg(plus_two(sum(plus_two(df.v)))).sort("plus_two(id)")
        )

        assert_frame_equal(expected1.toPandas(), result1.toPandas())
        assert_frame_equal(expected2.toPandas(), result2.toPandas())
        assert_frame_equal(expected3.toPandas(), result3.toPandas())
        assert_frame_equal(expected4.toPandas(), result4.toPandas())
        assert_frame_equal(expected5.toPandas(), result5.toPandas())
        assert_frame_equal(expected6.toPandas(), result6.toPandas())

    def test_multiple_udfs(self):
        """
        Test multiple group aggregate pandas UDFs in one agg function.
""" df = self.data mean_udf = self.pandas_agg_mean_udf sum_udf = self.pandas_agg_sum_udf weighted_mean_udf = self.pandas_agg_weighted_mean_udf result1 = ( df.groupBy("id") .agg(mean_udf(df.v), sum_udf(df.v), weighted_mean_udf(df.v, df.w)) .sort("id") .toPandas() ) expected1 = ( df.groupBy("id") .agg(mean(df.v), sum(df.v), mean(df.v).alias("weighted_mean(v, w)")) .sort("id") .toPandas() ) assert_frame_equal(expected1, result1) def test_complex_groupby(self): df = self.data sum_udf = self.pandas_agg_sum_udf plus_one = self.python_plus_one plus_two = self.pandas_scalar_plus_two # groupby one expression result1 = df.groupby(df.v % 2).agg(sum_udf(df.v)) expected1 = df.groupby(df.v % 2).agg(sum(df.v)) # empty groupby result2 = df.groupby().agg(sum_udf(df.v)) expected2 = df.groupby().agg(sum(df.v)) # groupby one column and one sql expression result3 = df.groupby(df.id, df.v % 2).agg(sum_udf(df.v)).orderBy(df.id, df.v % 2) expected3 = df.groupby(df.id, df.v % 2).agg(sum(df.v)).orderBy(df.id, df.v % 2) # groupby one python UDF result4 = df.groupby(plus_one(df.id)).agg(sum_udf(df.v)).sort("plus_one(id)") expected4 = df.groupby(plus_one(df.id)).agg(sum(df.v)).sort("plus_one(id)") # groupby one scalar pandas UDF result5 = df.groupby(plus_two(df.id)).agg(sum_udf(df.v)).sort("sum(v)") expected5 = df.groupby(plus_two(df.id)).agg(sum(df.v)).sort("sum(v)") # groupby one expression and one python UDF result6 = ( df.groupby(df.v % 2, plus_one(df.id)) .agg(sum_udf(df.v)) .sort(["(v % 2)", "plus_one(id)"]) ) expected6 = ( df.groupby(df.v % 2, plus_one(df.id)).agg(sum(df.v)).sort(["(v % 2)", "plus_one(id)"]) ) # groupby one expression and one scalar pandas UDF result7 = ( df.groupby(df.v % 2, plus_two(df.id)) .agg(sum_udf(df.v)) .sort(["sum(v)", "plus_two(id)"]) ) expected7 = ( df.groupby(df.v % 2, plus_two(df.id)).agg(sum(df.v)).sort(["sum(v)", "plus_two(id)"]) ) assert_frame_equal(expected1.toPandas(), result1.toPandas()) assert_frame_equal(expected2.toPandas(), result2.toPandas()) assert_frame_equal(expected3.toPandas(), result3.toPandas()) assert_frame_equal(expected4.toPandas(), result4.toPandas()) assert_frame_equal(expected5.toPandas(), result5.toPandas()) assert_frame_equal(expected6.toPandas(), result6.toPandas()) assert_frame_equal(expected7.toPandas(), result7.toPandas()) def test_complex_expressions(self): df = self.data plus_one = self.python_plus_one plus_two = self.pandas_scalar_plus_two sum_udf = self.pandas_agg_sum_udf # Test complex expressions with sql expression, python UDF and # group aggregate pandas UDF result1 = ( df.withColumn("v1", plus_one(df.v)) .withColumn("v2", df.v + 2) .groupby(df.id, df.v % 2) .agg( sum_udf(col("v")), sum_udf(col("v1") + 3), sum_udf(col("v2")) + 5, plus_one(sum_udf(col("v1"))), sum_udf(plus_one(col("v2"))), ) .sort(["id", "(v % 2)"]) .toPandas() .sort_values(by=["id", "(v % 2)"]) ) expected1 = ( df.withColumn("v1", df.v + 1) .withColumn("v2", df.v + 2) .groupby(df.id, df.v % 2) .agg( sum(col("v")), sum(col("v1") + 3), sum(col("v2")) + 5, plus_one(sum(col("v1"))), sum(plus_one(col("v2"))), ) .sort(["id", "(v % 2)"]) .toPandas() .sort_values(by=["id", "(v % 2)"]) ) # Test complex expressions with sql expression, scala pandas UDF and # group aggregate pandas UDF result2 = ( df.withColumn("v1", plus_one(df.v)) .withColumn("v2", df.v + 2) .groupby(df.id, df.v % 2) .agg( sum_udf(col("v")), sum_udf(col("v1") + 3), sum_udf(col("v2")) + 5, plus_two(sum_udf(col("v1"))), sum_udf(plus_two(col("v2"))), ) .sort(["id", "(v % 2)"]) .toPandas() .sort_values(by=["id", "(v % 2)"]) 
) expected2 = ( df.withColumn("v1", df.v + 1) .withColumn("v2", df.v + 2) .groupby(df.id, df.v % 2) .agg( sum(col("v")), sum(col("v1") + 3), sum(col("v2")) + 5, plus_two(sum(col("v1"))), sum(plus_two(col("v2"))), ) .sort(["id", "(v % 2)"]) .toPandas() .sort_values(by=["id", "(v % 2)"]) ) # Test sequential groupby aggregate result3 = ( df.groupby("id") .agg(sum_udf(df.v).alias("v")) .groupby("id") .agg(sum_udf(col("v"))) .sort("id") .toPandas() ) expected3 = ( df.groupby("id") .agg(sum(df.v).alias("v")) .groupby("id") .agg(sum(col("v"))) .sort("id") .toPandas() ) assert_frame_equal(expected1, result1) assert_frame_equal(expected2, result2) assert_frame_equal(expected3, result3) def test_retain_group_columns(self): with self.sql_conf({"spark.sql.retainGroupColumns": False}): df = self.data sum_udf = self.pandas_agg_sum_udf result1 = df.groupby(df.id).agg(sum_udf(df.v)) expected1 = df.groupby(df.id).agg(sum(df.v)) assert_frame_equal(expected1.toPandas(), result1.toPandas()) def test_array_type(self): df = self.data array_udf = pandas_udf(lambda x: [1.0, 2.0], "array<double>", PandasUDFType.GROUPED_AGG) result1 = df.groupby("id").agg(array_udf(df["v"]).alias("v2")) self.assertEqual(result1.first()["v2"], [1.0, 2.0]) def test_invalid_args(self): df = self.data plus_one = self.python_plus_one mean_udf = self.pandas_agg_mean_udf with QuietTest(self.sc): with self.assertRaisesRegex(AnalysisException, "nor.*aggregate function"): df.groupby(df.id).agg(plus_one(df.v)).collect() with QuietTest(self.sc): with self.assertRaisesRegex( AnalysisException, "aggregate function.*argument.*aggregate function" ): df.groupby(df.id).agg(mean_udf(mean_udf(df.v))).collect() with QuietTest(self.sc): with self.assertRaisesRegex( AnalysisException, "mixture.*aggregate function.*group aggregate pandas UDF" ): df.groupby(df.id).agg(mean_udf(df.v), mean(df.v)).collect() def test_register_vectorized_udf_basic(self): sum_pandas_udf = pandas_udf( lambda v: v.sum(), "integer", PythonEvalType.SQL_GROUPED_AGG_PANDAS_UDF ) self.assertEqual(sum_pandas_udf.evalType, PythonEvalType.SQL_GROUPED_AGG_PANDAS_UDF) group_agg_pandas_udf = self.spark.udf.register("sum_pandas_udf", sum_pandas_udf) self.assertEqual(group_agg_pandas_udf.evalType, PythonEvalType.SQL_GROUPED_AGG_PANDAS_UDF) q = "SELECT sum_pandas_udf(v1) FROM VALUES (3, 0), (2, 0), (1, 1) tbl(v1, v2) GROUP BY v2" actual = sorted(map(lambda r: r[0], self.spark.sql(q).collect())) expected = [1, 5] self.assertEqual(actual, expected) def test_grouped_with_empty_partition(self): data = [Row(id=1, x=2), Row(id=1, x=3), Row(id=2, x=4)] expected = [Row(id=1, sum=5), Row(id=2, x=4)] num_parts = len(data) + 1 df = self.spark.createDataFrame(self.sc.parallelize(data, numSlices=num_parts)) f = pandas_udf(lambda x: x.sum(), "int", PandasUDFType.GROUPED_AGG) result = df.groupBy("id").agg(f(df["x"]).alias("sum")).collect() self.assertEqual(result, expected) def test_grouped_without_group_by_clause(self): @pandas_udf("double", PandasUDFType.GROUPED_AGG) def max_udf(v): return v.max() df = self.spark.range(0, 100) self.spark.udf.register("max_udf", max_udf) with self.tempView("table"): df.createTempView("table") agg1 = df.agg(max_udf(df["id"])) agg2 = self.spark.sql("select max_udf(id) from table") assert_frame_equal(agg1.toPandas(), agg2.toPandas()) def test_no_predicate_pushdown_through(self): # SPARK-30921: We should not pushdown predicates of PythonUDFs through Aggregate. 
import numpy as np @pandas_udf("float", PandasUDFType.GROUPED_AGG) def mean(x): return np.mean(x) df = self.spark.createDataFrame([Row(id=1, foo=42), Row(id=2, foo=1), Row(id=2, foo=2)]) agg = df.groupBy("id").agg(mean("foo").alias("mean")) filtered = agg.filter(agg["mean"] > 40.0) assert filtered.collect()[0]["mean"] == 42.0 if __name__ == "__main__": from pyspark.sql.tests.test_pandas_udf_grouped_agg import * # noqa: F401 try: import xmlrunner # type: ignore[import] testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2) except ImportError: testRunner = None unittest.main(testRunner=testRunner, verbosity=2)
{ "content_hash": "936e16c3c3d787388fd2e35bb2973f67", "timestamp": "", "source": "github", "line_count": 540, "max_line_length": 100, "avg_line_length": 36.0462962962963, "alnum_prop": 0.548831235550989, "repo_name": "xuanyuanking/spark", "id": "e67190fa58896db0de815181490c9dc22c67db2e", "size": "20250", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "python/pyspark/sql/tests/test_pandas_udf_grouped_agg.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "54336" }, { "name": "Batchfile", "bytes": "27405" }, { "name": "C", "bytes": "1493" }, { "name": "CSS", "bytes": "26221" }, { "name": "Dockerfile", "bytes": "9711" }, { "name": "HTML", "bytes": "42080" }, { "name": "HiveQL", "bytes": "1872438" }, { "name": "Java", "bytes": "4519872" }, { "name": "JavaScript", "bytes": "222664" }, { "name": "Jupyter Notebook", "bytes": "4310516" }, { "name": "Makefile", "bytes": "2374" }, { "name": "PLpgSQL", "bytes": "352963" }, { "name": "PowerShell", "bytes": "4221" }, { "name": "Python", "bytes": "7388289" }, { "name": "R", "bytes": "1272682" }, { "name": "ReScript", "bytes": "240" }, { "name": "Roff", "bytes": "31791" }, { "name": "Scala", "bytes": "40053974" }, { "name": "Shell", "bytes": "230591" }, { "name": "Thrift", "bytes": "2016" }, { "name": "q", "bytes": "98156" } ], "symlink_target": "" }
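Stripped of the test harness, the grouped-aggregate pandas UDF mechanism these tests exercise reduces to a few lines; a self-contained sketch assuming a local SparkSession with pandas and pyarrow available (the data and column names are arbitrary).

# Minimal grouped-aggregate pandas UDF, mirroring pandas_agg_mean_udf above.
from pyspark.sql import SparkSession
from pyspark.sql.functions import pandas_udf, PandasUDFType

spark = SparkSession.builder.master("local[1]").getOrCreate()
df = spark.createDataFrame([(1, 1.0), (1, 2.0), (2, 3.0)], ["id", "v"])

@pandas_udf("double", PandasUDFType.GROUPED_AGG)
def mean_udf(v):
    return v.mean()

df.groupBy("id").agg(mean_udf(df["v"]).alias("mean_v")).show()
# id=1 -> 1.5, id=2 -> 3.0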
__all__ = ['general', 'utils', 'movies', 'tv', 'library']
{ "content_hash": "e2e2fe3987ee441b7ed1e26199e5aea2", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 57, "avg_line_length": 57, "alnum_prop": 0.5263157894736842, "repo_name": "Zo0x/hedhes", "id": "3b34406a8f716f25d8ef046bec49c10a26840a6b", "size": "57", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/viewers/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "16730" }, { "name": "HTML", "bytes": "41034" }, { "name": "JavaScript", "bytes": "14809" }, { "name": "Python", "bytes": "59818" }, { "name": "Shell", "bytes": "830" } ], "symlink_target": "" }
""" This module implements OpenFlow 1.3.x. This module also implements some of extensions shown in "OpenFlow Extensions for 1.3.X Pack 1". Namely, the following extensions are implemented. - EXT-236 Bad flow entry priority error Extension - EXT-237 Set async config error Extension - EXT-256 PBB UCA header field Extension - EXT-260 Duplicate instruction error Extension - EXT-264 Multipart timeout errors Extension The following extensions are partially implemented. - EXT-187 Flow entry notifications Extension (ONFMP_FLOW_MONITOR only) - EXT-230 Bundle Extension (Error codes only) - EXT-232 Table synchronisation Extension (Error codes only) The following extensions are not implemented yet. - EXT-191 Role Status Extension - EXT-192-e Flow entry eviction Extension - EXT-192-v Vacancy events Extension """ import struct import itertools from ryu.lib import addrconv from ryu.lib import mac from ryu.lib.pack_utils import msg_pack_into from ryu import utils from ryu.ofproto.ofproto_parser import StringifyMixin, MsgBase, msg_str_attr from . import ether from . import nicira_ext from . import ofproto_parser from . import ofproto_common from . import ofproto_v1_3 as ofproto import logging LOG = logging.getLogger('ryu.ofproto.ofproto_v1_3_parser') _MSG_PARSERS = {} def _set_msg_type(msg_type): def _set_cls_msg_type(cls): cls.cls_msg_type = msg_type return cls return _set_cls_msg_type def _register_parser(cls): '''class decorator to register msg parser''' assert cls.cls_msg_type is not None assert cls.cls_msg_type not in _MSG_PARSERS _MSG_PARSERS[cls.cls_msg_type] = cls.parser return cls @ofproto_parser.register_msg_parser(ofproto.OFP_VERSION) def msg_parser(datapath, version, msg_type, msg_len, xid, buf): parser = _MSG_PARSERS.get(msg_type) return parser(datapath, version, msg_type, msg_len, xid, buf) @_register_parser @_set_msg_type(ofproto.OFPT_HELLO) class OFPHello(MsgBase): """ Hello message When connection is started, the hello message is exchanged between a switch and a controller. This message is handled by the Ryu framework, so the Ryu application do not need to process this typically. ========== ========================================================= Attribute Description ========== ========================================================= elements list of ``OFPHelloElemVersionBitmap`` instance ========== ========================================================= """ def __init__(self, datapath, elements=[]): super(OFPHello, self).__init__(datapath) self.elements = elements @classmethod def parser(cls, datapath, version, msg_type, msg_len, xid, buf): msg = super(OFPHello, cls).parser(datapath, version, msg_type, msg_len, xid, buf) offset = ofproto.OFP_HELLO_HEADER_SIZE elems = [] while offset < msg.msg_len: type_, length = struct.unpack_from( ofproto.OFP_HELLO_ELEM_HEADER_PACK_STR, msg.buf, offset) # better to register Hello Element classes but currently # Only VerisonBitmap is supported so let's be simple. 
if type_ == ofproto.OFPHET_VERSIONBITMAP:
                elem = OFPHelloElemVersionBitmap.parser(msg.buf, offset)
                elems.append(elem)
            offset += length
        msg.elements = elems
        return msg


class OFPHelloElemVersionBitmap(StringifyMixin):
    """
    Version bitmap Hello Element

    ========== =========================================================
    Attribute  Description
    ========== =========================================================
    versions   list of versions of OpenFlow protocol a device supports
    ========== =========================================================
    """
    def __init__(self, versions, type_=None, length=None):
        super(OFPHelloElemVersionBitmap, self).__init__()
        self.type = ofproto.OFPHET_VERSIONBITMAP
        self.length = None
        self._bitmaps = None
        self.versions = versions

    @classmethod
    def parser(cls, buf, offset):
        type_, length = struct.unpack_from(
            ofproto.OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_PACK_STR,
            buf, offset)
        assert type_ == ofproto.OFPHET_VERSIONBITMAP

        bitmaps_len = (length -
                       ofproto.OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_SIZE)
        offset += ofproto.OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_SIZE
        bitmaps = []
        while bitmaps_len >= 4:
            bitmap = struct.unpack_from('!I', buf, offset)
            bitmaps.append(bitmap[0])
            offset += 4
            bitmaps_len -= 4

        # scan all 32 bits of each bitmap word (range(32), not range(31),
        # which would silently drop the highest version bit of every word)
        versions = [i * 32 + shift
                    for i, bitmap in enumerate(bitmaps)
                    for shift in range(32) if bitmap & (1 << shift)]
        elem = cls(versions)
        elem.length = length
        elem._bitmaps = bitmaps
        return elem


@_register_parser
@_set_msg_type(ofproto.OFPT_ERROR)
class OFPErrorMsg(MsgBase):
    """
    Error message

    The switch notifies the controller of problems by this message.

    ========== =========================================================
    Attribute  Description
    ========== =========================================================
    type       High level type of error
    code       Details depending on the type
    data       Variable length data depending on the type and code
    ========== =========================================================

    ``type`` attribute corresponds to ``type_`` parameter of __init__.

    Types and codes are defined in ``ryu.ofproto.ofproto``.
============================= =========== Type Code ============================= =========== OFPET_HELLO_FAILED OFPHFC_* OFPET_BAD_REQUEST OFPBRC_* OFPET_BAD_ACTION OFPBAC_* OFPET_BAD_INSTRUCTION OFPBIC_* OFPET_BAD_MATCH OFPBMC_* OFPET_FLOW_MOD_FAILED OFPFMFC_* OFPET_GROUP_MOD_FAILED OFPGMFC_* OFPET_PORT_MOD_FAILED OFPPMFC_* OFPET_TABLE_MOD_FAILED OFPTMFC_* OFPET_QUEUE_OP_FAILED OFPQOFC_* OFPET_SWITCH_CONFIG_FAILED OFPSCFC_* OFPET_ROLE_REQUEST_FAILED OFPRRFC_* OFPET_METER_MOD_FAILED OFPMMFC_* OFPET_TABLE_FEATURES_FAILED OFPTFFC_* OFPET_EXPERIMENTER N/A ============================= =========== Example:: @set_ev_cls(ofp_event.EventOFPErrorMsg, [HANDSHAKE_DISPATCHER, CONFIG_DISPATCHER, MAIN_DISPATCHER]) def error_msg_handler(self, ev): msg = ev.msg self.logger.debug('OFPErrorMsg received: type=0x%02x code=0x%02x ' 'message=%s', msg.type, msg.code, utils.hex_array(msg.data)) """ def __init__(self, datapath, type_=None, code=None, data=None): super(OFPErrorMsg, self).__init__(datapath) self.type = type_ self.code = code self.data = data @classmethod def parser(cls, datapath, version, msg_type, msg_len, xid, buf): type_, = struct.unpack_from('!H', buffer(buf), ofproto.OFP_HEADER_SIZE) if type_ == ofproto.OFPET_EXPERIMENTER: return OFPErrorExperimenterMsg.parser(datapath, version, msg_type, msg_len, xid, buf) msg = super(OFPErrorMsg, cls).parser(datapath, version, msg_type, msg_len, xid, buf) msg.type, msg.code = struct.unpack_from( ofproto.OFP_ERROR_MSG_PACK_STR, msg.buf, ofproto.OFP_HEADER_SIZE) msg.data = msg.buf[ofproto.OFP_ERROR_MSG_SIZE:] return msg def _serialize_body(self): assert self.data is not None msg_pack_into(ofproto.OFP_ERROR_MSG_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.type, self.code) self.buf += self.data class OFPErrorExperimenterMsg(MsgBase): def __init__(self, datapath, type_=None, exp_type=None, experimenter=None, data=None): super(OFPErrorExperimenterMsg, self).__init__(datapath) self.type = ofproto.OFPET_EXPERIMENTER self.exp_type = exp_type self.experimenter = experimenter self.data = data @classmethod def parser(cls, datapath, version, msg_type, msg_len, xid, buf): cls.cls_msg_type = msg_type msg = super(OFPErrorExperimenterMsg, cls).parser( datapath, version, msg_type, msg_len, xid, buf) msg.type, msg.exp_type, msg.experimenter = struct.unpack_from( ofproto.OFP_ERROR_EXPERIMENTER_MSG_PACK_STR, msg.buf, ofproto.OFP_HEADER_SIZE) msg.data = msg.buf[ofproto.OFP_ERROR_EXPERIMENTER_MSG_SIZE:] return msg def _serialize_body(self): assert self.data is not None msg_pack_into(ofproto.OFP_ERROR_EXPERIMENTER_MSG_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.type, self.exp_type, self.experimenter) self.buf += self.data @_register_parser @_set_msg_type(ofproto.OFPT_ECHO_REQUEST) class OFPEchoRequest(MsgBase): """ Echo request message This message is handled by the Ryu framework, so the Ryu application do not need to process this typically. 
========== ========================================================= Attribute Description ========== ========================================================= data An arbitrary length data ========== ========================================================= Example:: def send_echo_request(self, datapath, data): ofp = datapath.ofproto ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPEchoRequest(datapath, data) datapath.send_msg(req) @set_ev_cls(ofp_event.EventOFPEchoRequest, [HANDSHAKE_DISPATCHER, CONFIG_DISPATCHER, MAIN_DISPATCHER]) def echo_request_handler(self, ev): self.logger.debug('OFPEchoRequest received: data=%s', utils.hex_array(ev.msg.data)) """ def __init__(self, datapath, data=None): super(OFPEchoRequest, self).__init__(datapath) self.data = data @classmethod def parser(cls, datapath, version, msg_type, msg_len, xid, buf): msg = super(OFPEchoRequest, cls).parser(datapath, version, msg_type, msg_len, xid, buf) msg.data = msg.buf[ofproto.OFP_HEADER_SIZE:] return msg def _serialize_body(self): if self.data is not None: self.buf += self.data @_register_parser @_set_msg_type(ofproto.OFPT_ECHO_REPLY) class OFPEchoReply(MsgBase): """ Echo reply message This message is handled by the Ryu framework, so the Ryu application do not need to process this typically. ========== ========================================================= Attribute Description ========== ========================================================= data An arbitrary length data ========== ========================================================= Example:: def send_echo_reply(self, datapath, data): ofp = datapath.ofproto ofp_parser = datapath.ofproto_parser reply = ofp_parser.OFPEchoReply(datapath, data) datapath.send_msg(reply) @set_ev_cls(ofp_event.EventOFPEchoReply, [HANDSHAKE_DISPATCHER, CONFIG_DISPATCHER, MAIN_DISPATCHER]) def echo_reply_handler(self, ev): self.logger.debug('OFPEchoReply received: data=%s', utils.hex_array(ev.msg.data)) """ def __init__(self, datapath, data=None): super(OFPEchoReply, self).__init__(datapath) self.data = data @classmethod def parser(cls, datapath, version, msg_type, msg_len, xid, buf): msg = super(OFPEchoReply, cls).parser(datapath, version, msg_type, msg_len, xid, buf) msg.data = msg.buf[ofproto.OFP_HEADER_SIZE:] return msg def _serialize_body(self): assert self.data is not None self.buf += self.data @_register_parser @_set_msg_type(ofproto.OFPT_EXPERIMENTER) class OFPExperimenter(MsgBase): """ Experimenter extension message ============= ========================================================= Attribute Description ============= ========================================================= experimenter Experimenter ID exp_type Experimenter defined data Experimenter defined arbitrary additional data ============= ========================================================= """ def __init__(self, datapath, experimenter=None, exp_type=None, data=None): super(OFPExperimenter, self).__init__(datapath) self.experimenter = experimenter self.exp_type = exp_type self.data = data @classmethod def parser(cls, datapath, version, msg_type, msg_len, xid, buf): msg = super(OFPExperimenter, cls).parser(datapath, version, msg_type, msg_len, xid, buf) (msg.experimenter, msg.exp_type) = struct.unpack_from( ofproto.OFP_EXPERIMENTER_HEADER_PACK_STR, msg.buf, ofproto.OFP_HEADER_SIZE) msg.data = msg.buf[ofproto.OFP_EXPERIMENTER_HEADER_SIZE:] return msg def _serialize_body(self): assert self.data is not None msg_pack_into(ofproto.OFP_EXPERIMENTER_HEADER_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, 
self.experimenter, self.exp_type) self.buf += self.data @_register_parser @_set_msg_type(ofproto.OFPT_FEATURES_REQUEST) class OFPFeaturesRequest(MsgBase): """ Features request message The controller sends a feature request to the switch upon session establishment. This message is handled by the Ryu framework, so the Ryu application do not need to process this typically. Example:: def send_features_request(self, datapath): ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPFeaturesRequest(datapath) datapath.send_msg(req) """ def __init__(self, datapath): super(OFPFeaturesRequest, self).__init__(datapath) @classmethod def parser(cls, datapath, version, msg_type, msg_len, xid, buf): msg = super(OFPFeaturesRequest, cls).parser(datapath, version, msg_type, msg_len, xid, buf) return msg @_register_parser @_set_msg_type(ofproto.OFPT_FEATURES_REPLY) class OFPSwitchFeatures(MsgBase): """ Features reply message The switch responds with a features reply message to a features request. This message is handled by the Ryu framework, so the Ryu application do not need to process this typically. Example:: @set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER) def switch_features_handler(self, ev): msg = ev.msg self.logger.debug('OFPSwitchFeatures received: ' 'datapath_id=0x%016x n_buffers=%d ' 'n_tables=%d auxiliary_id=%d ' 'capabilities=0x%08x', msg.datapath_id, msg.n_buffers, msg.n_tables, msg.auxiliary_id, msg.capabilities) """ def __init__(self, datapath, datapath_id=None, n_buffers=None, n_tables=None, auxiliary_id=None, capabilities=None): super(OFPSwitchFeatures, self).__init__(datapath) self.datapath_id = datapath_id self.n_buffers = n_buffers self.n_tables = n_tables self.auxiliary_id = auxiliary_id self.capabilities = capabilities @classmethod def parser(cls, datapath, version, msg_type, msg_len, xid, buf): msg = super(OFPSwitchFeatures, cls).parser(datapath, version, msg_type, msg_len, xid, buf) (msg.datapath_id, msg.n_buffers, msg.n_tables, msg.auxiliary_id, msg.capabilities, msg._reserved) = struct.unpack_from( ofproto.OFP_SWITCH_FEATURES_PACK_STR, msg.buf, ofproto.OFP_HEADER_SIZE) return msg @_set_msg_type(ofproto.OFPT_GET_CONFIG_REQUEST) class OFPGetConfigRequest(MsgBase): """ Get config request message The controller sends a get config request to query configuration parameters in the switch. Example:: def send_get_config_request(self, datapath): ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPGetConfigRequest(datapath) datapath.send_msg(req) """ def __init__(self, datapath): super(OFPGetConfigRequest, self).__init__(datapath) @_register_parser @_set_msg_type(ofproto.OFPT_GET_CONFIG_REPLY) class OFPGetConfigReply(MsgBase): """ Get config reply message The switch responds to a configuration request with a get config reply message. ============= ========================================================= Attribute Description ============= ========================================================= flags Bitmap of the following flags. 
| OFPC_FRAG_NORMAL
                  | OFPC_FRAG_DROP
                  | OFPC_FRAG_REASM
                  | OFPC_FRAG_MASK
    miss_send_len Max bytes of new flow that datapath should send to the
                  controller
    ============= =========================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPGetConfigReply, MAIN_DISPATCHER)
        def get_config_reply_handler(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto
            flags = []

            if msg.flags & ofp.OFPC_FRAG_NORMAL:
                flags.append('NORMAL')
            if msg.flags & ofp.OFPC_FRAG_DROP:
                flags.append('DROP')
            if msg.flags & ofp.OFPC_FRAG_REASM:
                flags.append('REASM')
            self.logger.debug('OFPGetConfigReply received: '
                              'flags=%s miss_send_len=%d',
                              ','.join(flags), msg.miss_send_len)
    """
    def __init__(self, datapath, flags=None, miss_send_len=None):
        super(OFPGetConfigReply, self).__init__(datapath)
        self.flags = flags
        self.miss_send_len = miss_send_len

    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPGetConfigReply, cls).parser(datapath, version,
                                                   msg_type, msg_len,
                                                   xid, buf)
        msg.flags, msg.miss_send_len = struct.unpack_from(
            ofproto.OFP_SWITCH_CONFIG_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        return msg


@_register_parser
@_set_msg_type(ofproto.OFPT_SET_CONFIG)
class OFPSetConfig(MsgBase):
    """
    Set config request message

    The controller sends a set config request message to set
    configuration parameters.

    ============= =========================================================
    Attribute     Description
    ============= =========================================================
    flags         Bitmap of the following flags.

                  | OFPC_FRAG_NORMAL
                  | OFPC_FRAG_DROP
                  | OFPC_FRAG_REASM
    miss_send_len Max bytes of new flow that datapath should send to the
                  controller
    ============= =========================================================

    Example::

        def send_set_config(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser

            req = ofp_parser.OFPSetConfig(datapath, ofp.OFPC_FRAG_NORMAL, 256)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, miss_send_len=0):
        super(OFPSetConfig, self).__init__(datapath)
        self.flags = flags
        self.miss_send_len = miss_send_len

    def _serialize_body(self):
        assert self.flags is not None
        assert self.miss_send_len is not None
        msg_pack_into(ofproto.OFP_SWITCH_CONFIG_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.flags, self.miss_send_len)

    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPSetConfig, cls).parser(datapath, version, msg_type,
                                              msg_len, xid, buf)
        msg.flags, msg.miss_send_len = struct.unpack_from(
            ofproto.OFP_SWITCH_CONFIG_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        return msg


UINT64_MAX = (1 << 64) - 1
UINT32_MAX = (1 << 32) - 1
UINT16_MAX = (1 << 16) - 1


class Flow(object):
    def __init__(self):
        self.in_port = 0
        self.in_phy_port = 0
        self.metadata = 0
        self.dl_dst = mac.DONTCARE
        self.dl_src = mac.DONTCARE
        self.dl_type = 0
        self.vlan_vid = 0
        self.vlan_pcp = 0
        self.ip_dscp = 0
        self.ip_ecn = 0
        self.ip_proto = 0
        self.ipv4_src = 0
        self.ipv4_dst = 0
        self.tcp_src = 0
        self.tcp_dst = 0
        self.udp_src = 0
        self.udp_dst = 0
        self.sctp_src = 0
        self.sctp_dst = 0
        self.icmpv4_type = 0
        self.icmpv4_code = 0
        self.arp_op = 0
        self.arp_spa = 0
        self.arp_tpa = 0
        self.arp_sha = 0
        self.arp_tha = 0
        self.ipv6_src = []
        self.ipv6_dst = []
        self.ipv6_flabel = 0
        self.icmpv6_type = 0
        self.icmpv6_code = 0
        self.ipv6_nd_target = []
        self.ipv6_nd_sll = 0
        self.ipv6_nd_tll = 0
        self.mpls_label = 0
        self.mpls_tc = 0
        self.mpls_bos = 0
        self.pbb_isid = 0
        self.tunnel_id = 0
        self.ipv6_exthdr = 0


class FlowWildcards(object):
    def
__init__(self): self.metadata_mask = 0 self.dl_dst_mask = 0 self.dl_src_mask = 0 self.vlan_vid_mask = 0 self.ipv4_src_mask = 0 self.ipv4_dst_mask = 0 self.arp_spa_mask = 0 self.arp_tpa_mask = 0 self.arp_sha_mask = 0 self.arp_tha_mask = 0 self.ipv6_src_mask = [] self.ipv6_dst_mask = [] self.ipv6_flabel_mask = 0 self.pbb_isid_mask = 0 self.tunnel_id_mask = 0 self.ipv6_exthdr_mask = 0 self.wildcards = (1 << 64) - 1 def ft_set(self, shift): self.wildcards &= ~(1 << shift) def ft_test(self, shift): return not self.wildcards & (1 << shift) class OFPMatch(StringifyMixin): """ Flow Match Structure This class is implementation of the flow match structure having compose/query API. There are new API and old API for compatibility. the old API is supposed to be removed later. You can define the flow match by the keyword arguments. The following arguments are available. ================ =============== ================================== Argument Value Description ================ =============== ================================== in_port Integer 32bit Switch input port in_phy_port Integer 32bit Switch physical input port metadata Integer 64bit Metadata passed between tables eth_dst MAC address Ethernet destination address eth_src MAC address Ethernet source address eth_type Integer 16bit Ethernet frame type vlan_vid Integer 16bit VLAN id vlan_pcp Integer 8bit VLAN priority ip_dscp Integer 8bit IP DSCP (6 bits in ToS field) ip_ecn Integer 8bit IP ECN (2 bits in ToS field) ip_proto Integer 8bit IP protocol ipv4_src IPv4 address IPv4 source address ipv4_dst IPv4 address IPv4 destination address tcp_src Integer 16bit TCP source port tcp_dst Integer 16bit TCP destination port udp_src Integer 16bit UDP source port udp_dst Integer 16bit UDP destination port sctp_src Integer 16bit SCTP source port sctp_dst Integer 16bit SCTP destination port icmpv4_type Integer 8bit ICMP type icmpv4_code Integer 8bit ICMP code arp_op Integer 16bit ARP opcode arp_spa IPv4 address ARP source IPv4 address arp_tpa IPv4 address ARP target IPv4 address arp_sha MAC address ARP source hardware address arp_tha MAC address ARP target hardware address ipv6_src IPv6 address IPv6 source address ipv6_dst IPv6 address IPv6 destination address ipv6_flabel Integer 32bit IPv6 Flow Label icmpv6_type Integer 8bit ICMPv6 type icmpv6_code Integer 8bit ICMPv6 code ipv6_nd_target IPv6 address Target address for ND ipv6_nd_sll MAC address Source link-layer for ND ipv6_nd_tll MAC address Target link-layer for ND mpls_label Integer 32bit MPLS label mpls_tc Integer 8bit MPLS TC mpls_bos Integer 8bit MPLS BoS bit pbb_isid Integer 24bit PBB I-SID tunnel_id Integer 64bit Logical Port Metadata ipv6_exthdr Integer 16bit IPv6 Extension Header pseudo-field ================ =============== ================================== Example:: >>> # compose >>> match = parser.OFPMatch( ... in_port=1, ... eth_type=0x86dd, ... ipv6_src=('2001:db8:bd05:1d2:288a:1fc0:1:10ee', ... 'ffff:ffff:ffff:ffff::'), ... ipv6_dst='2001:db8:bd05:1d2:288a:1fc0:1:10ee') >>> # query >>> if 'ipv6_src' in match: ... print match['ipv6_src'] ... ('2001:db8:bd05:1d2:288a:1fc0:1:10ee', 'ffff:ffff:ffff:ffff::') .. Note:: For VLAN id match field, special values are defined in OpenFlow Spec. 
1) Packets with and without a VLAN tag - Example:: match = parser.OFPMatch() - Packet Matching ====================== ===== non-VLAN-tagged MATCH VLAN-tagged(vlan_id=3) MATCH VLAN-tagged(vlan_id=5) MATCH ====================== ===== 2) Only packets without a VLAN tag - Example:: match = parser.OFPMatch(vlan_vid=0x0000) - Packet Matching ====================== ===== non-VLAN-tagged MATCH VLAN-tagged(vlan_id=3) x VLAN-tagged(vlan_id=5) x ====================== ===== 3) Only packets with a VLAN tag regardless of its value - Example:: match = parser.OFPMatch(vlan_vid=(0x1000, 0x1000)) - Packet Matching ====================== ===== non-VLAN-tagged x VLAN-tagged(vlan_id=3) MATCH VLAN-tagged(vlan_id=5) MATCH ====================== ===== 4) Only packets with VLAN tag and VID equal - Example:: match = parser.OFPMatch(vlan_vid=(0x1000 | 3)) - Packet Matching ====================== ===== non-VLAN-tagged x VLAN-tagged(vlan_id=3) MATCH VLAN-tagged(vlan_id=5) x ====================== ===== """ def __init__(self, type_=None, length=None, _ordered_fields=None, **kwargs): """ You can define the flow match by the keyword arguments. Please refer to ofproto.oxm_types for the key which you can define. """ super(OFPMatch, self).__init__() self._wc = FlowWildcards() self._flow = Flow() self.fields = [] self.type = ofproto.OFPMT_OXM self.length = length if _ordered_fields is not None: assert not kwargs self._fields2 = _ordered_fields else: # eg. # OFPMatch(eth_src=('ff:ff:ff:00:00:00'), eth_type=0x800, # ipv4_src='10.0.0.1') kwargs = dict(ofproto.oxm_normalize_user(k, v) for (k, v) in kwargs.items()) fields = [ofproto.oxm_from_user(k, v) for (k, v) in kwargs.items()] # assumption: sorting by OXM type values makes fields # meet ordering requirements (eg. eth_type before ipv4_src) fields.sort() self._fields2 = [ofproto.oxm_to_user(n, v, m) for (n, v, m) in fields] def __getitem__(self, key): return dict(self._fields2)[key] def __contains__(self, key): return key in dict(self._fields2) def iteritems(self): return dict(self._fields2).iteritems() def get(self, key, default=None): return dict(self._fields2).get(key, default) def stringify_attrs(self): yield "oxm_fields", dict(self._fields2) def to_jsondict(self): """ Returns a dict expressing the flow match. """ # XXX old api compat if self._composed_with_old_api(): # copy object first because serialize_old is destructive o2 = OFPMatch() o2.fields = self.fields[:] # serialize and parse to fill OFPMatch._fields2 buf = bytearray() o2.serialize(buf, 0) o = OFPMatch.parser(str(buf), 0) else: o = self body = {"oxm_fields": [ofproto.oxm_to_jsondict(k, uv) for k, uv in o._fields2], "length": o.length, "type": o.type} return {self.__class__.__name__: body} @classmethod def from_jsondict(cls, dict_): """ Returns an object which is generated from a dict. 
Exception raises: KeyError -- Unknown match field is defined in dict """ fields = [ofproto.oxm_from_jsondict(f) for f in dict_['oxm_fields']] o = OFPMatch(_ordered_fields=fields) # XXX old api compat # serialize and parse to fill OFPMatch.fields buf = bytearray() o.serialize(buf, 0) return OFPMatch.parser(str(buf), 0) def __str__(self): # XXX old api compat if self._composed_with_old_api(): # copy object first because serialize_old is destructive o2 = OFPMatch() o2.fields = self.fields[:] # serialize and parse to fill OFPMatch._fields2 buf = bytearray() o2.serialize(buf, 0) o = OFPMatch.parser(str(buf), 0) else: o = self return super(OFPMatch, o).__str__() __repr__ = __str__ def append_field(self, header, value, mask=None): """ Append a match field. ========= ======================================================= Argument Description ========= ======================================================= header match field header ID which is defined automatically in ``ofproto`` value match field value mask mask value to the match field ========= ======================================================= The available ``header`` is as follows. ====================== =================================== Header ID Description ====================== =================================== OXM_OF_IN_PORT Switch input port OXM_OF_IN_PHY_PORT Switch physical input port OXM_OF_METADATA Metadata passed between tables OXM_OF_ETH_DST Ethernet destination address OXM_OF_ETH_SRC Ethernet source address OXM_OF_ETH_TYPE Ethernet frame type OXM_OF_VLAN_VID VLAN id OXM_OF_VLAN_PCP VLAN priority OXM_OF_IP_DSCP IP DSCP (6 bits in ToS field) OXM_OF_IP_ECN IP ECN (2 bits in ToS field) OXM_OF_IP_PROTO IP protocol OXM_OF_IPV4_SRC IPv4 source address OXM_OF_IPV4_DST IPv4 destination address OXM_OF_TCP_SRC TCP source port OXM_OF_TCP_DST TCP destination port OXM_OF_UDP_SRC UDP source port OXM_OF_UDP_DST UDP destination port OXM_OF_SCTP_SRC SCTP source port OXM_OF_SCTP_DST SCTP destination port OXM_OF_ICMPV4_TYPE ICMP type OXM_OF_ICMPV4_CODE ICMP code OXM_OF_ARP_OP ARP opcode OXM_OF_ARP_SPA ARP source IPv4 address OXM_OF_ARP_TPA ARP target IPv4 address OXM_OF_ARP_SHA ARP source hardware address OXM_OF_ARP_THA ARP target hardware address OXM_OF_IPV6_SRC IPv6 source address OXM_OF_IPV6_DST IPv6 destination address OXM_OF_IPV6_FLABEL IPv6 Flow Label OXM_OF_ICMPV6_TYPE ICMPv6 type OXM_OF_ICMPV6_CODE ICMPv6 code OXM_OF_IPV6_ND_TARGET Target address for ND OXM_OF_IPV6_ND_SLL Source link-layer for ND OXM_OF_IPV6_ND_TLL Target link-layer for ND OXM_OF_MPLS_LABEL MPLS label OXM_OF_MPLS_TC MPLS TC OXM_OF_MPLS_BOS MPLS BoS bit OXM_OF_PBB_ISID PBB I-SID OXM_OF_TUNNEL_ID Logical Port Metadata OXM_OF_IPV6_EXTHDR IPv6 Extension Header pseudo-field ====================== =================================== """ self.fields.append(OFPMatchField.make(header, value, mask)) def _composed_with_old_api(self): return (self.fields and not self._fields2) or \ self._wc.__dict__ != FlowWildcards().__dict__ def serialize(self, buf, offset): """ Outputs the expression of the wire protocol of the flow match into the buf. Returns the output length. 
""" # XXX compat if self._composed_with_old_api(): return self.serialize_old(buf, offset) fields = [ofproto.oxm_from_user(k, uv) for (k, uv) in self._fields2] hdr_pack_str = '!HH' field_offset = offset + struct.calcsize(hdr_pack_str) for (n, value, mask) in fields: field_offset += ofproto.oxm_serialize(n, value, mask, buf, field_offset) length = field_offset - offset msg_pack_into(hdr_pack_str, buf, offset, ofproto.OFPMT_OXM, length) self.length = length pad_len = utils.round_up(length, 8) - length msg_pack_into("%dx" % pad_len, buf, field_offset) return length + pad_len def serialize_old(self, buf, offset): if hasattr(self, '_serialized'): raise Exception('serializing an OFPMatch composed with ' 'old API multiple times is not supported') self._serialized = True if self._wc.ft_test(ofproto.OFPXMT_OFB_IN_PORT): self.append_field(ofproto.OXM_OF_IN_PORT, self._flow.in_port) if self._wc.ft_test(ofproto.OFPXMT_OFB_IN_PHY_PORT): self.append_field(ofproto.OXM_OF_IN_PHY_PORT, self._flow.in_phy_port) if self._wc.ft_test(ofproto.OFPXMT_OFB_METADATA): if self._wc.metadata_mask == UINT64_MAX: header = ofproto.OXM_OF_METADATA else: header = ofproto.OXM_OF_METADATA_W self.append_field(header, self._flow.metadata, self._wc.metadata_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_ETH_DST): if self._wc.dl_dst_mask: header = ofproto.OXM_OF_ETH_DST_W else: header = ofproto.OXM_OF_ETH_DST self.append_field(header, self._flow.dl_dst, self._wc.dl_dst_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_ETH_SRC): if self._wc.dl_src_mask: header = ofproto.OXM_OF_ETH_SRC_W else: header = ofproto.OXM_OF_ETH_SRC self.append_field(header, self._flow.dl_src, self._wc.dl_src_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_ETH_TYPE): self.append_field(ofproto.OXM_OF_ETH_TYPE, self._flow.dl_type) if self._wc.ft_test(ofproto.OFPXMT_OFB_VLAN_VID): if self._wc.vlan_vid_mask == UINT16_MAX: header = ofproto.OXM_OF_VLAN_VID else: header = ofproto.OXM_OF_VLAN_VID_W self.append_field(header, self._flow.vlan_vid, self._wc.vlan_vid_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_VLAN_PCP): self.append_field(ofproto.OXM_OF_VLAN_PCP, self._flow.vlan_pcp) if self._wc.ft_test(ofproto.OFPXMT_OFB_IP_DSCP): self.append_field(ofproto.OXM_OF_IP_DSCP, self._flow.ip_dscp) if self._wc.ft_test(ofproto.OFPXMT_OFB_IP_ECN): self.append_field(ofproto.OXM_OF_IP_ECN, self._flow.ip_ecn) if self._wc.ft_test(ofproto.OFPXMT_OFB_IP_PROTO): self.append_field(ofproto.OXM_OF_IP_PROTO, self._flow.ip_proto) if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV4_SRC): if self._wc.ipv4_src_mask == UINT32_MAX: header = ofproto.OXM_OF_IPV4_SRC else: header = ofproto.OXM_OF_IPV4_SRC_W self.append_field(header, self._flow.ipv4_src, self._wc.ipv4_src_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV4_DST): if self._wc.ipv4_dst_mask == UINT32_MAX: header = ofproto.OXM_OF_IPV4_DST else: header = ofproto.OXM_OF_IPV4_DST_W self.append_field(header, self._flow.ipv4_dst, self._wc.ipv4_dst_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_TCP_SRC): self.append_field(ofproto.OXM_OF_TCP_SRC, self._flow.tcp_src) if self._wc.ft_test(ofproto.OFPXMT_OFB_TCP_DST): self.append_field(ofproto.OXM_OF_TCP_DST, self._flow.tcp_dst) if self._wc.ft_test(ofproto.OFPXMT_OFB_UDP_SRC): self.append_field(ofproto.OXM_OF_UDP_SRC, self._flow.udp_src) if self._wc.ft_test(ofproto.OFPXMT_OFB_UDP_DST): self.append_field(ofproto.OXM_OF_UDP_DST, self._flow.udp_dst) if self._wc.ft_test(ofproto.OFPXMT_OFB_SCTP_SRC): self.append_field(ofproto.OXM_OF_SCTP_SRC, self._flow.sctp_src) if self._wc.ft_test(ofproto.OFPXMT_OFB_SCTP_DST): 
self.append_field(ofproto.OXM_OF_SCTP_DST, self._flow.sctp_dst) if self._wc.ft_test(ofproto.OFPXMT_OFB_ICMPV4_TYPE): self.append_field(ofproto.OXM_OF_ICMPV4_TYPE, self._flow.icmpv4_type) if self._wc.ft_test(ofproto.OFPXMT_OFB_ICMPV4_CODE): self.append_field(ofproto.OXM_OF_ICMPV4_CODE, self._flow.icmpv4_code) if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_OP): self.append_field(ofproto.OXM_OF_ARP_OP, self._flow.arp_op) if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_SPA): if self._wc.arp_spa_mask == UINT32_MAX: header = ofproto.OXM_OF_ARP_SPA else: header = ofproto.OXM_OF_ARP_SPA_W self.append_field(header, self._flow.arp_spa, self._wc.arp_spa_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_TPA): if self._wc.arp_tpa_mask == UINT32_MAX: header = ofproto.OXM_OF_ARP_TPA else: header = ofproto.OXM_OF_ARP_TPA_W self.append_field(header, self._flow.arp_tpa, self._wc.arp_tpa_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_SHA): if self._wc.arp_sha_mask: header = ofproto.OXM_OF_ARP_SHA_W else: header = ofproto.OXM_OF_ARP_SHA self.append_field(header, self._flow.arp_sha, self._wc.arp_sha_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_THA): if self._wc.arp_tha_mask: header = ofproto.OXM_OF_ARP_THA_W else: header = ofproto.OXM_OF_ARP_THA self.append_field(header, self._flow.arp_tha, self._wc.arp_tha_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_SRC): if len(self._wc.ipv6_src_mask): header = ofproto.OXM_OF_IPV6_SRC_W else: header = ofproto.OXM_OF_IPV6_SRC self.append_field(header, self._flow.ipv6_src, self._wc.ipv6_src_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_DST): if len(self._wc.ipv6_dst_mask): header = ofproto.OXM_OF_IPV6_DST_W else: header = ofproto.OXM_OF_IPV6_DST self.append_field(header, self._flow.ipv6_dst, self._wc.ipv6_dst_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_FLABEL): if self._wc.ipv6_flabel_mask == UINT32_MAX: header = ofproto.OXM_OF_IPV6_FLABEL else: header = ofproto.OXM_OF_IPV6_FLABEL_W self.append_field(header, self._flow.ipv6_flabel, self._wc.ipv6_flabel_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_ICMPV6_TYPE): self.append_field(ofproto.OXM_OF_ICMPV6_TYPE, self._flow.icmpv6_type) if self._wc.ft_test(ofproto.OFPXMT_OFB_ICMPV6_CODE): self.append_field(ofproto.OXM_OF_ICMPV6_CODE, self._flow.icmpv6_code) if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_ND_TARGET): self.append_field(ofproto.OXM_OF_IPV6_ND_TARGET, self._flow.ipv6_nd_target) if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_ND_SLL): self.append_field(ofproto.OXM_OF_IPV6_ND_SLL, self._flow.ipv6_nd_sll) if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_ND_TLL): self.append_field(ofproto.OXM_OF_IPV6_ND_TLL, self._flow.ipv6_nd_tll) if self._wc.ft_test(ofproto.OFPXMT_OFB_MPLS_LABEL): self.append_field(ofproto.OXM_OF_MPLS_LABEL, self._flow.mpls_label) if self._wc.ft_test(ofproto.OFPXMT_OFB_MPLS_TC): self.append_field(ofproto.OXM_OF_MPLS_TC, self._flow.mpls_tc) if self._wc.ft_test(ofproto.OFPXMT_OFB_MPLS_BOS): self.append_field(ofproto.OXM_OF_MPLS_BOS, self._flow.mpls_bos) if self._wc.ft_test(ofproto.OFPXMT_OFB_PBB_ISID): if self._wc.pbb_isid_mask: header = ofproto.OXM_OF_PBB_ISID_W else: header = ofproto.OXM_OF_PBB_ISID self.append_field(header, self._flow.pbb_isid, self._wc.pbb_isid_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_TUNNEL_ID): if self._wc.tunnel_id_mask: header = ofproto.OXM_OF_TUNNEL_ID_W else: header = ofproto.OXM_OF_TUNNEL_ID self.append_field(header, self._flow.tunnel_id, self._wc.tunnel_id_mask) if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_EXTHDR): if self._wc.ipv6_exthdr_mask: header = 
ofproto.OXM_OF_IPV6_EXTHDR_W else: header = ofproto.OXM_OF_IPV6_EXTHDR self.append_field(header, self._flow.ipv6_exthdr, self._wc.ipv6_exthdr_mask) field_offset = offset + 4 for f in self.fields: f.serialize(buf, field_offset) field_offset += f.length length = field_offset - offset msg_pack_into('!HH', buf, offset, ofproto.OFPMT_OXM, length) pad_len = utils.round_up(length, 8) - length msg_pack_into("%dx" % pad_len, buf, field_offset) return length + pad_len @classmethod def parser(cls, buf, offset): """ Returns an object which is generated from a buffer including the expression of the wire protocol of the flow match. """ match = OFPMatch() type_, length = struct.unpack_from('!HH', buf, offset) match.type = type_ match.length = length # ofp_match adjustment offset += 4 length -= 4 # XXXcompat cls.parser_old(match, buf, offset, length) fields = [] while length > 0: n, value, mask, field_len = ofproto.oxm_parse(buf, offset) k, uv = ofproto.oxm_to_user(n, value, mask) fields.append((k, uv)) offset += field_len length -= field_len match._fields2 = fields return match @staticmethod def parser_old(match, buf, offset, length): while length > 0: field = OFPMatchField.parser(buf, offset) offset += field.length length -= field.length match.fields.append(field) def set_in_port(self, port): self._wc.ft_set(ofproto.OFPXMT_OFB_IN_PORT) self._flow.in_port = port def set_in_phy_port(self, phy_port): self._wc.ft_set(ofproto.OFPXMT_OFB_IN_PHY_PORT) self._flow.in_phy_port = phy_port def set_metadata(self, metadata): self.set_metadata_masked(metadata, UINT64_MAX) def set_metadata_masked(self, metadata, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_METADATA) self._wc.metadata_mask = mask self._flow.metadata = metadata & mask def set_dl_dst(self, dl_dst): self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_DST) self._flow.dl_dst = dl_dst def set_dl_dst_masked(self, dl_dst, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_DST) self._wc.dl_dst_mask = mask # bit-wise and of the corresponding elements of dl_dst and mask self._flow.dl_dst = mac.haddr_bitand(dl_dst, mask) def set_dl_src(self, dl_src): self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_SRC) self._flow.dl_src = dl_src def set_dl_src_masked(self, dl_src, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_SRC) self._wc.dl_src_mask = mask self._flow.dl_src = mac.haddr_bitand(dl_src, mask) def set_dl_type(self, dl_type): self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_TYPE) self._flow.dl_type = dl_type def set_vlan_vid_none(self): self._wc.ft_set(ofproto.OFPXMT_OFB_VLAN_VID) self._wc.vlan_vid_mask = UINT16_MAX self._flow.vlan_vid = ofproto.OFPVID_NONE def set_vlan_vid(self, vid): self.set_vlan_vid_masked(vid, UINT16_MAX) def set_vlan_vid_masked(self, vid, mask): vid |= ofproto.OFPVID_PRESENT self._wc.ft_set(ofproto.OFPXMT_OFB_VLAN_VID) self._wc.vlan_vid_mask = mask self._flow.vlan_vid = vid def set_vlan_pcp(self, pcp): self._wc.ft_set(ofproto.OFPXMT_OFB_VLAN_PCP) self._flow.vlan_pcp = pcp def set_ip_dscp(self, ip_dscp): self._wc.ft_set(ofproto.OFPXMT_OFB_IP_DSCP) self._flow.ip_dscp = ip_dscp def set_ip_ecn(self, ip_ecn): self._wc.ft_set(ofproto.OFPXMT_OFB_IP_ECN) self._flow.ip_ecn = ip_ecn def set_ip_proto(self, ip_proto): self._wc.ft_set(ofproto.OFPXMT_OFB_IP_PROTO) self._flow.ip_proto = ip_proto def set_ipv4_src(self, ipv4_src): self.set_ipv4_src_masked(ipv4_src, UINT32_MAX) def set_ipv4_src_masked(self, ipv4_src, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_IPV4_SRC) self._flow.ipv4_src = ipv4_src self._wc.ipv4_src_mask = mask def set_ipv4_dst(self, ipv4_dst): self.set_ipv4_dst_masked(ipv4_dst, 
UINT32_MAX) def set_ipv4_dst_masked(self, ipv4_dst, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_IPV4_DST) self._flow.ipv4_dst = ipv4_dst self._wc.ipv4_dst_mask = mask def set_tcp_src(self, tcp_src): self._wc.ft_set(ofproto.OFPXMT_OFB_TCP_SRC) self._flow.tcp_src = tcp_src def set_tcp_dst(self, tcp_dst): self._wc.ft_set(ofproto.OFPXMT_OFB_TCP_DST) self._flow.tcp_dst = tcp_dst def set_udp_src(self, udp_src): self._wc.ft_set(ofproto.OFPXMT_OFB_UDP_SRC) self._flow.udp_src = udp_src def set_udp_dst(self, udp_dst): self._wc.ft_set(ofproto.OFPXMT_OFB_UDP_DST) self._flow.udp_dst = udp_dst def set_sctp_src(self, sctp_src): self._wc.ft_set(ofproto.OFPXMT_OFB_SCTP_SRC) self._flow.sctp_src = sctp_src def set_sctp_dst(self, sctp_dst): self._wc.ft_set(ofproto.OFPXMT_OFB_SCTP_DST) self._flow.sctp_dst = sctp_dst def set_icmpv4_type(self, icmpv4_type): self._wc.ft_set(ofproto.OFPXMT_OFB_ICMPV4_TYPE) self._flow.icmpv4_type = icmpv4_type def set_icmpv4_code(self, icmpv4_code): self._wc.ft_set(ofproto.OFPXMT_OFB_ICMPV4_CODE) self._flow.icmpv4_code = icmpv4_code def set_arp_opcode(self, arp_op): self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_OP) self._flow.arp_op = arp_op def set_arp_spa(self, arp_spa): self.set_arp_spa_masked(arp_spa, UINT32_MAX) def set_arp_spa_masked(self, arp_spa, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_SPA) self._wc.arp_spa_mask = mask self._flow.arp_spa = arp_spa def set_arp_tpa(self, arp_tpa): self.set_arp_tpa_masked(arp_tpa, UINT32_MAX) def set_arp_tpa_masked(self, arp_tpa, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_TPA) self._wc.arp_tpa_mask = mask self._flow.arp_tpa = arp_tpa def set_arp_sha(self, arp_sha): self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_SHA) self._flow.arp_sha = arp_sha def set_arp_sha_masked(self, arp_sha, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_SHA) self._wc.arp_sha_mask = mask self._flow.arp_sha = mac.haddr_bitand(arp_sha, mask) def set_arp_tha(self, arp_tha): self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_THA) self._flow.arp_tha = arp_tha def set_arp_tha_masked(self, arp_tha, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_THA) self._wc.arp_tha_mask = mask self._flow.arp_tha = mac.haddr_bitand(arp_tha, mask) def set_ipv6_src(self, src): self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_SRC) self._flow.ipv6_src = src def set_ipv6_src_masked(self, src, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_SRC) self._wc.ipv6_src_mask = mask self._flow.ipv6_src = [x & y for (x, y) in itertools.izip(src, mask)] def set_ipv6_dst(self, dst): self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_DST) self._flow.ipv6_dst = dst def set_ipv6_dst_masked(self, dst, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_DST) self._wc.ipv6_dst_mask = mask self._flow.ipv6_dst = [x & y for (x, y) in itertools.izip(dst, mask)] def set_ipv6_flabel(self, flabel): self.set_ipv6_flabel_masked(flabel, UINT32_MAX) def set_ipv6_flabel_masked(self, flabel, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_FLABEL) self._wc.ipv6_flabel_mask = mask self._flow.ipv6_flabel = flabel def set_icmpv6_type(self, icmpv6_type): self._wc.ft_set(ofproto.OFPXMT_OFB_ICMPV6_TYPE) self._flow.icmpv6_type = icmpv6_type def set_icmpv6_code(self, icmpv6_code): self._wc.ft_set(ofproto.OFPXMT_OFB_ICMPV6_CODE) self._flow.icmpv6_code = icmpv6_code def set_ipv6_nd_target(self, target): self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_ND_TARGET) self._flow.ipv6_nd_target = target def set_ipv6_nd_sll(self, ipv6_nd_sll): self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_ND_SLL) self._flow.ipv6_nd_sll = ipv6_nd_sll def set_ipv6_nd_tll(self, ipv6_nd_tll): 
self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_ND_TLL) self._flow.ipv6_nd_tll = ipv6_nd_tll def set_mpls_label(self, mpls_label): self._wc.ft_set(ofproto.OFPXMT_OFB_MPLS_LABEL) self._flow.mpls_label = mpls_label def set_mpls_tc(self, mpls_tc): self._wc.ft_set(ofproto.OFPXMT_OFB_MPLS_TC) self._flow.mpls_tc = mpls_tc def set_mpls_bos(self, bos): self._wc.ft_set(ofproto.OFPXMT_OFB_MPLS_BOS) self._flow.mpls_bos = bos def set_pbb_isid(self, isid): self._wc.ft_set(ofproto.OFPXMT_OFB_PBB_ISID) self._flow.pbb_isid = isid def set_pbb_isid_masked(self, isid, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_PBB_ISID) self._wc.pbb_isid_mask = mask self._flow.pbb_isid = isid def set_tunnel_id(self, tunnel_id): self._wc.ft_set(ofproto.OFPXMT_OFB_TUNNEL_ID) self._flow.tunnel_id = tunnel_id def set_tunnel_id_masked(self, tunnel_id, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_TUNNEL_ID) self._wc.tunnel_id_mask = mask self._flow.tunnel_id = tunnel_id def set_ipv6_exthdr(self, hdr): self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_EXTHDR) self._flow.ipv6_exthdr = hdr def set_ipv6_exthdr_masked(self, hdr, mask): self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_EXTHDR) self._wc.ipv6_exthdr_mask = mask self._flow.ipv6_exthdr = hdr class OFPMatchField(StringifyMixin): _FIELDS_HEADERS = {} @staticmethod def register_field_header(headers): def _register_field_header(cls): for header in headers: OFPMatchField._FIELDS_HEADERS[header] = cls return cls return _register_field_header def __init__(self, header): self.header = header self.n_bytes = ofproto.oxm_tlv_header_extract_length(header) self.length = 0 @classmethod def cls_to_header(cls, cls_, hasmask): # XXX efficiency inv = dict((v, k) for k, v in cls._FIELDS_HEADERS.iteritems() if (((k >> 8) & 1) != 0) == hasmask) return inv[cls_] @staticmethod def make(header, value, mask=None): cls_ = OFPMatchField._FIELDS_HEADERS.get(header) return cls_(header, value, mask) @classmethod def parser(cls, buf, offset): (header,) = struct.unpack_from('!I', buf, offset) cls_ = OFPMatchField._FIELDS_HEADERS.get(header) if cls_: field = cls_.field_parser(header, buf, offset) else: field = OFPMatchField(header) field.length = (header & 0xff) + 4 return field @classmethod def field_parser(cls, header, buf, offset): hasmask = (header >> 8) & 1 mask = None if ofproto.oxm_tlv_header_extract_hasmask(header): pack_str = '!' 
+ cls.pack_str[1:] * 2 (value, mask) = struct.unpack_from(pack_str, buf, offset + 4) else: (value,) = struct.unpack_from(cls.pack_str, buf, offset + 4) return cls(header, value, mask) def serialize(self, buf, offset): if ofproto.oxm_tlv_header_extract_hasmask(self.header): self.put_w(buf, offset, self.value, self.mask) else: self.put(buf, offset, self.value) def _put_header(self, buf, offset): msg_pack_into('!I', buf, offset, self.header) self.length = 4 def _put(self, buf, offset, value): msg_pack_into(self.pack_str, buf, offset, value) self.length += self.n_bytes def put_w(self, buf, offset, value, mask): self._put_header(buf, offset) self._put(buf, offset + self.length, value) self._put(buf, offset + self.length, mask) def put(self, buf, offset, value): self._put_header(buf, offset) self._put(buf, offset + self.length, value) def _putv6(self, buf, offset, value): msg_pack_into(self.pack_str, buf, offset, *value) self.length += self.n_bytes def putv6(self, buf, offset, value, mask=None): self._put_header(buf, offset) self._putv6(buf, offset + self.length, value) if mask and len(mask): self._putv6(buf, offset + self.length, mask) def oxm_len(self): return self.header & 0xff def to_jsondict(self): # remove some redundant attributes d = super(OFPMatchField, self).to_jsondict() v = d[self.__class__.__name__] del v['header'] del v['length'] del v['n_bytes'] return d @classmethod def from_jsondict(cls, dict_): # just pass the dict around. # it will be converted by OFPMatch.__init__(). return {cls.__name__: dict_} def stringify_attrs(self): f = super(OFPMatchField, self).stringify_attrs if not ofproto.oxm_tlv_header_extract_hasmask(self.header): # something like the following, but yield two values (k,v) # return itertools.ifilter(lambda k, v: k != 'mask', iter()) def g(): for k, v in f(): if k != 'mask': yield (k, v) return g() else: return f() @OFPMatchField.register_field_header([ofproto.OXM_OF_IN_PORT]) class MTInPort(OFPMatchField): pack_str = '!I' def __init__(self, header, value, mask=None): super(MTInPort, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_METADATA, ofproto.OXM_OF_METADATA_W]) class MTMetadata(OFPMatchField): pack_str = '!Q' def __init__(self, header, value, mask=None): super(MTMetadata, self).__init__(header) self.value = value self.mask = mask @OFPMatchField.register_field_header([ofproto.OXM_OF_IN_PHY_PORT]) class MTInPhyPort(OFPMatchField): pack_str = '!I' def __init__(self, header, value, mask=None): super(MTInPhyPort, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_ETH_DST, ofproto.OXM_OF_ETH_DST_W]) class MTEthDst(OFPMatchField): pack_str = '!6s' def __init__(self, header, value, mask=None): super(MTEthDst, self).__init__(header) self.value = value self.mask = mask @OFPMatchField.register_field_header([ofproto.OXM_OF_ETH_SRC, ofproto.OXM_OF_ETH_SRC_W]) class MTEthSrc(OFPMatchField): pack_str = '!6s' def __init__(self, header, value, mask=None): super(MTEthSrc, self).__init__(header) self.value = value self.mask = mask @OFPMatchField.register_field_header([ofproto.OXM_OF_ETH_TYPE]) class MTEthType(OFPMatchField): pack_str = '!H' def __init__(self, header, value, mask=None): super(MTEthType, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_VLAN_VID, ofproto.OXM_OF_VLAN_VID_W]) class MTVlanVid(OFPMatchField): pack_str = '!H' def __init__(self, header, value, mask=None): super(MTVlanVid, self).__init__(header) self.value = value 
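        # mask is only meaningful when the masked OXM_OF_VLAN_VID_W header
        # is used; with the plain OXM_OF_VLAN_VID header it stays None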
self.mask = mask @classmethod def field_parser(cls, header, buf, offset): m = super(MTVlanVid, cls).field_parser(header, buf, offset) m.value &= ~ofproto.OFPVID_PRESENT return m @OFPMatchField.register_field_header([ofproto.OXM_OF_VLAN_PCP]) class MTVlanPcp(OFPMatchField): pack_str = '!B' def __init__(self, header, value, mask=None): super(MTVlanPcp, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_IP_DSCP]) class MTIPDscp(OFPMatchField): pack_str = '!B' def __init__(self, header, value, mask=None): super(MTIPDscp, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_IP_ECN]) class MTIPECN(OFPMatchField): pack_str = '!B' def __init__(self, header, value, mask=None): super(MTIPECN, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_IP_PROTO]) class MTIPProto(OFPMatchField): pack_str = '!B' def __init__(self, header, value, mask=None): super(MTIPProto, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_IPV4_SRC, ofproto.OXM_OF_IPV4_SRC_W]) class MTIPV4Src(OFPMatchField): pack_str = '!I' def __init__(self, header, value, mask=None): super(MTIPV4Src, self).__init__(header) self.value = value self.mask = mask @OFPMatchField.register_field_header([ofproto.OXM_OF_IPV4_DST, ofproto.OXM_OF_IPV4_DST_W]) class MTIPV4Dst(OFPMatchField): pack_str = '!I' def __init__(self, header, value, mask=None): super(MTIPV4Dst, self).__init__(header) self.value = value self.mask = mask @OFPMatchField.register_field_header([ofproto.OXM_OF_TCP_SRC]) class MTTCPSrc(OFPMatchField): pack_str = '!H' def __init__(self, header, value, mask=None): super(MTTCPSrc, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_TCP_DST]) class MTTCPDst(OFPMatchField): pack_str = '!H' def __init__(self, header, value, mask=None): super(MTTCPDst, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_UDP_SRC]) class MTUDPSrc(OFPMatchField): pack_str = '!H' def __init__(self, header, value, mask=None): super(MTUDPSrc, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_UDP_DST]) class MTUDPDst(OFPMatchField): pack_str = '!H' def __init__(self, header, value, mask=None): super(MTUDPDst, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_SCTP_SRC]) class MTSCTPSrc(OFPMatchField): pack_str = '!H' def __init__(self, header, value, mask=None): super(MTSCTPSrc, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_SCTP_DST]) class MTSCTPDst(OFPMatchField): pack_str = '!H' def __init__(self, header, value, mask=None): super(MTSCTPDst, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_ICMPV4_TYPE]) class MTICMPV4Type(OFPMatchField): pack_str = '!B' def __init__(self, header, value, mask=None): super(MTICMPV4Type, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_ICMPV4_CODE]) class MTICMPV4Code(OFPMatchField): pack_str = '!B' def __init__(self, header, value, mask=None): super(MTICMPV4Code, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_ARP_OP]) class MTArpOp(OFPMatchField): pack_str = '!H' def __init__(self, header, value, mask=None): super(MTArpOp, self).__init__(header) self.value = value 
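
# A hedged sketch of how the registry above is typically exercised (this
# demo helper is not part of the original source and its name
# ``_demo_oxm_roundtrip`` is hypothetical): ``OFPMatchField.make()``
# instantiates the class registered for an OXM header, ``serialize()``
# writes the TLV into a buffer, and ``OFPMatchField.parser()`` dispatches
# on the header it reads back.
def _demo_oxm_roundtrip():
    buf = bytearray()
    # ARP opcode 1 is "request"; MTArpOp is registered just above
    field = OFPMatchField.make(ofproto.OXM_OF_ARP_OP, 1)
    field.serialize(buf, 0)
    # parser() looks up MTArpOp from the 4-byte OXM header it unpacks
    parsed = OFPMatchField.parser(buf, 0)
    assert isinstance(parsed, MTArpOp) and parsed.value == 1
    return parsed
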
@OFPMatchField.register_field_header([ofproto.OXM_OF_ARP_SPA, ofproto.OXM_OF_ARP_SPA_W]) class MTArpSpa(OFPMatchField): pack_str = '!I' def __init__(self, header, value, mask=None): super(MTArpSpa, self).__init__(header) self.value = value self.mask = mask @OFPMatchField.register_field_header([ofproto.OXM_OF_ARP_TPA, ofproto.OXM_OF_ARP_TPA_W]) class MTArpTpa(OFPMatchField): pack_str = '!I' def __init__(self, header, value, mask=None): super(MTArpTpa, self).__init__(header) self.value = value self.mask = mask @OFPMatchField.register_field_header([ofproto.OXM_OF_ARP_SHA, ofproto.OXM_OF_ARP_SHA_W]) class MTArpSha(OFPMatchField): pack_str = '!6s' def __init__(self, header, value, mask=None): super(MTArpSha, self).__init__(header) self.value = value self.mask = mask @OFPMatchField.register_field_header([ofproto.OXM_OF_ARP_THA, ofproto.OXM_OF_ARP_THA_W]) class MTArpTha(OFPMatchField): pack_str = '!6s' def __init__(self, header, value, mask=None): super(MTArpTha, self).__init__(header) self.value = value self.mask = mask class MTIPv6(StringifyMixin): @classmethod def field_parser(cls, header, buf, offset): if ofproto.oxm_tlv_header_extract_hasmask(header): pack_str = '!' + cls.pack_str[1:] * 2 value = struct.unpack_from(pack_str, buf, offset + 4) return cls(header, list(value[:8]), list(value[8:])) else: value = struct.unpack_from(cls.pack_str, buf, offset + 4) return cls(header, list(value)) def serialize(self, buf, offset): self.putv6(buf, offset, self.value, self.mask) @OFPMatchField.register_field_header([ofproto.OXM_OF_IPV6_SRC, ofproto.OXM_OF_IPV6_SRC_W]) class MTIPv6Src(MTIPv6, OFPMatchField): pack_str = '!8H' def __init__(self, header, value, mask=None): super(MTIPv6Src, self).__init__(header) self.value = value self.mask = mask @OFPMatchField.register_field_header([ofproto.OXM_OF_IPV6_DST, ofproto.OXM_OF_IPV6_DST_W]) class MTIPv6Dst(MTIPv6, OFPMatchField): pack_str = '!8H' def __init__(self, header, value, mask=None): super(MTIPv6Dst, self).__init__(header) self.value = value self.mask = mask @OFPMatchField.register_field_header([ofproto.OXM_OF_IPV6_FLABEL, ofproto.OXM_OF_IPV6_FLABEL_W]) class MTIPv6Flabel(OFPMatchField): pack_str = '!I' def __init__(self, header, value, mask=None): super(MTIPv6Flabel, self).__init__(header) self.value = value self.mask = mask @OFPMatchField.register_field_header([ofproto.OXM_OF_MPLS_LABEL]) class MTMplsLabel(OFPMatchField): pack_str = '!I' def __init__(self, header, value, mask=None): super(MTMplsLabel, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_ICMPV6_TYPE]) class MTICMPV6Type(OFPMatchField): pack_str = '!B' def __init__(self, header, value, mask=None): super(MTICMPV6Type, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_ICMPV6_CODE]) class MTICMPV6Code(OFPMatchField): pack_str = '!B' def __init__(self, header, value, mask=None): super(MTICMPV6Code, self).__init__(header) self.value = value @OFPMatchField.register_field_header([ofproto.OXM_OF_IPV6_ND_TARGET]) class MTIPv6NdTarget(MTIPv6, OFPMatchField): pack_str = '!8H' def __init__(self, header, value, mask=None): super(MTIPv6NdTarget, self).__init__(header) self.value = value def serialize(self, buf, offset): self.putv6(buf, offset, self.value) @OFPMatchField.register_field_header([ofproto.OXM_OF_IPV6_ND_SLL]) class MTIPv6NdSll(OFPMatchField): pack_str = '!6s' def __init__(self, header, value, mask=None): super(MTIPv6NdSll, self).__init__(header) self.value = value 
@OFPMatchField.register_field_header([ofproto.OXM_OF_IPV6_ND_TLL])
class MTIPv6NdTll(OFPMatchField):
    pack_str = '!6s'

    def __init__(self, header, value, mask=None):
        super(MTIPv6NdTll, self).__init__(header)
        self.value = value


@OFPMatchField.register_field_header([ofproto.OXM_OF_MPLS_TC])
class MTMplsTc(OFPMatchField):
    pack_str = '!B'

    def __init__(self, header, value, mask=None):
        super(MTMplsTc, self).__init__(header)
        self.value = value


@OFPMatchField.register_field_header([ofproto.OXM_OF_MPLS_BOS])
class MTMplsBos(OFPMatchField):
    pack_str = '!B'

    def __init__(self, header, value, mask=None):
        super(MTMplsBos, self).__init__(header)
        self.value = value


@OFPMatchField.register_field_header([ofproto.OXM_OF_PBB_ISID,
                                      ofproto.OXM_OF_PBB_ISID_W])
class MTPbbIsid(OFPMatchField):
    pack_str = '!3B'

    def __init__(self, header, value, mask=None):
        super(MTPbbIsid, self).__init__(header)
        self.value = value
        self.mask = mask

    @classmethod
    def field_parser(cls, header, buf, offset):
        hasmask = (header >> 8) & 1
        mask = None
        if ofproto.oxm_tlv_header_extract_hasmask(header):
            pack_str = '!' + cls.pack_str[1:] * 2
            (v1, v2, v3, m1, m2, m3) = struct.unpack_from(pack_str, buf,
                                                          offset + 4)
            value = v1 << 16 | v2 << 8 | v3
            mask = m1 << 16 | m2 << 8 | m3
        else:
            (v1, v2, v3,) = struct.unpack_from(cls.pack_str, buf,
                                               offset + 4)
            value = v1 << 16 | v2 << 8 | v3
        return cls(header, value, mask)

    def _put(self, buf, offset, value):
        msg_pack_into(self.pack_str, buf, offset,
                      (value >> 16) & 0xff,
                      (value >> 8) & 0xff,
                      (value >> 0) & 0xff)
        self.length += self.n_bytes


@OFPMatchField.register_field_header([ofproto.OXM_OF_TUNNEL_ID,
                                      ofproto.OXM_OF_TUNNEL_ID_W])
class MTTunnelId(OFPMatchField):
    pack_str = '!Q'

    def __init__(self, header, value, mask=None):
        super(MTTunnelId, self).__init__(header)
        self.value = value
        self.mask = mask


@OFPMatchField.register_field_header([ofproto.OXM_OF_IPV6_EXTHDR,
                                      ofproto.OXM_OF_IPV6_EXTHDR_W])
class MTIPv6ExtHdr(OFPMatchField):
    pack_str = '!H'

    def __init__(self, header, value, mask=None):
        super(MTIPv6ExtHdr, self).__init__(header)
        self.value = value
        self.mask = mask


@_register_parser
@_set_msg_type(ofproto.OFPT_PACKET_IN)
class OFPPacketIn(MsgBase):
    """
    Packet-In message

    The switch sends the packet that it received to the controller by
    this message.

    ============= =========================================================
    Attribute     Description
    ============= =========================================================
    buffer_id     ID assigned by datapath
    total_len     Full length of frame
    reason        Reason packet is being sent.

                  | OFPR_NO_MATCH
                  | OFPR_ACTION
                  | OFPR_INVALID_TTL
    table_id      ID of the table that was looked up
    cookie        Cookie of the flow entry that was looked up
    match         Instance of ``OFPMatch``
    data          Ethernet frame
    ============= =========================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
        def packet_in_handler(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto

            if msg.reason == ofp.OFPR_NO_MATCH:
                reason = 'NO MATCH'
            elif msg.reason == ofp.OFPR_ACTION:
                reason = 'ACTION'
            elif msg.reason == ofp.OFPR_INVALID_TTL:
                reason = 'INVALID TTL'
            else:
                reason = 'unknown'

            self.logger.debug('OFPPacketIn received: '
                              'buffer_id=%x total_len=%d reason=%s '
                              'table_id=%d cookie=%d match=%s data=%s',
                              msg.buffer_id, msg.total_len, reason,
                              msg.table_id, msg.cookie, msg.match,
                              utils.hex_array(msg.data))
    """

    def __init__(self, datapath, buffer_id=None, total_len=None, reason=None,
                 table_id=None, cookie=None, match=None, data=None):
        super(OFPPacketIn, self).__init__(datapath)
        self.buffer_id = buffer_id
        self.total_len = total_len
        self.reason = reason
        self.table_id = table_id
        self.cookie = cookie
        self.match = match
        self.data = data

    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPPacketIn, cls).parser(datapath, version, msg_type,
                                             msg_len, xid, buf)
        (msg.buffer_id, msg.total_len, msg.reason,
         msg.table_id, msg.cookie) = struct.unpack_from(
            ofproto.OFP_PACKET_IN_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)

        msg.match = OFPMatch.parser(msg.buf, ofproto.OFP_PACKET_IN_SIZE -
                                    ofproto.OFP_MATCH_SIZE)

        match_len = utils.round_up(msg.match.length, 8)
        msg.data = msg.buf[(ofproto.OFP_PACKET_IN_SIZE -
                            ofproto.OFP_MATCH_SIZE + match_len + 2):]

        if msg.total_len < len(msg.data):
            # discard padding for 8-byte alignment of OFP packet
            msg.data = msg.data[:msg.total_len]

        return msg


@_register_parser
@_set_msg_type(ofproto.OFPT_FLOW_REMOVED)
class OFPFlowRemoved(MsgBase):
    """
    Flow removed message

    When flow entries time out or are deleted, the switch notifies the
    controller with this message.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    cookie           Opaque controller-issued identifier
    priority         Priority level of flow entry
    reason           One of the following values.

                     | OFPRR_IDLE_TIMEOUT
                     | OFPRR_HARD_TIMEOUT
                     | OFPRR_DELETE
                     | OFPRR_GROUP_DELETE
    table_id         ID of the table
    duration_sec     Time flow was alive in seconds
    duration_nsec    Time flow was alive in nanoseconds beyond duration_sec
    idle_timeout     Idle timeout from original flow mod
    hard_timeout     Hard timeout from original flow mod
    packet_count     Number of packets that were associated with the flow
    byte_count       Number of bytes that were associated with the flow
    match            Instance of ``OFPMatch``
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPFlowRemoved, MAIN_DISPATCHER)
        def flow_removed_handler(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto

            if msg.reason == ofp.OFPRR_IDLE_TIMEOUT:
                reason = 'IDLE TIMEOUT'
            elif msg.reason == ofp.OFPRR_HARD_TIMEOUT:
                reason = 'HARD TIMEOUT'
            elif msg.reason == ofp.OFPRR_DELETE:
                reason = 'DELETE'
            elif msg.reason == ofp.OFPRR_GROUP_DELETE:
                reason = 'GROUP DELETE'
            else:
                reason = 'unknown'

            self.logger.debug('OFPFlowRemoved received: '
                              'cookie=%d priority=%d reason=%s table_id=%d '
                              'duration_sec=%d duration_nsec=%d '
                              'idle_timeout=%d hard_timeout=%d '
                              'packet_count=%d byte_count=%d match.fields=%s',
                              msg.cookie, msg.priority, reason, msg.table_id,
                              msg.duration_sec, msg.duration_nsec,
                              msg.idle_timeout, msg.hard_timeout,
                              msg.packet_count, msg.byte_count, msg.match)
    """

    def __init__(self, datapath, cookie=None, priority=None, reason=None,
                 table_id=None, duration_sec=None, duration_nsec=None,
                 idle_timeout=None, hard_timeout=None, packet_count=None,
                 byte_count=None, match=None):
        super(OFPFlowRemoved, self).__init__(datapath)
        self.cookie = cookie
        self.priority = priority
        self.reason = reason
        self.table_id = table_id
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.packet_count = packet_count
        self.byte_count = byte_count
        self.match = match

    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPFlowRemoved, cls).parser(datapath, version, msg_type,
                                                msg_len, xid, buf)

        (msg.cookie, msg.priority, msg.reason,
         msg.table_id, msg.duration_sec, msg.duration_nsec,
         msg.idle_timeout, msg.hard_timeout, msg.packet_count,
         msg.byte_count) = struct.unpack_from(
            ofproto.OFP_FLOW_REMOVED_PACK_STR0,
            msg.buf, ofproto.OFP_HEADER_SIZE)

        offset = (ofproto.OFP_FLOW_REMOVED_SIZE - ofproto.OFP_MATCH_SIZE)

        msg.match = OFPMatch.parser(msg.buf, offset)

        return msg


class OFPPort(ofproto_parser.namedtuple('OFPPort', (
        'port_no', 'hw_addr', 'name', 'config', 'state', 'curr',
        'advertised', 'supported', 'peer', 'curr_speed', 'max_speed'))):

    _TYPE = {
        'ascii': [
            'hw_addr',
        ],
        'utf-8': [
            # OF spec is unclear about the encoding of name.
            # we assume UTF-8, which is used by OVS.
            'name',
        ]
    }

    @classmethod
    def parser(cls, buf, offset):
        port = struct.unpack_from(ofproto.OFP_PORT_PACK_STR, buf, offset)
        port = list(port)
        i = cls._fields.index('hw_addr')
        port[i] = addrconv.mac.bin_to_text(port[i])
        i = cls._fields.index('name')
        port[i] = port[i].rstrip('\0')
        ofpport = cls(*port)
        ofpport.length = ofproto.OFP_PORT_SIZE
        return ofpport


@_register_parser
@_set_msg_type(ofproto.OFPT_PORT_STATUS)
class OFPPortStatus(MsgBase):
    """
    Port status message

    The switch notifies the controller of changes to ports.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    reason           One of the following values.

                     | OFPPR_ADD
                     | OFPPR_DELETE
                     | OFPPR_MODIFY
    desc             Instance of ``OFPPort``
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPPortStatus, MAIN_DISPATCHER)
        def port_status_handler(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto

            if msg.reason == ofp.OFPPR_ADD:
                reason = 'ADD'
            elif msg.reason == ofp.OFPPR_DELETE:
                reason = 'DELETE'
            elif msg.reason == ofp.OFPPR_MODIFY:
                reason = 'MODIFY'
            else:
                reason = 'unknown'

            self.logger.debug('OFPPortStatus received: reason=%s desc=%s',
                              reason, msg.desc)
    """

    def __init__(self, datapath, reason=None, desc=None):
        super(OFPPortStatus, self).__init__(datapath)
        self.reason = reason
        self.desc = desc

    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPPortStatus, cls).parser(datapath, version, msg_type,
                                               msg_len, xid, buf)
        msg.reason = struct.unpack_from(
            ofproto.OFP_PORT_STATUS_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)[0]
        msg.desc = OFPPort.parser(msg.buf,
                                  ofproto.OFP_PORT_STATUS_DESC_OFFSET)
        return msg


@_set_msg_type(ofproto.OFPT_PACKET_OUT)
class OFPPacketOut(MsgBase):
    """
    Packet-Out message

    The controller uses this message to send a packet out through the
    switch.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    buffer_id        ID assigned by datapath (OFP_NO_BUFFER if none)
    in_port          Packet's input port or ``OFPP_CONTROLLER``
    actions          list of OpenFlow action class
    data             Packet data
    ================ ======================================================

    Example::

        def send_packet_out(self, datapath, buffer_id, in_port):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser

            actions = [ofp_parser.OFPActionOutput(ofp.OFPP_FLOOD, 0)]
            req = ofp_parser.OFPPacketOut(datapath, buffer_id,
                                          in_port, actions)
            datapath.send_msg(req)
    """

    def __init__(self, datapath, buffer_id=None, in_port=None, actions=None,
                 data=None, actions_len=None):
        assert in_port is not None

        super(OFPPacketOut, self).__init__(datapath)
        self.buffer_id = buffer_id
        self.in_port = in_port
        self.actions_len = 0
        self.actions = actions
        self.data = data

    def _serialize_body(self):
        self.actions_len = 0
        offset = ofproto.OFP_PACKET_OUT_SIZE
        for a in self.actions:
            a.serialize(self.buf, offset)
            offset += a.len
            self.actions_len += a.len

        if self.data is not None:
            assert self.buffer_id == 0xffffffff
            self.buf += self.data

        msg_pack_into(ofproto.OFP_PACKET_OUT_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.buffer_id, self.in_port, self.actions_len)


@_set_msg_type(ofproto.OFPT_FLOW_MOD)
class OFPFlowMod(MsgBase):
    """
    Modify Flow entry message

    The controller sends this message to modify the flow table.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    cookie           Opaque controller-issued identifier
    cookie_mask      Mask used to restrict the cookie bits that must match
                     when the command is ``OFPFC_MODIFY*`` or
                     ``OFPFC_DELETE*``
    table_id         ID of the table to put the flow in
    command          One of the following values.
| OFPFC_ADD | OFPFC_MODIFY | OFPFC_MODIFY_STRICT | OFPFC_DELETE | OFPFC_DELETE_STRICT idle_timeout Idle time before discarding (seconds) hard_timeout Max time before discarding (seconds) priority Priority level of flow entry buffer_id Buffered packet to apply to (or OFP_NO_BUFFER) out_port For ``OFPFC_DELETE*`` commands, require matching entries to include this as an output port out_group For ``OFPFC_DELETE*`` commands, require matching entries to include this as an output group flags Bitmap of the following flags. | OFPFF_SEND_FLOW_REM | OFPFF_CHECK_OVERLAP | OFPFF_RESET_COUNTS | OFPFF_NO_PKT_COUNTS | OFPFF_NO_BYT_COUNTS match Instance of ``OFPMatch`` instructions list of ``OFPInstruction*`` instance ================ ====================================================== Example:: def send_flow_mod(self, datapath): ofp = datapath.ofproto ofp_parser = datapath.ofproto_parser cookie = cookie_mask = 0 table_id = 0 idle_timeout = hard_timeout = 0 priority = 32768 buffer_id = ofp.OFP_NO_BUFFER match = ofp_parser.OFPMatch(in_port=1, eth_dst='ff:ff:ff:ff:ff:ff') actions = [ofp_parser.OFPActionOutput(ofp.OFPP_NORMAL, 0)] inst = [ofp_parser.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions)] req = ofp_parser.OFPFlowMod(datapath, cookie, cookie_mask, table_id, ofp.OFPFC_ADD, idle_timeout, hard_timeout, priority, buffer_id, ofp.OFPP_ANY, ofp.OFPG_ANY, ofp.OFPFF_SEND_FLOW_REM, match, inst) datapath.send_msg(req) """ def __init__(self, datapath, cookie=0, cookie_mask=0, table_id=0, command=ofproto.OFPFC_ADD, idle_timeout=0, hard_timeout=0, priority=ofproto.OFP_DEFAULT_PRIORITY, buffer_id=ofproto.OFP_NO_BUFFER, out_port=0, out_group=0, flags=0, match=None, instructions=[]): super(OFPFlowMod, self).__init__(datapath) self.cookie = cookie self.cookie_mask = cookie_mask self.table_id = table_id self.command = command self.idle_timeout = idle_timeout self.hard_timeout = hard_timeout self.priority = priority self.buffer_id = buffer_id self.out_port = out_port self.out_group = out_group self.flags = flags if match is None: match = OFPMatch() assert isinstance(match, OFPMatch) self.match = match for i in instructions: assert isinstance(i, OFPInstruction) self.instructions = instructions def _serialize_body(self): msg_pack_into(ofproto.OFP_FLOW_MOD_PACK_STR0, self.buf, ofproto.OFP_HEADER_SIZE, self.cookie, self.cookie_mask, self.table_id, self.command, self.idle_timeout, self.hard_timeout, self.priority, self.buffer_id, self.out_port, self.out_group, self.flags) offset = (ofproto.OFP_FLOW_MOD_SIZE - ofproto.OFP_MATCH_SIZE) match_len = self.match.serialize(self.buf, offset) offset += match_len for inst in self.instructions: inst.serialize(self.buf, offset) offset += inst.len class OFPInstruction(StringifyMixin): _INSTRUCTION_TYPES = {} @staticmethod def register_instruction_type(types): def _register_instruction_type(cls): for type_ in types: OFPInstruction._INSTRUCTION_TYPES[type_] = cls return cls return _register_instruction_type @classmethod def parser(cls, buf, offset): (type_, len_) = struct.unpack_from('!HH', buf, offset) cls_ = cls._INSTRUCTION_TYPES.get(type_) return cls_.parser(buf, offset) @OFPInstruction.register_instruction_type([ofproto.OFPIT_GOTO_TABLE]) class OFPInstructionGotoTable(OFPInstruction): """ Goto table instruction This instruction indicates the next table in the processing pipeline. 
================ ====================================================== Attribute Description ================ ====================================================== table_id Next table ================ ====================================================== """ def __init__(self, table_id, type_=None, len_=None): super(OFPInstructionGotoTable, self).__init__() self.type = ofproto.OFPIT_GOTO_TABLE self.len = ofproto.OFP_INSTRUCTION_GOTO_TABLE_SIZE self.table_id = table_id @classmethod def parser(cls, buf, offset): (type_, len_, table_id) = struct.unpack_from( ofproto.OFP_INSTRUCTION_GOTO_TABLE_PACK_STR, buf, offset) return cls(table_id) def serialize(self, buf, offset): msg_pack_into(ofproto.OFP_INSTRUCTION_GOTO_TABLE_PACK_STR, buf, offset, self.type, self.len, self.table_id) @OFPInstruction.register_instruction_type([ofproto.OFPIT_WRITE_METADATA]) class OFPInstructionWriteMetadata(OFPInstruction): """ Write metadata instruction This instruction writes the masked metadata value into the metadata field. ================ ====================================================== Attribute Description ================ ====================================================== metadata Metadata value to write metadata_mask Metadata write bitmask ================ ====================================================== """ def __init__(self, metadata, metadata_mask, type_=None, len_=None): super(OFPInstructionWriteMetadata, self).__init__() self.type = ofproto.OFPIT_WRITE_METADATA self.len = ofproto.OFP_INSTRUCTION_WRITE_METADATA_SIZE self.metadata = metadata self.metadata_mask = metadata_mask @classmethod def parser(cls, buf, offset): (type_, len_, metadata, metadata_mask) = struct.unpack_from( ofproto.OFP_INSTRUCTION_WRITE_METADATA_PACK_STR, buf, offset) return cls(metadata, metadata_mask) def serialize(self, buf, offset): msg_pack_into(ofproto.OFP_INSTRUCTION_WRITE_METADATA_PACK_STR, buf, offset, self.type, self.len, self.metadata, self.metadata_mask) @OFPInstruction.register_instruction_type([ofproto.OFPIT_WRITE_ACTIONS, ofproto.OFPIT_APPLY_ACTIONS, ofproto.OFPIT_CLEAR_ACTIONS]) class OFPInstructionActions(OFPInstruction): """ Actions instruction This instruction writes/applies/clears the actions. ================ ====================================================== Attribute Description ================ ====================================================== type One of following values. | OFPIT_WRITE_ACTIONS | OFPIT_APPLY_ACTIONS | OFPIT_CLEAR_ACTIONS actions list of OpenFlow action class ================ ====================================================== ``type`` attribute corresponds to ``type_`` parameter of __init__. 
""" def __init__(self, type_, actions=None, len_=None): super(OFPInstructionActions, self).__init__() self.type = type_ for a in actions: assert isinstance(a, OFPAction) self.actions = actions @classmethod def parser(cls, buf, offset): (type_, len_) = struct.unpack_from( ofproto.OFP_INSTRUCTION_ACTIONS_PACK_STR, buf, offset) offset += ofproto.OFP_INSTRUCTION_ACTIONS_SIZE actions = [] actions_len = len_ - ofproto.OFP_INSTRUCTION_ACTIONS_SIZE while actions_len > 0: a = OFPAction.parser(buf, offset) actions.append(a) actions_len -= a.len offset += a.len inst = cls(type_, actions) inst.len = len_ return inst def serialize(self, buf, offset): action_offset = offset + ofproto.OFP_INSTRUCTION_ACTIONS_SIZE if self.actions: for a in self.actions: a.serialize(buf, action_offset) action_offset += a.len self.len = action_offset - offset pad_len = utils.round_up(self.len, 8) - self.len msg_pack_into("%dx" % pad_len, buf, action_offset) self.len += pad_len msg_pack_into(ofproto.OFP_INSTRUCTION_ACTIONS_PACK_STR, buf, offset, self.type, self.len) @OFPInstruction.register_instruction_type([ofproto.OFPIT_METER]) class OFPInstructionMeter(OFPInstruction): """ Meter instruction This instruction applies the meter. ================ ====================================================== Attribute Description ================ ====================================================== meter_id Meter instance ================ ====================================================== """ def __init__(self, meter_id=1, type_=None, len_=None): super(OFPInstructionMeter, self).__init__() self.type = ofproto.OFPIT_METER self.len = ofproto.OFP_INSTRUCTION_METER_SIZE self.meter_id = meter_id @classmethod def parser(cls, buf, offset): (type_, len_, meter_id) = struct.unpack_from( ofproto.OFP_INSTRUCTION_METER_PACK_STR, buf, offset) return cls(meter_id) def serialize(self, buf, offset): msg_pack_into(ofproto.OFP_INSTRUCTION_METER_PACK_STR, buf, offset, self.type, self.len, self.meter_id) class OFPActionHeader(StringifyMixin): def __init__(self, type_, len_): self.type = type_ self.len = len_ def serialize(self, buf, offset): msg_pack_into(ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset, self.type, self.len) class OFPAction(OFPActionHeader): _ACTION_TYPES = {} @staticmethod def register_action_type(type_, len_): def _register_action_type(cls): cls.cls_action_type = type_ cls.cls_action_len = len_ OFPAction._ACTION_TYPES[cls.cls_action_type] = cls return cls return _register_action_type def __init__(self): cls = self.__class__ super(OFPAction, self).__init__(cls.cls_action_type, cls.cls_action_len) @classmethod def parser(cls, buf, offset): type_, len_ = struct.unpack_from( ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset) cls_ = cls._ACTION_TYPES.get(type_) assert cls_ is not None return cls_.parser(buf, offset) @OFPAction.register_action_type(ofproto.OFPAT_OUTPUT, ofproto.OFP_ACTION_OUTPUT_SIZE) class OFPActionOutput(OFPAction): """ Output action This action indicates output a packet to the switch port. 
================ ====================================================== Attribute Description ================ ====================================================== port Output port max_len Max length to send to controller ================ ====================================================== """ def __init__(self, port, max_len=ofproto.OFPCML_MAX, type_=None, len_=None): super(OFPActionOutput, self).__init__() self.port = port self.max_len = max_len @classmethod def parser(cls, buf, offset): type_, len_, port, max_len = struct.unpack_from( ofproto.OFP_ACTION_OUTPUT_PACK_STR, buf, offset) return cls(port, max_len) def serialize(self, buf, offset): msg_pack_into(ofproto.OFP_ACTION_OUTPUT_PACK_STR, buf, offset, self.type, self.len, self.port, self.max_len) @OFPAction.register_action_type(ofproto.OFPAT_GROUP, ofproto.OFP_ACTION_GROUP_SIZE) class OFPActionGroup(OFPAction): """ Group action This action indicates the group used to process the packet. ================ ====================================================== Attribute Description ================ ====================================================== group_id Group identifier ================ ====================================================== """ def __init__(self, group_id=0, type_=None, len_=None): super(OFPActionGroup, self).__init__() self.group_id = group_id @classmethod def parser(cls, buf, offset): (type_, len_, group_id) = struct.unpack_from( ofproto.OFP_ACTION_GROUP_PACK_STR, buf, offset) return cls(group_id) def serialize(self, buf, offset): msg_pack_into(ofproto.OFP_ACTION_GROUP_PACK_STR, buf, offset, self.type, self.len, self.group_id) @OFPAction.register_action_type(ofproto.OFPAT_SET_QUEUE, ofproto.OFP_ACTION_SET_QUEUE_SIZE) class OFPActionSetQueue(OFPAction): """ Set queue action This action sets the queue id that will be used to map a flow to an already-configured queue on a port. ================ ====================================================== Attribute Description ================ ====================================================== queue_id Queue ID for the packets ================ ====================================================== """ def __init__(self, queue_id, type_=None, len_=None): super(OFPActionSetQueue, self).__init__() self.queue_id = queue_id @classmethod def parser(cls, buf, offset): (type_, len_, queue_id) = struct.unpack_from( ofproto.OFP_ACTION_SET_QUEUE_PACK_STR, buf, offset) return cls(queue_id) def serialize(self, buf, offset): msg_pack_into(ofproto.OFP_ACTION_SET_QUEUE_PACK_STR, buf, offset, self.type, self.len, self.queue_id) @OFPAction.register_action_type(ofproto.OFPAT_SET_MPLS_TTL, ofproto.OFP_ACTION_MPLS_TTL_SIZE) class OFPActionSetMplsTtl(OFPAction): """ Set MPLS TTL action This action sets the MPLS TTL. 
================ ====================================================== Attribute Description ================ ====================================================== mpls_ttl MPLS TTL ================ ====================================================== """ def __init__(self, mpls_ttl, type_=None, len_=None): super(OFPActionSetMplsTtl, self).__init__() self.mpls_ttl = mpls_ttl @classmethod def parser(cls, buf, offset): (type_, len_, mpls_ttl) = struct.unpack_from( ofproto.OFP_ACTION_MPLS_TTL_PACK_STR, buf, offset) return cls(mpls_ttl) def serialize(self, buf, offset): msg_pack_into(ofproto.OFP_ACTION_MPLS_TTL_PACK_STR, buf, offset, self.type, self.len, self.mpls_ttl) @OFPAction.register_action_type(ofproto.OFPAT_DEC_MPLS_TTL, ofproto.OFP_ACTION_HEADER_SIZE) class OFPActionDecMplsTtl(OFPAction): """ Decrement MPLS TTL action This action decrements the MPLS TTL. """ def __init__(self, type_=None, len_=None): super(OFPActionDecMplsTtl, self).__init__() @classmethod def parser(cls, buf, offset): (type_, len_) = struct.unpack_from( ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset) return cls() @OFPAction.register_action_type(ofproto.OFPAT_SET_NW_TTL, ofproto.OFP_ACTION_NW_TTL_SIZE) class OFPActionSetNwTtl(OFPAction): """ Set IP TTL action This action sets the IP TTL. ================ ====================================================== Attribute Description ================ ====================================================== nw_ttl IP TTL ================ ====================================================== """ def __init__(self, nw_ttl, type_=None, len_=None): super(OFPActionSetNwTtl, self).__init__() self.nw_ttl = nw_ttl @classmethod def parser(cls, buf, offset): (type_, len_, nw_ttl) = struct.unpack_from( ofproto.OFP_ACTION_NW_TTL_PACK_STR, buf, offset) return cls(nw_ttl) def serialize(self, buf, offset): msg_pack_into(ofproto.OFP_ACTION_NW_TTL_PACK_STR, buf, offset, self.type, self.len, self.nw_ttl) @OFPAction.register_action_type(ofproto.OFPAT_DEC_NW_TTL, ofproto.OFP_ACTION_HEADER_SIZE) class OFPActionDecNwTtl(OFPAction): """ Decrement IP TTL action This action decrements the IP TTL. """ def __init__(self, type_=None, len_=None): super(OFPActionDecNwTtl, self).__init__() @classmethod def parser(cls, buf, offset): (type_, len_) = struct.unpack_from( ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset) return cls() @OFPAction.register_action_type(ofproto.OFPAT_COPY_TTL_OUT, ofproto.OFP_ACTION_HEADER_SIZE) class OFPActionCopyTtlOut(OFPAction): """ Copy TTL Out action This action copies the TTL from the next-to-outermost header with TTL to the outermost header with TTL. """ def __init__(self, type_=None, len_=None): super(OFPActionCopyTtlOut, self).__init__() @classmethod def parser(cls, buf, offset): (type_, len_) = struct.unpack_from( ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset) return cls() @OFPAction.register_action_type(ofproto.OFPAT_COPY_TTL_IN, ofproto.OFP_ACTION_HEADER_SIZE) class OFPActionCopyTtlIn(OFPAction): """ Copy TTL In action This action copies the TTL from the outermost header with TTL to the next-to-outermost header with TTL. """ def __init__(self, type_=None, len_=None): super(OFPActionCopyTtlIn, self).__init__() @classmethod def parser(cls, buf, offset): (type_, len_) = struct.unpack_from( ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset) return cls() @OFPAction.register_action_type(ofproto.OFPAT_PUSH_VLAN, ofproto.OFP_ACTION_PUSH_SIZE) class OFPActionPushVlan(OFPAction): """ Push VLAN action This action pushes a new VLAN tag to the packet. 
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    ethertype        Ether type.  The default is 802.1Q (0x8100).
    ================ ======================================================
    """
    def __init__(self, ethertype=ether.ETH_TYPE_8021Q, type_=None,
                 len_=None):
        super(OFPActionPushVlan, self).__init__()
        self.ethertype = ethertype

    @classmethod
    def parser(cls, buf, offset):
        (type_, len_, ethertype) = struct.unpack_from(
            ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset)
        return cls(ethertype)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset,
                      self.type, self.len, self.ethertype)


@OFPAction.register_action_type(ofproto.OFPAT_PUSH_MPLS,
                                ofproto.OFP_ACTION_PUSH_SIZE)
class OFPActionPushMpls(OFPAction):
    """
    Push MPLS action

    This action pushes a new MPLS header to the packet.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    ethertype        Ether type
    ================ ======================================================
    """
    def __init__(self, ethertype=ether.ETH_TYPE_MPLS, type_=None,
                 len_=None):
        super(OFPActionPushMpls, self).__init__()
        self.ethertype = ethertype

    @classmethod
    def parser(cls, buf, offset):
        (type_, len_, ethertype) = struct.unpack_from(
            ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset)
        return cls(ethertype)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset,
                      self.type, self.len, self.ethertype)


@OFPAction.register_action_type(ofproto.OFPAT_POP_VLAN,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionPopVlan(OFPAction):
    """
    Pop VLAN action

    This action pops the outermost VLAN tag from the packet.
    """
    def __init__(self, type_=None, len_=None):
        super(OFPActionPopVlan, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        (type_, len_) = struct.unpack_from(
            ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()


@OFPAction.register_action_type(ofproto.OFPAT_POP_MPLS,
                                ofproto.OFP_ACTION_POP_MPLS_SIZE)
class OFPActionPopMpls(OFPAction):
    """
    Pop MPLS action

    This action pops the MPLS header from the packet.
    """
    def __init__(self, ethertype=ether.ETH_TYPE_IP, type_=None, len_=None):
        super(OFPActionPopMpls, self).__init__()
        self.ethertype = ethertype

    @classmethod
    def parser(cls, buf, offset):
        (type_, len_, ethertype) = struct.unpack_from(
            ofproto.OFP_ACTION_POP_MPLS_PACK_STR, buf, offset)
        return cls(ethertype)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_POP_MPLS_PACK_STR, buf, offset,
                      self.type, self.len, self.ethertype)


@OFPAction.register_action_type(ofproto.OFPAT_SET_FIELD,
                                ofproto.OFP_ACTION_SET_FIELD_SIZE)
class OFPActionSetField(OFPAction):
    """
    Set field action

    This action modifies a header field in the packet.

    The set of keywords available for this is the same as for OFPMatch.

    Example::

        set_field = OFPActionSetField(eth_src="00:00:00:00:00:00")
    """
    def __init__(self, field=None, **kwargs):
        # old api
        #   OFPActionSetField(field)
        # new api
        #   OFPActionSetField(eth_src="00:00:00:00:00:00")
        super(OFPActionSetField, self).__init__()
        if isinstance(field, OFPMatchField):
            # old api compat
            assert len(kwargs) == 0
            self.field = field
        else:
            # new api
            assert len(kwargs) == 1
            key = kwargs.keys()[0]
            value = kwargs[key]
            assert isinstance(key, (str, unicode))
            assert not isinstance(value, tuple)  # no mask
            self.key = key
            self.value = value

    @classmethod
    def parser(cls, buf, offset):
        (type_, len_) = struct.unpack_from(
            ofproto.OFP_ACTION_SET_FIELD_PACK_STR, buf, offset)
        (n, value, mask, _len) = ofproto.oxm_parse(buf, offset + 4)
        k, uv = ofproto.oxm_to_user(n, value, mask)
        action = cls(**{k: uv})
        action.len = len_

        # old api compat
        action.field = OFPMatchField.parser(buf, offset + 4)

        return action

    def serialize(self, buf, offset):
        # old api compat
        if self._composed_with_old_api():
            return self.serialize_old(buf, offset)

        n, value, mask = ofproto.oxm_from_user(self.key, self.value)
        len_ = ofproto.oxm_serialize(n, value, mask, buf, offset + 4)
        self.len = utils.round_up(4 + len_, 8)
        msg_pack_into('!HH', buf, offset, self.type, self.len)
        pad_len = self.len - (4 + len_)
        msg_pack_into("%dx" % pad_len, buf, offset + 4 + len_)

    # XXX old api compat
    def serialize_old(self, buf, offset):
        len_ = ofproto.OFP_ACTION_SET_FIELD_SIZE + self.field.oxm_len()
        self.len = utils.round_up(len_, 8)
        pad_len = self.len - len_

        msg_pack_into('!HH', buf, offset, self.type, self.len)
        self.field.serialize(buf, offset + 4)
        offset += len_
        msg_pack_into("%dx" % pad_len, buf, offset)

    # XXX old api compat
    def _composed_with_old_api(self):
        return not hasattr(self, 'value')

    def to_jsondict(self):
        # XXX old api compat
        if self._composed_with_old_api():
            # copy object first because serialize_old is destructive
            o2 = OFPActionSetField(self.field)
            # serialize and parse to fill new fields
            buf = bytearray()
            o2.serialize(buf, 0)
            o = OFPActionSetField.parser(str(buf), 0)
        else:
            o = self
        return {
            self.__class__.__name__: {
                'field': ofproto.oxm_to_jsondict(o.key, o.value)
            }
        }

    @classmethod
    def from_jsondict(cls, dict_):
        k, v = ofproto.oxm_from_jsondict(dict_['field'])
        o = OFPActionSetField(**{k: v})

        # XXX old api compat
        # serialize and parse to fill old attributes
        buf = bytearray()
        o.serialize(buf, 0)
        return OFPActionSetField.parser(str(buf), 0)

    # XXX old api compat
    def __str__(self):
        # XXX old api compat
        if self._composed_with_old_api():
            # copy object first because serialize_old is destructive
            o2 = OFPActionSetField(self.field)
            # serialize and parse to fill new fields
            buf = bytearray()
            o2.serialize(buf, 0)
            o = OFPActionSetField.parser(str(buf), 0)
        else:
            o = self
        return super(OFPActionSetField, o).__str__()

    __repr__ = __str__

    def stringify_attrs(self):
        yield (self.key, self.value)


@OFPAction.register_action_type(ofproto.OFPAT_PUSH_PBB,
                                ofproto.OFP_ACTION_PUSH_SIZE)
class OFPActionPushPbb(OFPAction):
    """
    Push PBB action

    This action pushes a new PBB header to the packet.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    ethertype        Ether type
    ================ ======================================================
    """
    def __init__(self, ethertype, type_=None, len_=None):
        super(OFPActionPushPbb, self).__init__()
        self.ethertype = ethertype

    @classmethod
    def parser(cls, buf, offset):
        (type_, len_, ethertype) = struct.unpack_from(
            ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset)
        return cls(ethertype)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset,
                      self.type, self.len, self.ethertype)


@OFPAction.register_action_type(ofproto.OFPAT_POP_PBB,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionPopPbb(OFPAction):
    """
    Pop PBB action

    This action pops the outermost PBB service instance header from
    the packet.
    """
    def __init__(self, type_=None, len_=None):
        super(OFPActionPopPbb, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        (type_, len_) = struct.unpack_from(
            ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()


@OFPAction.register_action_type(
    ofproto.OFPAT_EXPERIMENTER,
    ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE)
class OFPActionExperimenter(OFPAction):
    """
    Experimenter action

    This action is an extensible action for the experimenter.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    experimenter     Experimenter ID
    ================ ======================================================
    """
    def __init__(self, experimenter):
        super(OFPActionExperimenter, self).__init__()
        self.type = ofproto.OFPAT_EXPERIMENTER
        self.experimenter = experimenter
        self.len = None

    @classmethod
    def parser(cls, buf, offset):
        (type_, len_, experimenter) = struct.unpack_from(
            ofproto.OFP_ACTION_EXPERIMENTER_HEADER_PACK_STR, buf, offset)
        data = buf[(offset + ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                    ): offset + len_]
        if experimenter == ofproto_common.NX_EXPERIMENTER_ID:
            obj = NXAction.parse(data)
        else:
            obj = OFPActionExperimenterUnknown(experimenter, data)
        obj.len = len_
        return obj

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_EXPERIMENTER_HEADER_PACK_STR,
                      buf, offset, self.type, self.len, self.experimenter)


class OFPActionExperimenterUnknown(OFPActionExperimenter):
    def __init__(self, experimenter, data=None, type_=None, len_=None):
        super(OFPActionExperimenterUnknown,
              self).__init__(experimenter=experimenter)
        self.data = data

    def serialize(self, buf, offset):
        # fixup
        data = self.data
        if data is None:
            data = bytearray()
        self.len = (utils.round_up(len(data), 8) +
                    ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE)
        super(OFPActionExperimenterUnknown, self).serialize(buf, offset)
        msg_pack_into('!%ds' % len(data), buf,
                      offset + ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE,
                      data)


class OFPBucket(StringifyMixin):
    def __init__(self, weight=0, watch_port=ofproto.OFPP_ANY,
                 watch_group=ofproto.OFPG_ANY, actions=None, len_=None):
        super(OFPBucket, self).__init__()
        self.weight = weight
        self.watch_port = watch_port
        self.watch_group = watch_group
        self.actions = actions

    @classmethod
    def parser(cls, buf, offset):
        (len_, weight, watch_port, watch_group) = struct.unpack_from(
            ofproto.OFP_BUCKET_PACK_STR, buf, offset)
        msg = cls(weight, watch_port, watch_group, [])
        msg.len = len_
        length = ofproto.OFP_BUCKET_SIZE
        offset += ofproto.OFP_BUCKET_SIZE
        while length < msg.len:
            action = OFPAction.parser(buf, offset)
            msg.actions.append(action)
            offset += action.len
            length += action.len

        return msg

    def serialize(self, buf, offset):
        action_offset = offset + ofproto.OFP_BUCKET_SIZE
        action_len = 0
        for a in self.actions:
            a.serialize(buf, action_offset)
            action_offset += a.len
            action_len += a.len

        self.len = utils.round_up(ofproto.OFP_BUCKET_SIZE + action_len, 8)
        msg_pack_into(ofproto.OFP_BUCKET_PACK_STR, buf, offset,
                      self.len, self.weight, self.watch_port,
                      self.watch_group)


@_set_msg_type(ofproto.OFPT_GROUP_MOD)
class OFPGroupMod(MsgBase):
    """
    Modify group entry message

    The controller sends this message to modify the group table.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    command          One of the following values.

                     | OFPGC_ADD
                     | OFPGC_MODIFY
                     | OFPGC_DELETE
    type             One of the following values.

                     | OFPGT_ALL
                     | OFPGT_SELECT
                     | OFPGT_INDIRECT
                     | OFPGT_FF
    group_id         Group identifier
    buckets          list of ``OFPBucket``
    ================ ======================================================

    ``type`` attribute corresponds to ``type_`` parameter of __init__.

    Example::

        def send_group_mod(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser

            port = 1
            max_len = 2000
            actions = [ofp_parser.OFPActionOutput(port, max_len)]

            weight = 100
            watch_port = 0
            watch_group = 0
            buckets = [ofp_parser.OFPBucket(weight, watch_port, watch_group,
                                            actions)]

            group_id = 1
            req = ofp_parser.OFPGroupMod(datapath, ofp.OFPGC_ADD,
                                         ofp.OFPGT_SELECT, group_id, buckets)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, command=ofproto.OFPGC_ADD,
                 type_=ofproto.OFPGT_ALL, group_id=0, buckets=[]):
        super(OFPGroupMod, self).__init__(datapath)
        self.command = command
        self.type = type_
        self.group_id = group_id
        self.buckets = buckets

    def _serialize_body(self):
        msg_pack_into(ofproto.OFP_GROUP_MOD_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.command, self.type, self.group_id)

        offset = ofproto.OFP_GROUP_MOD_SIZE
        for b in self.buckets:
            b.serialize(self.buf, offset)
            offset += b.len


@_set_msg_type(ofproto.OFPT_PORT_MOD)
class OFPPortMod(MsgBase):
    """
    Port modification message

    The controller sends this message to modify the behavior of the port.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    port_no          Port number to modify
    hw_addr          The hardware address that must be the same as hw_addr
                     of ``OFPPort`` of ``OFPSwitchFeatures``
    config           Bitmap of configuration flags.

                     | OFPPC_PORT_DOWN
                     | OFPPC_NO_RECV
                     | OFPPC_NO_FWD
                     | OFPPC_NO_PACKET_IN
    mask             Bitmap of configuration flags above to be changed
    advertise        Bitmap of the following flags.

                     | OFPPF_10MB_HD
                     | OFPPF_10MB_FD
                     | OFPPF_100MB_HD
                     | OFPPF_100MB_FD
                     | OFPPF_1GB_HD
                     | OFPPF_1GB_FD
                     | OFPPF_10GB_FD
                     | OFPPF_40GB_FD
                     | OFPPF_100GB_FD
                     | OFPPF_1TB_FD
                     | OFPPF_OTHER
                     | OFPPF_COPPER
                     | OFPPF_FIBER
                     | OFPPF_AUTONEG
                     | OFPPF_PAUSE
                     | OFPPF_PAUSE_ASYM
    ================ ======================================================

    Example::

        def send_port_mod(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser

            port_no = 3
            hw_addr = 'fa:c8:e8:76:1d:7e'
            config = 0
            mask = (ofp.OFPPC_PORT_DOWN | ofp.OFPPC_NO_RECV |
                    ofp.OFPPC_NO_FWD | ofp.OFPPC_NO_PACKET_IN)
            advertise = (ofp.OFPPF_10MB_HD | ofp.OFPPF_100MB_FD |
                         ofp.OFPPF_1GB_FD | ofp.OFPPF_COPPER |
                         ofp.OFPPF_AUTONEG | ofp.OFPPF_PAUSE |
                         ofp.OFPPF_PAUSE_ASYM)
            req = ofp_parser.OFPPortMod(datapath, port_no, hw_addr, config,
                                        mask, advertise)
            datapath.send_msg(req)
    """

    _TYPE = {
        'ascii': [
            'hw_addr',
        ]
    }

    def __init__(self, datapath, port_no=0, hw_addr='00:00:00:00:00:00',
                 config=0, mask=0, advertise=0):
        super(OFPPortMod, self).__init__(datapath)
        self.port_no = port_no
        self.hw_addr = hw_addr
        self.config = config
        self.mask = mask
        self.advertise = advertise

    def _serialize_body(self):
        msg_pack_into(ofproto.OFP_PORT_MOD_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.port_no,
                      addrconv.mac.text_to_bin(self.hw_addr),
                      self.config,
                      self.mask, self.advertise)


@_set_msg_type(ofproto.OFPT_METER_MOD)
class OFPMeterMod(MsgBase):
    """
    Meter modification message

    The controller sends this message to modify the meter.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    command          One of the following values.

                     | OFPMC_ADD
                     | OFPMC_MODIFY
                     | OFPMC_DELETE
    flags            Bitmap of the following flags.

                     | OFPMF_KBPS
                     | OFPMF_PKTPS
                     | OFPMF_BURST
                     | OFPMF_STATS
    meter_id         Meter instance
    bands            list of the following class instances.

                     | OFPMeterBandDrop
                     | OFPMeterBandDscpRemark
                     | OFPMeterBandExperimenter
    ================ ======================================================
    """
    def __init__(self, datapath, command=ofproto.OFPMC_ADD,
                 flags=ofproto.OFPMF_KBPS, meter_id=1, bands=[]):
        super(OFPMeterMod, self).__init__(datapath)
        self.command = command
        self.flags = flags
        self.meter_id = meter_id
        self.bands = bands

    def _serialize_body(self):
        msg_pack_into(ofproto.OFP_METER_MOD_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.command, self.flags, self.meter_id)

        offset = ofproto.OFP_METER_MOD_SIZE
        for b in self.bands:
            b.serialize(self.buf, offset)
            offset += b.len


@_set_msg_type(ofproto.OFPT_TABLE_MOD)
class OFPTableMod(MsgBase):
    """
    Flow table configuration message

    The controller sends this message to configure table state.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    table_id         ID of the table (OFPTT_ALL indicates all tables)
    config           Bitmap of the following flags.

OFPTC_DEPRECATED_MASK (3) ================ ====================================================== Example:: def send_table_mod(self, datapath): ofp = datapath.ofproto ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPTableMod(datapath, 1, 3) datapath.send_msg(req) """ def __init__(self, datapath, table_id, config): super(OFPTableMod, self).__init__(datapath) self.table_id = table_id self.config = config def _serialize_body(self): msg_pack_into(ofproto.OFP_TABLE_MOD_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.table_id, self.config) def _set_stats_type(stats_type, stats_body_cls): def _set_cls_stats_type(cls): cls.cls_stats_type = stats_type cls.cls_stats_body_cls = stats_body_cls return cls return _set_cls_stats_type @_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST) class OFPMultipartRequest(MsgBase): def __init__(self, datapath, flags): super(OFPMultipartRequest, self).__init__(datapath) self.type = self.__class__.cls_stats_type self.flags = flags def _serialize_stats_body(self): pass def _serialize_body(self): msg_pack_into(ofproto.OFP_MULTIPART_REQUEST_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.type, self.flags) self._serialize_stats_body() @_register_parser @_set_msg_type(ofproto.OFPT_MULTIPART_REPLY) class OFPMultipartReply(MsgBase): _STATS_MSG_TYPES = {} @staticmethod def register_stats_type(body_single_struct=False): def _register_stats_type(cls): assert cls.cls_stats_type is not None assert cls.cls_stats_type not in OFPMultipartReply._STATS_MSG_TYPES assert cls.cls_stats_body_cls is not None cls.cls_body_single_struct = body_single_struct OFPMultipartReply._STATS_MSG_TYPES[cls.cls_stats_type] = cls return cls return _register_stats_type def __init__(self, datapath, body=None, flags=None): super(OFPMultipartReply, self).__init__(datapath) self.body = body self.flags = flags @classmethod def parser_stats_body(cls, buf, msg_len, offset): body_cls = cls.cls_stats_body_cls body = [] while offset < msg_len: entry = body_cls.parser(buf, offset) body.append(entry) offset += entry.length if cls.cls_body_single_struct: return body[0] return body @classmethod def parser_stats(cls, datapath, version, msg_type, msg_len, xid, buf): msg = MsgBase.parser.__func__( cls, datapath, version, msg_type, msg_len, xid, buf) msg.body = msg.parser_stats_body(msg.buf, msg.msg_len, ofproto.OFP_MULTIPART_REPLY_SIZE) return msg @classmethod def parser(cls, datapath, version, msg_type, msg_len, xid, buf): type_, flags = struct.unpack_from( ofproto.OFP_MULTIPART_REPLY_PACK_STR, buffer(buf), ofproto.OFP_HEADER_SIZE) stats_type_cls = cls._STATS_MSG_TYPES.get(type_) msg = super(OFPMultipartReply, stats_type_cls).parser( datapath, version, msg_type, msg_len, xid, buf) msg.type = type_ msg.flags = flags offset = ofproto.OFP_MULTIPART_REPLY_SIZE body = [] while offset < msg_len: b = stats_type_cls.cls_stats_body_cls.parser(msg.buf, offset) body.append(b) offset += b.length if hasattr(b, 'length') else b.len if stats_type_cls.cls_body_single_struct: msg.body = body[0] else: msg.body = body return msg class OFPDescStats(ofproto_parser.namedtuple('OFPDescStats', ( 'mfr_desc', 'hw_desc', 'sw_desc', 'serial_num', 'dp_desc'))): _TYPE = { 'ascii': [ 'mfr_desc', 'hw_desc', 'sw_desc', 'serial_num', 'dp_desc', ] } @classmethod def parser(cls, buf, offset): desc = struct.unpack_from(ofproto.OFP_DESC_PACK_STR, buf, offset) desc = list(desc) desc = map(lambda x: x.rstrip('\0'), desc) stats = cls(*desc) stats.length = ofproto.OFP_DESC_SIZE return stats @_set_stats_type(ofproto.OFPMP_DESC, OFPDescStats) 
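# _set_stats_type() above binds the multipart type and body class to the
# message class; _set_msg_type() below registers the OpenFlow message type
# used when the header is serialized.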
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST) class OFPDescStatsRequest(OFPMultipartRequest): """ Description statistics request message The controller uses this message to query description of the switch. ================ ====================================================== Attribute Description ================ ====================================================== flags Zero or ``OFPMPF_REQ_MORE`` ================ ====================================================== Example:: def send_desc_stats_request(self, datapath): ofp = datapath.ofproto ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPDescStatsRequest(datapath, 0) datapath.send_msg(req) """ def __init__(self, datapath, flags=0, type_=None): super(OFPDescStatsRequest, self).__init__(datapath, flags) @OFPMultipartReply.register_stats_type(body_single_struct=True) @_set_stats_type(ofproto.OFPMP_DESC, OFPDescStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REPLY) class OFPDescStatsReply(OFPMultipartReply): """ Description statistics reply message The switch responds with this message to a description statistics request. ================ ====================================================== Attribute Description ================ ====================================================== body Instance of ``OFPDescStats`` ================ ====================================================== Example:: @set_ev_cls(ofp_event.EventOFPDescStatsReply, MAIN_DISPATCHER) def desc_stats_reply_handler(self, ev): body = ev.msg.body self.logger.debug('DescStats: mfr_desc=%s hw_desc=%s sw_desc=%s ' 'serial_num=%s dp_desc=%s', body.mfr_desc, body.hw_desc, body.sw_desc, body.serial_num, body.dp_desc) """ def __init__(self, datapath, type_=None, **kwargs): super(OFPDescStatsReply, self).__init__(datapath, **kwargs) class OFPFlowStats(StringifyMixin): def __init__(self, table_id=None, duration_sec=None, duration_nsec=None, priority=None, idle_timeout=None, hard_timeout=None, flags=None, cookie=None, packet_count=None, byte_count=None, match=None, instructions=None, length=None): super(OFPFlowStats, self).__init__() self.length = 0 self.table_id = table_id self.duration_sec = duration_sec self.duration_nsec = duration_nsec self.priority = priority self.idle_timeout = idle_timeout self.hard_timeout = hard_timeout self.flags = flags self.cookie = cookie self.packet_count = packet_count self.byte_count = byte_count self.match = match self.instructions = instructions @classmethod def parser(cls, buf, offset): flow_stats = cls() (flow_stats.length, flow_stats.table_id, flow_stats.duration_sec, flow_stats.duration_nsec, flow_stats.priority, flow_stats.idle_timeout, flow_stats.hard_timeout, flow_stats.flags, flow_stats.cookie, flow_stats.packet_count, flow_stats.byte_count) = struct.unpack_from( ofproto.OFP_FLOW_STATS_0_PACK_STR, buf, offset) offset += ofproto.OFP_FLOW_STATS_0_SIZE flow_stats.match = OFPMatch.parser(buf, offset) match_length = utils.round_up(flow_stats.match.length, 8) inst_length = (flow_stats.length - (ofproto.OFP_FLOW_STATS_SIZE - ofproto.OFP_MATCH_SIZE + match_length)) offset += match_length instructions = [] while inst_length > 0: inst = OFPInstruction.parser(buf, offset) instructions.append(inst) offset += inst.len inst_length -= inst.len flow_stats.instructions = instructions return flow_stats class OFPFlowStatsRequestBase(OFPMultipartRequest): def __init__(self, datapath, flags, table_id, out_port, out_group, cookie, cookie_mask, match): super(OFPFlowStatsRequestBase, self).__init__(datapath, flags) self.table_id = table_id 
self.out_port = out_port self.out_group = out_group self.cookie = cookie self.cookie_mask = cookie_mask self.match = match def _serialize_stats_body(self): offset = ofproto.OFP_MULTIPART_REQUEST_SIZE msg_pack_into(ofproto.OFP_FLOW_STATS_REQUEST_0_PACK_STR, self.buf, offset, self.table_id, self.out_port, self.out_group, self.cookie, self.cookie_mask) offset += ofproto.OFP_FLOW_STATS_REQUEST_0_SIZE self.match.serialize(self.buf, offset) @_set_stats_type(ofproto.OFPMP_FLOW, OFPFlowStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST) class OFPFlowStatsRequest(OFPFlowStatsRequestBase): """ Individual flow statistics request message The controller uses this message to query individual flow statistics. ================ ====================================================== Attribute Description ================ ====================================================== flags Zero or ``OFPMPF_REQ_MORE`` table_id ID of table to read out_port Require matching entries to include this as an output port out_group Require matching entries to include this as an output group cookie Require matching entries to contain this cookie value cookie_mask Mask used to restrict the cookie bits that must match match Instance of ``OFPMatch`` ================ ====================================================== Example:: def send_flow_stats_request(self, datapath): ofp = datapath.ofproto ofp_parser = datapath.ofproto_parser cookie = cookie_mask = 0 match = ofp_parser.OFPMatch(in_port=1) req = ofp_parser.OFPFlowStatsRequest(datapath, 0, ofp.OFPTT_ALL, ofp.OFPP_ANY, ofp.OFPG_ANY, cookie, cookie_mask, match) datapath.send_msg(req) """ def __init__(self, datapath, flags=0, table_id=ofproto.OFPTT_ALL, out_port=ofproto.OFPP_ANY, out_group=ofproto.OFPG_ANY, cookie=0, cookie_mask=0, match=None, type_=None): if match is None: match = OFPMatch() super(OFPFlowStatsRequest, self).__init__(datapath, flags, table_id, out_port, out_group, cookie, cookie_mask, match) @OFPMultipartReply.register_stats_type() @_set_stats_type(ofproto.OFPMP_FLOW, OFPFlowStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REPLY) class OFPFlowStatsReply(OFPMultipartReply): """ Individual flow statistics reply message The switch responds with this message to an individual flow statistics request. 
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPFlowStats`` instance
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER)
        def flow_stats_reply_handler(self, ev):
            flows = []
            for stat in ev.msg.body:
                flows.append('table_id=%s '
                             'duration_sec=%d duration_nsec=%d '
                             'priority=%d '
                             'idle_timeout=%d hard_timeout=%d flags=0x%04x '
                             'cookie=%d packet_count=%d byte_count=%d '
                             'match=%s instructions=%s' %
                             (stat.table_id,
                              stat.duration_sec, stat.duration_nsec,
                              stat.priority,
                              stat.idle_timeout, stat.hard_timeout,
                              stat.flags, stat.cookie, stat.packet_count,
                              stat.byte_count, stat.match,
                              stat.instructions))
            self.logger.debug('FlowStats: %s', flows)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPFlowStatsReply, self).__init__(datapath, **kwargs)


class OFPAggregateStats(ofproto_parser.namedtuple('OFPAggregateStats', (
        'packet_count', 'byte_count', 'flow_count'))):
    @classmethod
    def parser(cls, buf, offset):
        agg = struct.unpack_from(
            ofproto.OFP_AGGREGATE_STATS_REPLY_PACK_STR, buf, offset)
        stats = cls(*agg)
        stats.length = ofproto.OFP_AGGREGATE_STATS_REPLY_SIZE
        return stats


@_set_stats_type(ofproto.OFPMP_AGGREGATE, OFPAggregateStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPAggregateStatsRequest(OFPFlowStatsRequestBase):
    """
    Aggregate flow statistics request message

    The controller uses this message to query aggregate flow statistics.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    table_id         ID of table to read
    out_port         Require matching entries to include this as an output
                     port
    out_group        Require matching entries to include this as an output
                     group
    cookie           Require matching entries to contain this cookie value
    cookie_mask      Mask used to restrict the cookie bits that must match
    match            Instance of ``OFPMatch``
    ================ ======================================================

    Example::

        def send_aggregate_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser

            cookie = cookie_mask = 0
            match = ofp_parser.OFPMatch(in_port=1)
            req = ofp_parser.OFPAggregateStatsRequest(datapath, 0,
                                                      ofp.OFPTT_ALL,
                                                      ofp.OFPP_ANY,
                                                      ofp.OFPG_ANY,
                                                      cookie, cookie_mask,
                                                      match)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags, table_id, out_port, out_group,
                 cookie, cookie_mask, match, type_=None):
        super(OFPAggregateStatsRequest, self).__init__(datapath,
                                                       flags,
                                                       table_id,
                                                       out_port,
                                                       out_group,
                                                       cookie,
                                                       cookie_mask,
                                                       match)


@OFPMultipartReply.register_stats_type(body_single_struct=True)
@_set_stats_type(ofproto.OFPMP_AGGREGATE, OFPAggregateStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPAggregateStatsReply(OFPMultipartReply):
    """
    Aggregate flow statistics reply message

    The switch responds with this message to an aggregate flow statistics
    request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             Instance of ``OFPAggregateStats``
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPAggregateStatsReply, MAIN_DISPATCHER)
        def aggregate_stats_reply_handler(self, ev):
            body = ev.msg.body

            self.logger.debug('AggregateStats: packet_count=%d '
                              'byte_count=%d flow_count=%d',
                              body.packet_count, body.byte_count,
                              body.flow_count)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPAggregateStatsReply, self).__init__(datapath, **kwargs)


class OFPTableStats(ofproto_parser.namedtuple('OFPTableStats', (
        'table_id', 'active_count', 'lookup_count', 'matched_count'))):
    @classmethod
    def parser(cls, buf, offset):
        tbl = struct.unpack_from(ofproto.OFP_TABLE_STATS_PACK_STR,
                                 buf, offset)
        stats = cls(*tbl)
        stats.length = ofproto.OFP_TABLE_STATS_SIZE
        return stats


@_set_stats_type(ofproto.OFPMP_TABLE, OFPTableStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPTableStatsRequest(OFPMultipartRequest):
    """
    Table statistics request message

    The controller uses this message to query flow table statistics.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    ================ ======================================================

    Example::

        def send_table_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser

            req = ofp_parser.OFPTableStatsRequest(datapath, 0)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, type_=None):
        super(OFPTableStatsRequest, self).__init__(datapath, flags)


@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_TABLE, OFPTableStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPTableStatsReply(OFPMultipartReply):
    """
    Table statistics reply message

    The switch responds with this message to a table statistics request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPTableStats`` instance
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPTableStatsReply, MAIN_DISPATCHER)
        def table_stats_reply_handler(self, ev):
            tables = []
            for stat in ev.msg.body:
                tables.append('table_id=%d active_count=%d lookup_count=%d '
                              'matched_count=%d' %
                              (stat.table_id, stat.active_count,
                               stat.lookup_count, stat.matched_count))
            self.logger.debug('TableStats: %s', tables)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPTableStatsReply, self).__init__(datapath, **kwargs)


class OFPPortStats(ofproto_parser.namedtuple('OFPPortStats', (
        'port_no', 'rx_packets', 'tx_packets', 'rx_bytes', 'tx_bytes',
        'rx_dropped', 'tx_dropped', 'rx_errors', 'tx_errors',
        'rx_frame_err', 'rx_over_err', 'rx_crc_err', 'collisions',
        'duration_sec', 'duration_nsec'))):
    @classmethod
    def parser(cls, buf, offset):
        port = struct.unpack_from(ofproto.OFP_PORT_STATS_PACK_STR,
                                  buf, offset)
        stats = cls(*port)
        stats.length = ofproto.OFP_PORT_STATS_SIZE
        return stats


@_set_stats_type(ofproto.OFPMP_PORT_STATS, OFPPortStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPPortStatsRequest(OFPMultipartRequest):
    """
    Port statistics request message

    The controller uses this message to query port statistics.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    port_no          Port number to read (OFPP_ANY to all ports)
    ================ ======================================================

    Example::

        def send_port_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser

            req = ofp_parser.OFPPortStatsRequest(datapath, 0, ofp.OFPP_ANY)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, port_no=ofproto.OFPP_ANY,
                 type_=None):
        super(OFPPortStatsRequest, self).__init__(datapath, flags)
        self.port_no = port_no

    def _serialize_stats_body(self):
        msg_pack_into(ofproto.OFP_PORT_STATS_REQUEST_PACK_STR,
                      self.buf,
                      ofproto.OFP_MULTIPART_REQUEST_SIZE, self.port_no)


@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_PORT_STATS, OFPPortStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPPortStatsReply(OFPMultipartReply):
    """
    Port statistics reply message

    The switch responds with this message to a port statistics request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPPortStats`` instance
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPPortStatsReply, MAIN_DISPATCHER)
        def port_stats_reply_handler(self, ev):
            ports = []
            for stat in ev.msg.body:
                ports.append('port_no=%d '
                             'rx_packets=%d tx_packets=%d '
                             'rx_bytes=%d tx_bytes=%d '
                             'rx_dropped=%d tx_dropped=%d '
                             'rx_errors=%d tx_errors=%d '
                             'rx_frame_err=%d rx_over_err=%d rx_crc_err=%d '
                             'collisions=%d duration_sec=%d duration_nsec=%d' %
                             (stat.port_no,
                              stat.rx_packets, stat.tx_packets,
                              stat.rx_bytes, stat.tx_bytes,
                              stat.rx_dropped, stat.tx_dropped,
                              stat.rx_errors, stat.tx_errors,
                              stat.rx_frame_err, stat.rx_over_err,
                              stat.rx_crc_err, stat.collisions,
                              stat.duration_sec, stat.duration_nsec))
            self.logger.debug('PortStats: %s', ports)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPPortStatsReply, self).__init__(datapath, **kwargs)


class OFPQueueStats(ofproto_parser.namedtuple('OFPQueueStats', (
        'port_no', 'queue_id', 'tx_bytes', 'tx_packets', 'tx_errors',
        'duration_sec', 'duration_nsec'))):
    @classmethod
    def parser(cls, buf, offset):
        queue = struct.unpack_from(ofproto.OFP_QUEUE_STATS_PACK_STR,
                                   buf, offset)
        stats = cls(*queue)
        stats.length = ofproto.OFP_QUEUE_STATS_SIZE
        return stats


@_set_stats_type(ofproto.OFPMP_QUEUE, OFPQueueStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPQueueStatsRequest(OFPMultipartRequest):
    """
    Queue statistics request message

    The controller uses this message to query queue statistics.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    port_no          Port number to read
    queue_id         ID of queue to read
    ================ ======================================================

    Example::

        def send_queue_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser

            req = ofp_parser.OFPQueueStatsRequest(datapath, 0, ofp.OFPP_ANY,
                                                  ofp.OFPQ_ALL)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, port_no=ofproto.OFPP_ANY,
                 queue_id=ofproto.OFPQ_ALL, type_=None):
        super(OFPQueueStatsRequest, self).__init__(datapath, flags)
        self.port_no = port_no
        self.queue_id = queue_id

    def _serialize_stats_body(self):
        msg_pack_into(ofproto.OFP_QUEUE_STATS_REQUEST_PACK_STR,
                      self.buf,
                      ofproto.OFP_MULTIPART_REQUEST_SIZE,
                      self.port_no, self.queue_id)


@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_QUEUE, OFPQueueStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPQueueStatsReply(OFPMultipartReply):
    """
    Queue statistics reply message

    The switch responds with this message to a queue statistics request.
================ ====================================================== Attribute Description ================ ====================================================== body List of ``OFPQueueStats`` instance ================ ====================================================== Example:: @set_ev_cls(ofp_event.EventOFPQueueStatsReply, MAIN_DISPATCHER) def queue_stats_reply_handler(self, ev): queues = [] for stat in ev.msg.body: queues.append('port_no=%d queue_id=%d ' 'tx_bytes=%d tx_packets=%d tx_errors=%d ' 'duration_sec=%d duration_nsec=%d' % (stat.port_no, stat.queue_id, stat.tx_bytes, stat.tx_packets, stat.tx_errors, stat.duration_sec, stat.duration_nsec)) self.logger.debug('QueueStats: %s', queues) """ def __init__(self, datapath, type_=None, **kwargs): super(OFPQueueStatsReply, self).__init__(datapath, **kwargs) class OFPBucketCounter(StringifyMixin): def __init__(self, packet_count, byte_count): super(OFPBucketCounter, self).__init__() self.packet_count = packet_count self.byte_count = byte_count @classmethod def parser(cls, buf, offset): packet_count, byte_count = struct.unpack_from( ofproto.OFP_BUCKET_COUNTER_PACK_STR, buf, offset) return cls(packet_count, byte_count) class OFPGroupStats(StringifyMixin): def __init__(self, length=None, group_id=None, ref_count=None, packet_count=None, byte_count=None, duration_sec=None, duration_nsec=None, bucket_stats=None): super(OFPGroupStats, self).__init__() self.length = length self.group_id = group_id self.ref_count = ref_count self.packet_count = packet_count self.byte_count = byte_count self.duration_sec = duration_sec self.duration_nsec = duration_nsec self.bucket_stats = bucket_stats @classmethod def parser(cls, buf, offset): group = struct.unpack_from(ofproto.OFP_GROUP_STATS_PACK_STR, buf, offset) group_stats = cls(*group) group_stats.bucket_stats = [] total_len = group_stats.length + offset offset += ofproto.OFP_GROUP_STATS_SIZE while total_len > offset: b = OFPBucketCounter.parser(buf, offset) group_stats.bucket_stats.append(b) offset += ofproto.OFP_BUCKET_COUNTER_SIZE return group_stats @_set_stats_type(ofproto.OFPMP_GROUP, OFPGroupStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST) class OFPGroupStatsRequest(OFPMultipartRequest): """ Group statistics request message The controller uses this message to query statistics of one or more groups. ================ ====================================================== Attribute Description ================ ====================================================== flags Zero or ``OFPMPF_REQ_MORE`` group_id ID of group to read (OFPG_ALL to all groups) ================ ====================================================== Example:: def send_group_stats_request(self, datapath): ofp = datapath.ofproto ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPGroupStatsRequest(datapath, 0, ofp.OFPG_ALL) datapath.send_msg(req) """ def __init__(self, datapath, flags=0, group_id=ofproto.OFPG_ALL, type_=None): super(OFPGroupStatsRequest, self).__init__(datapath, flags) self.group_id = group_id def _serialize_stats_body(self): msg_pack_into(ofproto.OFP_GROUP_STATS_REQUEST_PACK_STR, self.buf, ofproto.OFP_MULTIPART_REQUEST_SIZE, self.group_id) @OFPMultipartReply.register_stats_type() @_set_stats_type(ofproto.OFPMP_GROUP, OFPGroupStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REPLY) class OFPGroupStatsReply(OFPMultipartReply): """ Group statistics reply message The switch responds with this message to a group statistics request. 
================ ====================================================== Attribute Description ================ ====================================================== body List of ``OFPGroupStats`` instance ================ ====================================================== Example:: @set_ev_cls(ofp_event.EventOFPGroupStatsReply, MAIN_DISPATCHER) def group_stats_reply_handler(self, ev): groups = [] for stat in ev.msg.body: groups.append('length=%d group_id=%d ' 'ref_count=%d packet_count=%d byte_count=%d ' 'duration_sec=%d duration_nsec=%d' % (stat.length, stat.group_id, stat.ref_count, stat.packet_count, stat.byte_count, stat.duration_sec, stat.duration_nsec)) self.logger.debug('GroupStats: %s', groups) """ def __init__(self, datapath, type_=None, **kwargs): super(OFPGroupStatsReply, self).__init__(datapath, **kwargs) class OFPGroupDescStats(StringifyMixin): def __init__(self, type_=None, group_id=None, buckets=None, length=None): super(OFPGroupDescStats, self).__init__() self.type = type_ self.group_id = group_id self.buckets = buckets @classmethod def parser(cls, buf, offset): stats = cls() (stats.length, stats.type, stats.group_id) = struct.unpack_from( ofproto.OFP_GROUP_DESC_STATS_PACK_STR, buf, offset) offset += ofproto.OFP_GROUP_DESC_STATS_SIZE stats.buckets = [] length = ofproto.OFP_GROUP_DESC_STATS_SIZE while length < stats.length: bucket = OFPBucket.parser(buf, offset) stats.buckets.append(bucket) offset += bucket.len length += bucket.len return stats @_set_stats_type(ofproto.OFPMP_GROUP_DESC, OFPGroupDescStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST) class OFPGroupDescStatsRequest(OFPMultipartRequest): """ Group description request message The controller uses this message to list the set of groups on a switch. ================ ====================================================== Attribute Description ================ ====================================================== flags Zero or ``OFPMPF_REQ_MORE`` ================ ====================================================== Example:: def send_group_desc_stats_request(self, datapath): ofp = datapath.ofproto ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPGroupDescStatsRequest(datapath, 0) datapath.send_msg(req) """ def __init__(self, datapath, flags=0, type_=None): super(OFPGroupDescStatsRequest, self).__init__(datapath, flags) @OFPMultipartReply.register_stats_type() @_set_stats_type(ofproto.OFPMP_GROUP_DESC, OFPGroupDescStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REPLY) class OFPGroupDescStatsReply(OFPMultipartReply): """ Group description reply message The switch responds with this message to a group description request. 
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPGroupDescStats`` instance
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPGroupDescStatsReply, MAIN_DISPATCHER)
        def group_desc_stats_reply_handler(self, ev):
            descs = []
            for stat in ev.msg.body:
                descs.append('length=%d type=%d group_id=%d '
                             'buckets=%s' %
                             (stat.length, stat.type, stat.group_id,
                              stat.buckets))
            self.logger.debug('GroupDescStats: %s', descs)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPGroupDescStatsReply, self).__init__(datapath, **kwargs)


class OFPGroupFeaturesStats(ofproto_parser.namedtuple('OFPGroupFeaturesStats',
                                                      ('types',
                                                       'capabilities',
                                                       'max_groups',
                                                       'actions'))):
    @classmethod
    def parser(cls, buf, offset):
        group_features = struct.unpack_from(
            ofproto.OFP_GROUP_FEATURES_PACK_STR, buf, offset)
        types = group_features[0]
        capabilities = group_features[1]
        max_groups = list(group_features[2:6])
        actions = list(group_features[6:10])
        stats = cls(types, capabilities, max_groups, actions)
        stats.length = ofproto.OFP_GROUP_FEATURES_SIZE
        return stats


@_set_stats_type(ofproto.OFPMP_GROUP_FEATURES, OFPGroupFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPGroupFeaturesStatsRequest(OFPMultipartRequest):
    """
    Group features request message

    The controller uses this message to list the capabilities of groups on
    a switch.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    ================ ======================================================

    Example::

        def send_group_features_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser

            req = ofp_parser.OFPGroupFeaturesStatsRequest(datapath, 0)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, type_=None):
        super(OFPGroupFeaturesStatsRequest, self).__init__(datapath, flags)


@OFPMultipartReply.register_stats_type(body_single_struct=True)
@_set_stats_type(ofproto.OFPMP_GROUP_FEATURES, OFPGroupFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPGroupFeaturesStatsReply(OFPMultipartReply):
    """
    Group features reply message

    The switch responds with this message to a group features request.
================ ====================================================== Attribute Description ================ ====================================================== body Instance of ``OFPGroupFeaturesStats`` ================ ====================================================== Example:: @set_ev_cls(ofp_event.EventOFPGroupFeaturesStatsReply, MAIN_DISPATCHER) def group_features_stats_reply_handler(self, ev): body = ev.msg.body self.logger.debug('GroupFeaturesStats: types=%d ' 'capabilities=0x%08x max_groups=%s ' 'actions=%s', body.types, body.capabilities, body.max_groups, body.actions) """ def __init__(self, datapath, type_=None, **kwargs): super(OFPGroupFeaturesStatsReply, self).__init__(datapath, **kwargs) class OFPMeterBandStats(StringifyMixin): def __init__(self, packet_band_count, byte_band_count): super(OFPMeterBandStats, self).__init__() self.packet_band_count = packet_band_count self.byte_band_count = byte_band_count @classmethod def parser(cls, buf, offset): band_stats = struct.unpack_from( ofproto.OFP_METER_BAND_STATS_PACK_STR, buf, offset) return cls(*band_stats) class OFPMeterStats(StringifyMixin): def __init__(self, meter_id=None, flow_count=None, packet_in_count=None, byte_in_count=None, duration_sec=None, duration_nsec=None, band_stats=None, len_=None): super(OFPMeterStats, self).__init__() self.meter_id = meter_id self.len = 0 self.flow_count = flow_count self.packet_in_count = packet_in_count self.byte_in_count = byte_in_count self.duration_sec = duration_sec self.duration_nsec = duration_nsec self.band_stats = band_stats @classmethod def parser(cls, buf, offset): meter_stats = cls() (meter_stats.meter_id, meter_stats.len, meter_stats.flow_count, meter_stats.packet_in_count, meter_stats.byte_in_count, meter_stats.duration_sec, meter_stats.duration_nsec) = struct.unpack_from( ofproto.OFP_METER_STATS_PACK_STR, buf, offset) offset += ofproto.OFP_METER_STATS_SIZE meter_stats.band_stats = [] length = ofproto.OFP_METER_STATS_SIZE while length < meter_stats.len: band_stats = OFPMeterBandStats.parser(buf, offset) meter_stats.band_stats.append(band_stats) offset += ofproto.OFP_METER_BAND_STATS_SIZE length += ofproto.OFP_METER_BAND_STATS_SIZE return meter_stats @_set_stats_type(ofproto.OFPMP_METER, OFPMeterStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST) class OFPMeterStatsRequest(OFPMultipartRequest): """ Meter statistics request message The controller uses this message to query statistics for one or more meters. 
================ ====================================================== Attribute Description ================ ====================================================== flags Zero or ``OFPMPF_REQ_MORE`` meter_id ID of meter to read (OFPM_ALL to all meters) ================ ====================================================== Example:: def send_meter_stats_request(self, datapath): ofp = datapath.ofproto ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPMeterStatsRequest(datapath, 0, ofp.OFPM_ALL) datapath.send_msg(req) """ def __init__(self, datapath, flags=0, meter_id=ofproto.OFPM_ALL, type_=None): super(OFPMeterStatsRequest, self).__init__(datapath, flags) self.meter_id = meter_id def _serialize_stats_body(self): msg_pack_into(ofproto.OFP_METER_MULTIPART_REQUEST_PACK_STR, self.buf, ofproto.OFP_MULTIPART_REQUEST_SIZE, self.meter_id) @OFPMultipartReply.register_stats_type() @_set_stats_type(ofproto.OFPMP_METER, OFPMeterStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REPLY) class OFPMeterStatsReply(OFPMultipartReply): """ Meter statistics reply message The switch responds with this message to a meter statistics request. ================ ====================================================== Attribute Description ================ ====================================================== body List of ``OFPMeterStats`` instance ================ ====================================================== Example:: @set_ev_cls(ofp_event.EventOFPMeterStatsReply, MAIN_DISPATCHER) def meter_stats_reply_handler(self, ev): meters = [] for stat in ev.msg.body: meters.append('meter_id=0x%08x len=%d flow_count=%d ' 'packet_in_count=%d byte_in_count=%d ' 'duration_sec=%d duration_nsec=%d ' 'band_stats=%s' % (stat.meter_id, stat.len, stat.flow_count, stat.packet_in_count, stat.byte_in_count, stat.duration_sec, stat.duration_nsec, stat.band_stats)) self.logger.debug('MeterStats: %s', meters) """ def __init__(self, datapath, type_=None, **kwargs): super(OFPMeterStatsReply, self).__init__(datapath, **kwargs) class OFPMeterBand(StringifyMixin): def __init__(self, type_, len_): super(OFPMeterBand, self).__init__() self.type = type_ self.len = len_ class OFPMeterBandHeader(OFPMeterBand): _METER_BAND = {} @staticmethod def register_meter_band_type(type_, len_): def _register_meter_band_type(cls): OFPMeterBandHeader._METER_BAND[type_] = cls cls.cls_meter_band_type = type_ cls.cls_meter_band_len = len_ return cls return _register_meter_band_type def __init__(self): cls = self.__class__ super(OFPMeterBandHeader, self).__init__(cls.cls_meter_band_type, cls.cls_meter_band_len) @classmethod def parser(cls, buf, offset): type_, len_, _rate, _burst_size = struct.unpack_from( ofproto.OFP_METER_BAND_HEADER_PACK_STR, buf, offset) cls_ = cls._METER_BAND[type_] assert cls_.cls_meter_band_len == len_ return cls_.parser(buf, offset) @OFPMeterBandHeader.register_meter_band_type( ofproto.OFPMBT_DROP, ofproto.OFP_METER_BAND_DROP_SIZE) class OFPMeterBandDrop(OFPMeterBandHeader): def __init__(self, rate=0, burst_size=0, type_=None, len_=None): super(OFPMeterBandDrop, self).__init__() self.rate = rate self.burst_size = burst_size def serialize(self, buf, offset): msg_pack_into(ofproto.OFP_METER_BAND_DROP_PACK_STR, buf, offset, self.type, self.len, self.rate, self.burst_size) @classmethod def parser(cls, buf, offset): type_, len_, rate, burst_size = struct.unpack_from( ofproto.OFP_METER_BAND_DROP_PACK_STR, buf, offset) assert cls.cls_meter_band_type == type_ assert cls.cls_meter_band_len == len_ return cls(rate, burst_size) 
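# Example (an illustrative sketch, not part of the original module): a
# band class such as OFPMeterBandDrop is normally combined with
# OFPMeterMod to install a meter; the handler name, meter id and the
# 1000kbps/100kb figures below are arbitrary.
#
#     def send_meter_mod(self, datapath):
#         ofp = datapath.ofproto
#         ofp_parser = datapath.ofproto_parser
#
#         # drop packets once the metered traffic exceeds 1000kbps
#         bands = [ofp_parser.OFPMeterBandDrop(rate=1000, burst_size=100)]
#         req = ofp_parser.OFPMeterMod(datapath, ofp.OFPMC_ADD,
#                                      ofp.OFPMF_KBPS, 1, bands)
#         datapath.send_msg(req)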
@OFPMeterBandHeader.register_meter_band_type( ofproto.OFPMBT_DSCP_REMARK, ofproto.OFP_METER_BAND_DSCP_REMARK_SIZE) class OFPMeterBandDscpRemark(OFPMeterBandHeader): def __init__(self, rate=0, burst_size=0, prec_level=0, type_=None, len_=None): super(OFPMeterBandDscpRemark, self).__init__() self.rate = rate self.burst_size = burst_size self.prec_level = prec_level def serialize(self, buf, offset): msg_pack_into(ofproto.OFP_METER_BAND_DSCP_REMARK_PACK_STR, buf, offset, self.type, self.len, self.rate, self.burst_size, self.prec_level) @classmethod def parser(cls, buf, offset): type_, len_, rate, burst_size, prec_level = struct.unpack_from( ofproto.OFP_METER_BAND_DSCP_REMARK_PACK_STR, buf, offset) assert cls.cls_meter_band_type == type_ assert cls.cls_meter_band_len == len_ return cls(rate, burst_size, prec_level) @OFPMeterBandHeader.register_meter_band_type( ofproto.OFPMBT_EXPERIMENTER, ofproto.OFP_METER_BAND_EXPERIMENTER_SIZE) class OFPMeterBandExperimenter(OFPMeterBandHeader): def __init__(self, rate=0, burst_size=0, experimenter=None, type_=None, len_=None): super(OFPMeterBandExperimenter, self).__init__() self.rate = rate self.burst_size = burst_size self.experimenter = experimenter def serialize(self, buf, offset): msg_pack_into(ofproto.OFP_METER_BAND_EXPERIMENTER_PACK_STR, buf, offset, self.type, self.len, self.rate, self.burst_size, self.experimenter) @classmethod def parser(cls, buf, offset): type_, len_, rate, burst_size, experimenter = struct.unpack_from( ofproto.OFP_METER_BAND_EXPERIMENTER_PACK_STR, buf, offset) assert cls.cls_meter_band_type == type_ assert cls.cls_meter_band_len == len_ return cls(rate, burst_size, experimenter) class OFPMeterConfigStats(StringifyMixin): def __init__(self, flags=None, meter_id=None, bands=None, length=None): super(OFPMeterConfigStats, self).__init__() self.length = None self.flags = flags self.meter_id = meter_id self.bands = bands @classmethod def parser(cls, buf, offset): meter_config = cls() (meter_config.length, meter_config.flags, meter_config.meter_id) = struct.unpack_from( ofproto.OFP_METER_CONFIG_PACK_STR, buf, offset) offset += ofproto.OFP_METER_CONFIG_SIZE meter_config.bands = [] length = ofproto.OFP_METER_CONFIG_SIZE while length < meter_config.length: band = OFPMeterBandHeader.parser(buf, offset) meter_config.bands.append(band) offset += band.len length += band.len return meter_config @_set_stats_type(ofproto.OFPMP_METER_CONFIG, OFPMeterConfigStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST) class OFPMeterConfigStatsRequest(OFPMultipartRequest): """ Meter configuration statistics request message The controller uses this message to query configuration for one or more meters. 
================ ====================================================== Attribute Description ================ ====================================================== flags Zero or ``OFPMPF_REQ_MORE`` meter_id ID of meter to read (OFPM_ALL to all meters) ================ ====================================================== Example:: def send_meter_config_stats_request(self, datapath): ofp = datapath.ofproto ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPMeterConfigStatsRequest(datapath, 0, ofp.OFPM_ALL) datapath.send_msg(req) """ def __init__(self, datapath, flags=0, meter_id=ofproto.OFPM_ALL, type_=None): super(OFPMeterConfigStatsRequest, self).__init__(datapath, flags) self.meter_id = meter_id def _serialize_stats_body(self): msg_pack_into(ofproto.OFP_METER_MULTIPART_REQUEST_PACK_STR, self.buf, ofproto.OFP_MULTIPART_REQUEST_SIZE, self.meter_id) @OFPMultipartReply.register_stats_type() @_set_stats_type(ofproto.OFPMP_METER_CONFIG, OFPMeterConfigStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REPLY) class OFPMeterConfigStatsReply(OFPMultipartReply): """ Meter configuration statistics reply message The switch responds with this message to a meter configuration statistics request. ================ ====================================================== Attribute Description ================ ====================================================== body List of ``OFPMeterConfigStats`` instance ================ ====================================================== Example:: @set_ev_cls(ofp_event.EventOFPMeterConfigStatsReply, MAIN_DISPATCHER) def meter_config_stats_reply_handler(self, ev): configs = [] for stat in ev.msg.body: configs.append('length=%d flags=0x%04x meter_id=0x%08x ' 'bands=%s' % (stat.length, stat.flags, stat.meter_id, stat.bands)) self.logger.debug('MeterConfigStats: %s', configs) """ def __init__(self, datapath, type_=None, **kwargs): super(OFPMeterConfigStatsReply, self).__init__(datapath, **kwargs) class OFPMeterFeaturesStats(ofproto_parser.namedtuple('OFPMeterFeaturesStats', ('max_meter', 'band_types', 'capabilities', 'max_bands', 'max_color'))): @classmethod def parser(cls, buf, offset): meter_features = struct.unpack_from( ofproto.OFP_METER_FEATURES_PACK_STR, buf, offset) stats = cls(*meter_features) stats.length = ofproto.OFP_METER_FEATURES_SIZE return stats @_set_stats_type(ofproto.OFPMP_METER_FEATURES, OFPMeterFeaturesStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST) class OFPMeterFeaturesStatsRequest(OFPMultipartRequest): """ Meter features statistics request message The controller uses this message to query the set of features of the metering subsystem. ================ ====================================================== Attribute Description ================ ====================================================== flags Zero or ``OFPMPF_REQ_MORE`` ================ ====================================================== Example:: def send_meter_features_stats_request(self, datapath): ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPMeterFeaturesStatsRequest(datapath, 0) datapath.send_msg(req) """ def __init__(self, datapath, flags=0, type_=None): super(OFPMeterFeaturesStatsRequest, self).__init__(datapath, flags) @OFPMultipartReply.register_stats_type() @_set_stats_type(ofproto.OFPMP_METER_FEATURES, OFPMeterFeaturesStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REPLY) class OFPMeterFeaturesStatsReply(OFPMultipartReply): """ Meter features statistics reply message The switch responds with this message to a meter features statistics request. 
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPMeterFeaturesStats`` instance
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPMeterFeaturesStatsReply,
                    MAIN_DISPATCHER)
        def meter_features_stats_reply_handler(self, ev):
            features = []
            for stat in ev.msg.body:
                features.append('max_meter=%d band_types=0x%08x '
                                'capabilities=0x%08x max_bands=%d '
                                'max_color=%d' %
                                (stat.max_meter, stat.band_types,
                                 stat.capabilities, stat.max_bands,
                                 stat.max_color))
            self.logger.debug('MeterFeaturesStats: %s', features)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPMeterFeaturesStatsReply, self).__init__(datapath, **kwargs)


class OFPTableFeaturesStats(StringifyMixin):
    _TYPE = {
        'utf-8': [
            # OF spec is unclear about the encoding of name.
            # we assume UTF-8.
            'name',
        ]
    }

    def __init__(self, table_id=None, name=None, metadata_match=None,
                 metadata_write=None, config=None, max_entries=None,
                 properties=None, length=None):
        super(OFPTableFeaturesStats, self).__init__()
        self.length = length
        self.table_id = table_id
        self.name = name
        self.metadata_match = metadata_match
        self.metadata_write = metadata_write
        self.config = config
        self.max_entries = max_entries
        self.properties = properties

    @classmethod
    def parser(cls, buf, offset):
        table_features = cls()
        (table_features.length, table_features.table_id,
         name, table_features.metadata_match,
         table_features.metadata_write, table_features.config,
         table_features.max_entries
         ) = struct.unpack_from(ofproto.OFP_TABLE_FEATURES_PACK_STR,
                                buf, offset)
        table_features.name = name.rstrip('\0')

        props = []
        rest = buf[offset + ofproto.OFP_TABLE_FEATURES_SIZE:
                   offset + table_features.length]
        while rest:
            p, rest = OFPTableFeatureProp.parse(rest)
            props.append(p)
        table_features.properties = props

        return table_features

    def serialize(self):
        # fixup
        bin_props = bytearray()
        for p in self.properties:
            bin_props += p.serialize()
        self.length = ofproto.OFP_TABLE_FEATURES_SIZE + len(bin_props)

        buf = bytearray()
        msg_pack_into(ofproto.OFP_TABLE_FEATURES_PACK_STR, buf, 0,
                      self.length, self.table_id, self.name,
                      self.metadata_match, self.metadata_write,
                      self.config, self.max_entries)
        return buf + bin_props


class OFPTableFeatureProp(StringifyMixin):
    _PACK_STR = '!HH'  # type, length
    _TYPES = {}  # OFPTFPT_ -> class

    def __init__(self, type_, length=None):
        self.type = type_
        self.length = length

    @classmethod
    def register_type(cls, type_):
        def _register_type(subcls):
            cls._TYPES[type_] = subcls
            return subcls
        return _register_type

    @classmethod
    def parse(cls, buf):
        (type_, length,) = struct.unpack_from(cls._PACK_STR, buffer(buf), 0)
        bin_prop = buf[struct.calcsize(cls._PACK_STR):length]
        rest = buf[utils.round_up(length, 8):]
        try:
            subcls = cls._TYPES[type_]
        except KeyError:
            subcls = OFPTableFeaturePropUnknown
        kwargs = subcls._parse_prop(bin_prop)
        kwargs['type_'] = type_
        kwargs['length'] = length
        return subcls(**kwargs), rest

    def serialize(self):
        # fixup
        bin_prop = self._serialize_prop()
        self.length = struct.calcsize(self._PACK_STR) + len(bin_prop)

        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, self.type, self.length)
        pad_len = utils.round_up(self.length, 8) - self.length
        return buf + bin_prop + pad_len * '\0'


class OFPTableFeaturePropUnknown(OFPTableFeatureProp):
    def __init__(self, type_, length=None, data=None):
        super(OFPTableFeaturePropUnknown, self).__init__(type_, length)
        self.data = data

    @classmethod
    def
_parse_prop(cls, buf): return {'data': buf} def _serialize_prop(self): return self.data # Implementation note: While OpenFlow 1.3.2 shares the same ofp_instruction # for flow_mod and table_features, we have separate classes. We named this # class to match with OpenFlow 1.4's name. (ofp_instruction_id) class OFPInstructionId(StringifyMixin): _PACK_STR = '!HH' # type, len def __init__(self, type_, len_=None): self.type = type_ self.len = len_ # XXX experimenter @classmethod def parse(cls, buf): (type_, len_,) = struct.unpack_from(cls._PACK_STR, buffer(buf), 0) rest = buf[len_:] return cls(type_=type_, len_=len_), rest def serialize(self): # fixup self.len = struct.calcsize(self._PACK_STR) buf = bytearray() msg_pack_into(self._PACK_STR, buf, 0, self.type, self.len) return buf @OFPTableFeatureProp.register_type(ofproto.OFPTFPT_INSTRUCTIONS) @OFPTableFeatureProp.register_type(ofproto.OFPTFPT_INSTRUCTIONS_MISS) class OFPTableFeaturePropInstructions(OFPTableFeatureProp): def __init__(self, type_, instruction_ids=[], length=None): super(OFPTableFeaturePropInstructions, self).__init__(type_, length) self.instruction_ids = instruction_ids @classmethod def _parse_prop(cls, buf): rest = buf ids = [] while rest: i, rest = OFPInstructionId.parse(rest) ids.append(i) return { 'instruction_ids': ids, } def _serialize_prop(self): bin_ids = bytearray() for i in self.instruction_ids: bin_ids += i.serialize() return bin_ids @OFPTableFeatureProp.register_type(ofproto.OFPTFPT_NEXT_TABLES) @OFPTableFeatureProp.register_type(ofproto.OFPTFPT_NEXT_TABLES_MISS) class OFPTableFeaturePropNextTables(OFPTableFeatureProp): _TABLE_ID_PACK_STR = '!B' def __init__(self, type_, table_ids=[], length=None): super(OFPTableFeaturePropNextTables, self).__init__(type_, length) self.table_ids = table_ids @classmethod def _parse_prop(cls, buf): rest = buf ids = [] while rest: (i,) = struct.unpack_from(cls._TABLE_ID_PACK_STR, buffer(rest), 0) rest = rest[struct.calcsize(cls._TABLE_ID_PACK_STR):] ids.append(i) return { 'table_ids': ids, } def _serialize_prop(self): bin_ids = bytearray() for i in self.table_ids: bin_id = bytearray() msg_pack_into(self._TABLE_ID_PACK_STR, bin_id, 0, i) bin_ids += bin_id return bin_ids # Implementation note: While OpenFlow 1.3.2 shares the same ofp_action_header # for flow_mod and table_features, we have separate classes. We named this # class to match with OpenFlow 1.4's name. (ofp_action_id) class OFPActionId(StringifyMixin): # XXX # ofp_action_header should have trailing pad bytes. 
# however, i guess it's a specification bug as: # - the spec explicitly says non-experimenter actions are 4 bytes # - linc/of_protocol doesn't use them # - OpenFlow 1.4 changed to use a separate structure _PACK_STR = '!HH' # type, len def __init__(self, type_, len_=None): self.type = type_ self.len = len_ # XXX experimenter @classmethod def parse(cls, buf): (type_, len_,) = struct.unpack_from(cls._PACK_STR, buffer(buf), 0) rest = buf[len_:] return cls(type_=type_, len_=len_), rest def serialize(self): # fixup self.len = struct.calcsize(self._PACK_STR) buf = bytearray() msg_pack_into(self._PACK_STR, buf, 0, self.type, self.len) return buf @OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WRITE_ACTIONS) @OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WRITE_ACTIONS_MISS) @OFPTableFeatureProp.register_type(ofproto.OFPTFPT_APPLY_ACTIONS) @OFPTableFeatureProp.register_type(ofproto.OFPTFPT_APPLY_ACTIONS_MISS) class OFPTableFeaturePropActions(OFPTableFeatureProp): def __init__(self, type_, action_ids=[], length=None): super(OFPTableFeaturePropActions, self).__init__(type_, length) self.action_ids = action_ids @classmethod def _parse_prop(cls, buf): rest = buf ids = [] while rest: i, rest = OFPActionId.parse(rest) ids.append(i) return { 'action_ids': ids, } def _serialize_prop(self): bin_ids = bytearray() for i in self.action_ids: bin_ids += i.serialize() return bin_ids # Implementation note: OFPOxmId is specific to this implementation. # It does not have a corresponding structure in the specification. # (the specification uses plain uint32_t for non-experimenter OXMs # and uint64_t for experimenter OXMs.) # # i have taken a look at some of software switch implementations # but they all look broken or incomplete. according to the spec, # oxm_hasmask should be 1 if a switch supports masking for the type. # the right value for oxm_length is not clear from the spec. # update: OpenFlow 1.3.3 "clarified" that oxm_length here is the payload # length. it's still unclear if it should be doubled for hasmask or not, # though. # ofsoftswitch13 # oxm_hasmask always 0 # oxm_length same as ofp_match etc (as without mask) # linc/of_protocol # oxm_hasmask always 0 # oxm_length always 0 # ovs: # seems in flux as of writing this [20141003] class OFPOxmId(StringifyMixin): _PACK_STR = '!I' # oxm header _EXPERIMENTER_ID_PACK_STR = '!I' _TYPE = { 'ascii': [ 'type', ], } def __init__(self, type_, hasmask=False, length=None): self.type = type_ self.hasmask = hasmask self.length = length @classmethod def parse(cls, buf): (oxm,) = struct.unpack_from(cls._PACK_STR, buffer(buf), 0) # oxm (32 bit) == class (16) | field (7) | hasmask (1) | length (8) # in case of experimenter OXMs, another 32 bit value # (experimenter id) follows. 
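        # worked example (assuming the standard in_port header, i.e.
        # class OFPXMC_OPENFLOW_BASIC=0x8000, field 0, hasmask 0,
        # payload length 4, so oxm == 0x80000004):
        #   oxm >> (1 + 8)      == 0x400000 -> class/field key for
        #                                      oxm_to_user()
        #   (oxm >> 8) & 1      == 0        -> hasmask
        #   oxm & 0xff          == 4        -> payload length
        #   oxm >> (7 + 1 + 8)  == 0x8000   -> OXM class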
        (type_, _v) = ofproto.oxm_to_user(oxm >> (1 + 8), None, None)
        rest = buf[struct.calcsize(cls._PACK_STR):]
        hasmask = ofproto.oxm_tlv_header_extract_hasmask(oxm)
        length = oxm & 0xff  # XXX see the comment on OFPOxmId
        class_ = oxm >> (7 + 1 + 8)
        if class_ == ofproto.OFPXMC_EXPERIMENTER:
            (exp_id,) = struct.unpack_from(cls._EXPERIMENTER_ID_PACK_STR,
                                           buffer(rest), 0)
            rest = rest[struct.calcsize(cls._EXPERIMENTER_ID_PACK_STR):]
            subcls = OFPExperimenterOxmId
            return subcls(type_=type_, exp_id=exp_id, hasmask=hasmask,
                          length=length), rest
        else:
            return cls(type_=type_, hasmask=hasmask, length=length), rest

    def serialize(self):
        # fixup
        self.length = 0  # XXX see the comment on OFPOxmId

        (n, _v, _m) = ofproto.oxm_from_user(self.type, None)
        oxm = (n << (1 + 8)) | (self.hasmask << 8) | self.length
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, oxm)
        assert n >> 7 != ofproto.OFPXMC_EXPERIMENTER
        return buf


class OFPExperimenterOxmId(OFPOxmId):
    def __init__(self, type_, exp_id, hasmask=False, length=None):
        super(OFPExperimenterOxmId, self).__init__(type_=type_,
                                                   hasmask=hasmask,
                                                   length=length)
        self.exp_id = exp_id

    def serialize(self):
        buf = super(OFPExperimenterOxmId, self).serialize()
        msg_pack_into(self._EXPERIMENTER_ID_PACK_STR, buf,
                      struct.calcsize(self._PACK_STR), self.exp_id)
        return buf


@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_MATCH)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WILDCARDS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WRITE_SETFIELD)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WRITE_SETFIELD_MISS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_APPLY_SETFIELD)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_APPLY_SETFIELD_MISS)
class OFPTableFeaturePropOxm(OFPTableFeatureProp):
    def __init__(self, type_, oxm_ids=[], length=None):
        super(OFPTableFeaturePropOxm, self).__init__(type_, length)
        self.oxm_ids = oxm_ids

    @classmethod
    def _parse_prop(cls, buf):
        rest = buf
        ids = []
        while rest:
            i, rest = OFPOxmId.parse(rest)
            ids.append(i)
        return {
            'oxm_ids': ids,
        }

    def _serialize_prop(self):
        bin_ids = bytearray()
        for i in self.oxm_ids:
            bin_ids += i.serialize()
        return bin_ids


@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_EXPERIMENTER)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_EXPERIMENTER_MISS)
class OFPTableFeaturePropExperimenter(OFPTableFeatureProp):
    _DATA_ELEMENT_PACK_STR = '!I'
    _BODY_PACK_STR = '!II'

    def __init__(self, type_, experimenter=None, exp_type=None, data=None,
                 length=None):
        self.type = type_
        self.length = length
        self.experimenter = experimenter
        self.exp_type = exp_type
        self.data = data

    @classmethod
    def _parse_prop(cls, buf):
        (experimenter, exp_type) = struct.unpack_from(cls._BODY_PACK_STR,
                                                      buf, 0)

        # Parse trailing data, a list of 4-byte words
        length = len(buf)
        data = []
        pack_size = struct.calcsize(cls._DATA_ELEMENT_PACK_STR)
        offset = struct.calcsize(cls._BODY_PACK_STR)
        while offset < length:
            (word,) = struct.unpack_from(cls._DATA_ELEMENT_PACK_STR,
                                         buf, offset)
            data.append(word)
            offset += pack_size

        return {
            'experimenter': experimenter,
            'exp_type': exp_type,
            'data': data,
        }

    def _serialize_prop(self):
        # experimenter, exp_type
        buf = bytearray()
        msg_pack_into(self._BODY_PACK_STR, buf, 0,
                      self.experimenter, self.exp_type)

        # data
        if len(self.data):
            msg_pack_into('!%dI' % len(self.data), buf, len(buf),
                          *self.data)

        return buf


@_set_stats_type(ofproto.OFPMP_TABLE_FEATURES, OFPTableFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPTableFeaturesStatsRequest(OFPMultipartRequest):
    """
    Table features statistics request message

    The
controller uses this message to query table features. ================ ====================================================== Attribute Description ================ ====================================================== body List of ``OFPTableFeaturesStats`` instances. The default is []. ================ ====================================================== """ def __init__(self, datapath, flags=0, body=[], type_=None): super(OFPTableFeaturesStatsRequest, self).__init__(datapath, flags) self.body = body def _serialize_stats_body(self): bin_body = bytearray() for p in self.body: bin_body += p.serialize() self.buf += bin_body @OFPMultipartReply.register_stats_type() @_set_stats_type(ofproto.OFPMP_TABLE_FEATURES, OFPTableFeaturesStats) @_set_msg_type(ofproto.OFPT_MULTIPART_REPLY) class OFPTableFeaturesStatsReply(OFPMultipartReply): """ Table features statistics reply message The switch responds with this message to a table features statistics request. ================ ====================================================== Attribute Description ================ ====================================================== body List of ``OFPTableFeaturesStats`` instance ================ ====================================================== """ def __init__(self, datapath, type_=None, **kwargs): super(OFPTableFeaturesStatsReply, self).__init__(datapath, **kwargs) @_set_stats_type(ofproto.OFPMP_PORT_DESC, OFPPort) @_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST) class OFPPortDescStatsRequest(OFPMultipartRequest): """ Port description request message The controller uses this message to query description of all the ports. ================ ====================================================== Attribute Description ================ ====================================================== flags Zero or ``OFPMPF_REQ_MORE`` ================ ====================================================== Example:: def send_port_desc_stats_request(self, datapath): ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPPortDescStatsRequest(datapath, 0) datapath.send_msg(req) """ def __init__(self, datapath, flags=0, type_=None): super(OFPPortDescStatsRequest, self).__init__(datapath, flags) @OFPMultipartReply.register_stats_type() @_set_stats_type(ofproto.OFPMP_PORT_DESC, OFPPort) @_set_msg_type(ofproto.OFPT_MULTIPART_REPLY) class OFPPortDescStatsReply(OFPMultipartReply): """ Port description reply message The switch responds with this message to a port description request. 
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPPortDescStats`` instance
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPPortDescStatsReply, MAIN_DISPATCHER)
        def port_desc_stats_reply_handler(self, ev):
            ports = []
            for p in ev.msg.body:
                ports.append('port_no=%d hw_addr=%s name=%s config=0x%08x '
                             'state=0x%08x curr=0x%08x advertised=0x%08x '
                             'supported=0x%08x peer=0x%08x curr_speed=%d '
                             'max_speed=%d' %
                             (p.port_no, p.hw_addr,
                              p.name, p.config,
                              p.state, p.curr, p.advertised,
                              p.supported, p.peer, p.curr_speed,
                              p.max_speed))
            self.logger.debug('OFPPortDescStatsReply received: %s', ports)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPPortDescStatsReply, self).__init__(datapath, **kwargs)


class OFPExperimenterMultipart(ofproto_parser.namedtuple(
                               'OFPExperimenterMultipart',
                               ('experimenter', 'exp_type', 'data'))):
    """
    The body of OFPExperimenterStatsReply multipart messages.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    experimenter     Experimenter ID
    exp_type         Experimenter defined
    data             Experimenter defined additional data
    ================ ======================================================
    """

    @classmethod
    def parser(cls, buf, offset):
        args = struct.unpack_from(
            ofproto.OFP_EXPERIMENTER_MULTIPART_HEADER_PACK_STR, buf,
            offset)
        args = list(args)
        args.append(buf[offset +
                        ofproto.OFP_EXPERIMENTER_MULTIPART_HEADER_SIZE:])
        stats = cls(*args)
        # this body spans the rest of the message, so its length is the
        # experimenter multipart header plus the trailing data
        stats.length = (ofproto.OFP_EXPERIMENTER_MULTIPART_HEADER_SIZE +
                        len(stats.data))
        return stats

    def serialize(self):
        buf = bytearray()
        msg_pack_into(ofproto.OFP_EXPERIMENTER_MULTIPART_HEADER_PACK_STR,
                      buf, 0, self.experimenter, self.exp_type)
        return buf + self.data


class OFPExperimenterStatsRequestBase(OFPMultipartRequest):
    def __init__(self, datapath, flags,
                 experimenter, exp_type,
                 type_=None):
        super(OFPExperimenterStatsRequestBase, self).__init__(datapath,
                                                              flags)
        self.experimenter = experimenter
        self.exp_type = exp_type


@_set_stats_type(ofproto.OFPMP_EXPERIMENTER, OFPExperimenterMultipart)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPExperimenterStatsRequest(OFPExperimenterStatsRequestBase):
    """
    Experimenter multipart request message

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    experimenter     Experimenter ID
    exp_type         Experimenter defined
    data             Experimenter defined additional data
    ================ ======================================================
    """
    def __init__(self, datapath, flags,
                 experimenter, exp_type, data,
                 type_=None):
        super(OFPExperimenterStatsRequest, self).__init__(datapath,
                                                          flags,
                                                          experimenter,
                                                          exp_type,
                                                          type_)
        self.data = data

    def _serialize_stats_body(self):
        body = OFPExperimenterMultipart(experimenter=self.experimenter,
                                        exp_type=self.exp_type,
                                        data=self.data)
        self.buf += body.serialize()


# NOTE: we use OFPMatch even though the on-wire format is not ofp_match.
# (the OF1.4 version of this message uses ofp_match.)
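# Example (an illustrative sketch, not part of the original module):
# ONFFlowMonitorRequest entries, defined just below, are carried inside
# an ONFFlowMonitorStatsRequest.  The id and flags values here are
# placeholders; a real request would set the appropriate ONFFMF_* flags.
#
#     def send_flow_monitor_request(self, datapath):
#         ofp = datapath.ofproto
#         ofp_parser = datapath.ofproto_parser
#
#         monitor = ofp_parser.ONFFlowMonitorRequest(1, 0,
#                                                    out_port=ofp.OFPP_ANY,
#                                                    table_id=ofp.OFPTT_ALL)
#         req = ofp_parser.ONFFlowMonitorStatsRequest(datapath, 0,
#                                                     body=[monitor])
#         datapath.send_msg(req)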
class ONFFlowMonitorRequest(StringifyMixin): def __init__(self, id_, flags, match=OFPMatch(), out_port=ofproto.OFPP_ANY, table_id=ofproto.OFPTT_ALL, match_len=None): self.id = id_ self.flags = flags self.match_len = match_len self.out_port = out_port self.table_id = table_id self.match = match def serialize(self): # fixup match = self.match bin_match = bytearray() ofp_match_len = match.serialize(bin_match, 0) assert len(bin_match) == ofp_match_len match_len = match.length match_hdr_len = ofproto.OFP_MATCH_SIZE - 4 # exclude pad[4] # strip ofp_match header and trailing padding bin_match = bytes(bin_match)[match_hdr_len:match_len] self.match_len = len(bin_match) buf = bytearray() msg_pack_into(ofproto.ONF_FLOW_MONITOR_REQUEST_PACK_STR, buf, 0, self.id, self.flags, self.match_len, self.out_port, self.table_id) buf += bin_match pad_len = utils.round_up(self.match_len, 8) - self.match_len buf += pad_len * '\0' return buf @_set_stats_type(ofproto.OFPMP_EXPERIMENTER, OFPExperimenterMultipart) @_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST) class ONFFlowMonitorStatsRequest(OFPExperimenterStatsRequestBase): """ ================ ====================================================== Attribute Description ================ ====================================================== flags Zero or ``OFPMPF_REQ_MORE`` body List of ONFFlowMonitorRequest instances ================ ====================================================== """ def __init__(self, datapath, flags, body=[], type_=None, experimenter=None, exp_type=None): super(ONFFlowMonitorStatsRequest, self).__init__(datapath, flags, experimenter=ofproto_common.ONF_EXPERIMENTER_ID, exp_type=ofproto.ONFMP_FLOW_MONITOR) self.body = body def _serialize_stats_body(self): data = bytearray() for i in self.body: data += i.serialize() body = OFPExperimenterMultipart(experimenter=self.experimenter, exp_type=self.exp_type, data=data) self.buf += body.serialize() @OFPMultipartReply.register_stats_type(body_single_struct=True) @_set_stats_type(ofproto.OFPMP_EXPERIMENTER, OFPExperimenterMultipart) @_set_msg_type(ofproto.OFPT_MULTIPART_REPLY) class OFPExperimenterStatsReply(OFPMultipartReply): """ Experimenter multipart reply message ================ ====================================================== Attribute Description ================ ====================================================== body An ``OFPExperimenterMultipart`` instance ================ ====================================================== """ def __init__(self, datapath, type_=None, **kwargs): super(OFPExperimenterStatsReply, self).__init__(datapath, **kwargs) @_set_msg_type(ofproto.OFPT_BARRIER_REQUEST) class OFPBarrierRequest(MsgBase): """ Barrier request message The controller sends this message to ensure message dependencies have been met or receive notifications for completed operations. Example:: def send_barrier_request(self, datapath): ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPBarrierRequest(datapath) datapath.send_msg(req) """ def __init__(self, datapath): super(OFPBarrierRequest, self).__init__(datapath) @_register_parser @_set_msg_type(ofproto.OFPT_BARRIER_REPLY) class OFPBarrierReply(MsgBase): """ Barrier reply message The switch responds with this message to a barrier request. 
Example:: @set_ev_cls(ofp_event.EventOFPBarrierReply, MAIN_DISPATCHER) def barrier_reply_handler(self, ev): self.logger.debug('OFPBarrierReply received') """ def __init__(self, datapath): super(OFPBarrierReply, self).__init__(datapath) @_set_msg_type(ofproto.OFPT_QUEUE_GET_CONFIG_REQUEST) class OFPQueueGetConfigRequest(MsgBase): """ Queue configuration request message ================ ====================================================== Attribute Description ================ ====================================================== port Port to be queried (OFPP_ANY to all configured queues) ================ ====================================================== Example:: def send_queue_get_config_request(self, datapath): ofp = datapath.ofproto ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPQueueGetConfigRequest(datapath, ofp.OFPP_ANY) datapath.send_msg(req) """ def __init__(self, datapath, port): super(OFPQueueGetConfigRequest, self).__init__(datapath) self.port = port def _serialize_body(self): msg_pack_into(ofproto.OFP_QUEUE_GET_CONFIG_REQUEST_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.port) class OFPQueuePropHeader(StringifyMixin): def __init__(self, property_, len_): self.property = property_ self.len = len_ def serialize(self, buf, offset): msg_pack_into(ofproto.OFP_QUEUE_PROP_HEADER_PACK_STR, buf, offset, self.property, self.len) class OFPQueueProp(OFPQueuePropHeader): _QUEUE_PROP_PROPERTIES = {} @staticmethod def register_queue_property(property_, len_): def _register_queue_property(cls): cls.cls_property = property_ cls.cls_len = len_ OFPQueueProp._QUEUE_PROP_PROPERTIES[cls.cls_property] = cls return cls return _register_queue_property def __init__(self): cls = self.__class__ super(OFPQueueProp, self).__init__(cls.cls_property, cls.cls_len) @classmethod def parser(cls, buf, offset): (property_, len_) = struct.unpack_from( ofproto.OFP_QUEUE_PROP_HEADER_PACK_STR, buf, offset) cls_ = cls._QUEUE_PROP_PROPERTIES.get(property_) offset += ofproto.OFP_QUEUE_PROP_HEADER_SIZE return cls_.parser(buf, offset) @OFPQueueProp.register_queue_property( ofproto.OFPQT_MIN_RATE, ofproto.OFP_QUEUE_PROP_MIN_RATE_SIZE) class OFPQueuePropMinRate(OFPQueueProp): def __init__(self, rate, property_=None, len_=None): super(OFPQueuePropMinRate, self).__init__() self.rate = rate @classmethod def parser(cls, buf, offset): (rate,) = struct.unpack_from( ofproto.OFP_QUEUE_PROP_MIN_RATE_PACK_STR, buf, offset) return cls(rate) @OFPQueueProp.register_queue_property( ofproto.OFPQT_MAX_RATE, ofproto.OFP_QUEUE_PROP_MAX_RATE_SIZE) class OFPQueuePropMaxRate(OFPQueueProp): def __init__(self, rate, property_=None, len_=None): super(OFPQueuePropMaxRate, self).__init__() self.rate = rate @classmethod def parser(cls, buf, offset): (rate,) = struct.unpack_from( ofproto.OFP_QUEUE_PROP_MAX_RATE_PACK_STR, buf, offset) return cls(rate) # TODO: add ofp_queue_prop_experimenter class OFPPacketQueue(StringifyMixin): def __init__(self, queue_id, port, properties, len_=None): super(OFPPacketQueue, self).__init__() self.queue_id = queue_id self.port = port self.len = len_ self.properties = properties @classmethod def parser(cls, buf, offset): (queue_id, port, len_) = struct.unpack_from( ofproto.OFP_PACKET_QUEUE_PACK_STR, buf, offset) length = ofproto.OFP_PACKET_QUEUE_SIZE offset += ofproto.OFP_PACKET_QUEUE_SIZE properties = [] while length < len_: queue_prop = OFPQueueProp.parser(buf, offset) properties.append(queue_prop) offset += queue_prop.len length += queue_prop.len o = cls(queue_id, port, properties) o.len = len_ 
return o @_register_parser @_set_msg_type(ofproto.OFPT_QUEUE_GET_CONFIG_REPLY) class OFPQueueGetConfigReply(MsgBase): """ Queue configuration reply message The switch responds with this message to a queue configuration request. ================ ====================================================== Attribute Description ================ ====================================================== queues list of ``OFPPacketQueue`` instance port Port which was queried ================ ====================================================== Example:: @set_ev_cls(ofp_event.EventOFPQueueGetConfigReply, MAIN_DISPATCHER) def queue_get_config_reply_handler(self, ev): msg = ev.msg self.logger.debug('OFPQueueGetConfigReply received: ' 'port=%s queues=%s', msg.port, msg.queues) """ def __init__(self, datapath, queues=None, port=None): super(OFPQueueGetConfigReply, self).__init__(datapath) self.queues = queues self.port = port @classmethod def parser(cls, datapath, version, msg_type, msg_len, xid, buf): msg = super(OFPQueueGetConfigReply, cls).parser(datapath, version, msg_type, msg_len, xid, buf) (msg.port,) = struct.unpack_from( ofproto.OFP_QUEUE_GET_CONFIG_REPLY_PACK_STR, msg.buf, ofproto.OFP_HEADER_SIZE) msg.queues = [] offset = ofproto.OFP_QUEUE_GET_CONFIG_REPLY_SIZE while offset < msg_len: queue = OFPPacketQueue.parser(msg.buf, offset) msg.queues.append(queue) offset += queue.len return msg @_set_msg_type(ofproto.OFPT_ROLE_REQUEST) class OFPRoleRequest(MsgBase): """ Role request message The controller uses this message to change its role. ================ ====================================================== Attribute Description ================ ====================================================== role One of the following values. | OFPCR_ROLE_NOCHANGE | OFPCR_ROLE_EQUAL | OFPCR_ROLE_MASTER | OFPCR_ROLE_SLAVE generation_id Master Election Generation ID ================ ====================================================== Example:: def send_role_request(self, datapath): ofp = datapath.ofproto ofp_parser = datapath.ofproto_parser req = ofp_parser.OFPRoleRequest(datapath, ofp.OFPCR_ROLE_EQUAL, 0) datapath.send_msg(req) """ def __init__(self, datapath, role=None, generation_id=None): super(OFPRoleRequest, self).__init__(datapath) self.role = role self.generation_id = generation_id def _serialize_body(self): assert self.role is not None assert self.generation_id is not None msg_pack_into(ofproto.OFP_ROLE_REQUEST_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.role, self.generation_id) @_register_parser @_set_msg_type(ofproto.OFPT_ROLE_REPLY) class OFPRoleReply(MsgBase): """ Role reply message The switch responds with this message to a role request. ================ ====================================================== Attribute Description ================ ====================================================== role One of the following values. 
                     | OFPCR_ROLE_NOCHANGE
                     | OFPCR_ROLE_EQUAL
                     | OFPCR_ROLE_MASTER
                     | OFPCR_ROLE_SLAVE
    generation_id    Master Election Generation ID
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPRoleReply, MAIN_DISPATCHER)
        def role_reply_handler(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto

            if msg.role == ofp.OFPCR_ROLE_NOCHANGE:
                role = 'NOCHANGE'
            elif msg.role == ofp.OFPCR_ROLE_EQUAL:
                role = 'EQUAL'
            elif msg.role == ofp.OFPCR_ROLE_MASTER:
                role = 'MASTER'
            elif msg.role == ofp.OFPCR_ROLE_SLAVE:
                role = 'SLAVE'
            else:
                role = 'unknown'

            self.logger.debug('OFPRoleReply received: '
                              'role=%s generation_id=%d',
                              role, msg.generation_id)
    """
    def __init__(self, datapath, role=None, generation_id=None):
        super(OFPRoleReply, self).__init__(datapath)
        self.role = role
        self.generation_id = generation_id

    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPRoleReply, cls).parser(datapath, version,
                                              msg_type, msg_len, xid,
                                              buf)
        (msg.role, msg.generation_id) = struct.unpack_from(
            ofproto.OFP_ROLE_REQUEST_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        return msg


@_set_msg_type(ofproto.OFPT_GET_ASYNC_REQUEST)
class OFPGetAsyncRequest(MsgBase):
    """
    Get asynchronous configuration request message

    The controller uses this message to query the asynchronous message
    configuration.

    Example::

        def send_get_async_request(self, datapath):
            ofp_parser = datapath.ofproto_parser

            req = ofp_parser.OFPGetAsyncRequest(datapath)
            datapath.send_msg(req)
    """
    def __init__(self, datapath):
        super(OFPGetAsyncRequest, self).__init__(datapath)


@_register_parser
@_set_msg_type(ofproto.OFPT_GET_ASYNC_REPLY)
class OFPGetAsyncReply(MsgBase):
    """
    Get asynchronous configuration reply message

    The switch responds with this message to a get asynchronous
    configuration request.

    ================== ====================================================
    Attribute          Description
    ================== ====================================================
    packet_in_mask     2-element array: element 0 applies when the
                       controller has an OFPCR_ROLE_EQUAL or
                       OFPCR_ROLE_MASTER role; element 1 applies when it
                       has the OFPCR_ROLE_SLAVE role.
                       Bitmasks of the following values.

                       | OFPR_NO_MATCH
                       | OFPR_ACTION
                       | OFPR_INVALID_TTL
    port_status_mask   2-element array. Bitmasks of the following values.

                       | OFPPR_ADD
                       | OFPPR_DELETE
                       | OFPPR_MODIFY
    flow_removed_mask  2-element array. Bitmasks of the following values.
                       | OFPRR_IDLE_TIMEOUT
                       | OFPRR_HARD_TIMEOUT
                       | OFPRR_DELETE
                       | OFPRR_GROUP_DELETE
    ================== ====================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPGetAsyncReply, MAIN_DISPATCHER)
        def get_async_reply_handler(self, ev):
            msg = ev.msg

            self.logger.debug('OFPGetAsyncReply received: '
                              'packet_in_mask=0x%08x:0x%08x '
                              'port_status_mask=0x%08x:0x%08x '
                              'flow_removed_mask=0x%08x:0x%08x',
                              msg.packet_in_mask[0],
                              msg.packet_in_mask[1],
                              msg.port_status_mask[0],
                              msg.port_status_mask[1],
                              msg.flow_removed_mask[0],
                              msg.flow_removed_mask[1])
    """
    def __init__(self, datapath, packet_in_mask=None, port_status_mask=None,
                 flow_removed_mask=None):
        super(OFPGetAsyncReply, self).__init__(datapath)
        self.packet_in_mask = packet_in_mask
        self.port_status_mask = port_status_mask
        self.flow_removed_mask = flow_removed_mask

    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPGetAsyncReply, cls).parser(datapath, version,
                                                  msg_type, msg_len,
                                                  xid, buf)
        (packet_in_mask_m, packet_in_mask_s,
         port_status_mask_m, port_status_mask_s,
         flow_removed_mask_m,
         flow_removed_mask_s) = struct.unpack_from(
            ofproto.OFP_ASYNC_CONFIG_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        msg.packet_in_mask = [packet_in_mask_m, packet_in_mask_s]
        msg.port_status_mask = [port_status_mask_m, port_status_mask_s]
        msg.flow_removed_mask = [flow_removed_mask_m, flow_removed_mask_s]
        return msg


@_set_msg_type(ofproto.OFPT_SET_ASYNC)
class OFPSetAsync(MsgBase):
    """
    Set asynchronous configuration message

    The controller sends this message to set the asynchronous messages that
    it wants to receive on a given OpenFlow channel.

    ================== ====================================================
    Attribute          Description
    ================== ====================================================
    packet_in_mask     2-element array: element 0 applies when the
                       controller has an OFPCR_ROLE_EQUAL or
                       OFPCR_ROLE_MASTER role; element 1 applies when it
                       has the OFPCR_ROLE_SLAVE role.
                       Bitmasks of the following values.

                       | OFPR_NO_MATCH
                       | OFPR_ACTION
                       | OFPR_INVALID_TTL
    port_status_mask   2-element array. Bitmasks of the following values.

                       | OFPPR_ADD
                       | OFPPR_DELETE
                       | OFPPR_MODIFY
    flow_removed_mask  2-element array. Bitmasks of the following values.

                       | OFPRR_IDLE_TIMEOUT
                       | OFPRR_HARD_TIMEOUT
                       | OFPRR_DELETE
                       | OFPRR_GROUP_DELETE
    ================== ====================================================

    Example::

        def send_set_async(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser

            packet_in_mask = ofp.OFPR_ACTION | ofp.OFPR_INVALID_TTL
            port_status_mask = (ofp.OFPPR_ADD
                                | ofp.OFPPR_DELETE
                                | ofp.OFPPR_MODIFY)
            flow_removed_mask = (ofp.OFPRR_IDLE_TIMEOUT
                                 | ofp.OFPRR_HARD_TIMEOUT
                                 | ofp.OFPRR_DELETE)
            req = ofp_parser.OFPSetAsync(datapath,
                                         [packet_in_mask, 0],
                                         [port_status_mask, 0],
                                         [flow_removed_mask, 0])
            datapath.send_msg(req)
    """
    def __init__(self, datapath,
                 packet_in_mask, port_status_mask, flow_removed_mask):
        super(OFPSetAsync, self).__init__(datapath)
        self.packet_in_mask = packet_in_mask
        self.port_status_mask = port_status_mask
        self.flow_removed_mask = flow_removed_mask

    def _serialize_body(self):
        msg_pack_into(ofproto.OFP_ASYNC_CONFIG_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.packet_in_mask[0], self.packet_in_mask[1],
                      self.port_status_mask[0], self.port_status_mask[1],
                      self.flow_removed_mask[0],
                      self.flow_removed_mask[1])


from ryu.ofproto import nx_actions
nx_actions.generate(
    'ryu.ofproto.ofproto_v1_3',
    'ryu.ofproto.ofproto_v1_3_parser'
)
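
# --- Usage sketch (illustrative; not part of the original module) ---
# Unlike its neighbours, OFPExperimenterStatsRequest above carries no
# Example:: block. A minimal request from a Ryu app could look like the
# following; the experimenter ID and exp_type values are placeholders.
#
#     def send_experimenter_stats_request(self, datapath):
#         ofp_parser = datapath.ofproto_parser
#
#         req = ofp_parser.OFPExperimenterStatsRequest(datapath, 0,
#                                                      0x00002320,  # vendor ID
#                                                      0, b'')
#         datapath.send_msg(req)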
{ "content_hash": "4ad723803b29105e4222c760e23630fe", "timestamp": "", "source": "github", "line_count": 6046, "max_line_length": 80, "avg_line_length": 36.19765133972874, "alnum_prop": 0.5336781645960037, "repo_name": "jalilm/ryu", "id": "cc2f3299ef23964df3fb30d7b22968a149445aa7", "size": "219534", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ryu/ofproto/ofproto_v1_3_parser.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "8679" }, { "name": "CSS", "bytes": "306" }, { "name": "Erlang", "bytes": "871862" }, { "name": "HTML", "bytes": "306" }, { "name": "JavaScript", "bytes": "8436" }, { "name": "Makefile", "bytes": "1213" }, { "name": "Python", "bytes": "5382810" }, { "name": "Shell", "bytes": "14253" } ], "symlink_target": "" }
import geopy.exc as exc
import geopy.geocoders as geocoders


class BaseError(Exception):
    pass


class GeolocationFailure(BaseError):
    pass


class GeolocationError(BaseError):
    pass


class TemporaryError(BaseError):
    pass


class Geolocator(object):
    def __init__(self):
        self._geocoder = geocoders.Nominatim(timeout=5, country_bias='fr')

    def geocode(self, address):
        if not isinstance(address, (basestring, dict)):
            err_msg = u"address should be of type %s or %s." \
                      % (basestring, dict)
            raise TypeError(err_msg)

        try:
            geolocation = self._geocoder.geocode(address)

            if not geolocation:
                err_msg = u"Couldn't resolve the following address: '%s'" \
                          % address
                raise GeolocationFailure(err_msg)
        except (exc.GeocoderQuotaExceeded,
                exc.GeocoderUnavailable,
                exc.GeocoderTimedOut) as e:
            raise TemporaryError(u'Geolocator error: %s' % e)
        except exc.GeocoderServiceError as e:
            raise GeolocationError(u'Geolocator error: %s' % e)

        return geolocation
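
# --- Usage sketch (illustrative; not part of the original module) ---
# How a caller might drive Geolocator while separating retryable from
# permanent failures; the address below is a placeholder.
#
#     locator = Geolocator()
#     try:
#         location = locator.geocode(u'20 avenue de Segur, Paris')
#     except TemporaryError:
#         pass  # quota exceeded or timeout: worth retrying later
#     except (GeolocationFailure, GeolocationError):
#         pass  # permanent failure: log it and skip this address
#     else:
#         print location.latitude, location.longitude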
{ "content_hash": "6091e58289b3eb517ed91725be93c5d8", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 79, "avg_line_length": 27.217391304347824, "alnum_prop": 0.6006389776357828, "repo_name": "pyjobs/web", "id": "8a0f1e4af08ad618ee53ceb5ec02409012d3cc27", "size": "1276", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pyjobs_web/pyjobsweb/lib/geolocation.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "39378" }, { "name": "JavaScript", "bytes": "1731" }, { "name": "Makefile", "bytes": "52181" }, { "name": "Mako", "bytes": "412" }, { "name": "Python", "bytes": "219209" }, { "name": "Shell", "bytes": "2815" } ], "symlink_target": "" }
from neutron.extensions import providernet as provider from neutron.i18n import _LI from neutron.openstack.common import log as logging LOG = logging.getLogger(__name__) class Plumlib(object): """ Class PLUMgrid Fake Library. This library is a by-pass implementation for the PLUMgrid Library. This class is being used by the unit test integration in Neutron. """ def __init__(self): LOG.info(_LI('Python PLUMgrid Fake Library Started ')) def director_conn(self, director_plumgrid, director_port, timeout, director_admin, director_password): LOG.info(_LI('Fake Director: %s'), director_plumgrid + ':' + director_port) def create_network(self, tenant_id, net_db, network): net_db["network"] = {} for key in (provider.NETWORK_TYPE, provider.PHYSICAL_NETWORK, provider.SEGMENTATION_ID): net_db["network"][key] = network["network"][key] return net_db def update_network(self, tenant_id, net_id): pass def delete_network(self, net_db, net_id): pass def create_subnet(self, sub_db, net_db, ipnet): pass def update_subnet(self, orig_sub_db, new_sub_db, ipnet): pass def delete_subnet(self, tenant_id, net_db, net_id): pass def create_port(self, port_db, router_db): pass def update_port(self, port_db, router_db): pass def delete_port(self, port_db, router_db): pass def create_router(self, tenant_id, router_db): pass def update_router(self, router_db, router_id): pass def delete_router(self, tenant_id, router_id): pass def add_router_interface(self, tenant_id, router_id, port_db, ipnet): pass def remove_router_interface(self, tenant_id, net_id, router_id): pass def create_floatingip(self, floating_ip): pass def update_floatingip(self, floating_ip_orig, floating_ip, id): pass def delete_floatingip(self, floating_ip_orig, id): pass def disassociate_floatingips(self, fip, port_id): return dict((key, fip[key]) for key in ("id", "floating_network_id", "floating_ip_address")) def create_security_group(self, sg_db): pass def update_security_group(self, sg_db): pass def delete_security_group(self, sg_db): pass def create_security_group_rule(self, sg_rule_db): pass def create_security_group_rule_bulk(self, sg_rule_db): pass def delete_security_group_rule(self, sg_rule_db): pass
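
# --- Usage sketch (illustrative; not part of the original module) ---
# Most methods are deliberate no-ops; create_network is one of the few
# with observable behaviour, copying the provider fields from the request
# into net_db:
#
#     lib = Plumlib()
#     net = {'network': {'provider:network_type': 'vlan',
#                        'provider:physical_network': 'physnet1',
#                        'provider:segmentation_id': 100}}
#     net_db = lib.create_network('tenant-id', {}, net)
#     # net_db['network'] now holds the three provider:* keys above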
{ "content_hash": "eafe96a08feccf423bc22b9a5780bb50", "timestamp": "", "source": "github", "line_count": 99, "max_line_length": 76, "avg_line_length": 27.232323232323232, "alnum_prop": 0.6057121661721068, "repo_name": "cloudbase/neutron-virtualbox", "id": "a0f81fd53919763e089a618a17ef9d1bf2d818e7", "size": "3324", "binary": false, "copies": "1", "ref": "refs/heads/virtualbox_agent", "path": "neutron/plugins/plumgrid/drivers/fake_plumlib.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Mako", "bytes": "1043" }, { "name": "Python", "bytes": "8448838" }, { "name": "Shell", "bytes": "12510" } ], "symlink_target": "" }
import random

import pandas
from sklearn.cross_validation import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
from sklearn.preprocessing import LabelEncoder
from sklearn.utils import check_array
import tensorflow as tf
from tensorflow.contrib import layers
from tensorflow.contrib import learn

train = pandas.read_csv('data/titanic_train.csv')
y = train.pop('Survived')

# Drop all unique columns. List all variables for future reference.
categorical_vars = ['Pclass', 'Sex', 'Embarked']
continuous_vars = ['Age', 'SibSp', 'Parch', 'Fare']
X = train[categorical_vars + continuous_vars].fillna(0)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2,
                                                    random_state=42)

# Pandas input function.
def pandas_input_fn(x, y=None, batch_size=128, num_epochs=None):
    def input_fn():
        if y is not None:
            x['target'] = y
        queue = learn.dataframe.queues.feeding_functions.enqueue_data(
            x, 1000, shuffle=num_epochs is None, num_epochs=num_epochs)
        if num_epochs is None:
            features = queue.dequeue_many(batch_size)
        else:
            features = queue.dequeue_up_to(batch_size)
        features = dict(zip(['index'] + list(x.columns), features))
        if y is not None:
            target = features.pop('target')
            return features, target
        return features
    return input_fn


# Process categorical variables into ids.
X_train = X_train.copy()
X_test = X_test.copy()
categorical_var_encoders = {}
for var in categorical_vars:
    le = LabelEncoder().fit(X_train[var])
    X_train[var + '_ids'] = le.transform(X_train[var])
    X_test[var + '_ids'] = le.transform(X_test[var])
    X_train.pop(var)
    X_test.pop(var)
    categorical_var_encoders[var] = le

CATEGORICAL_EMBED_SIZE = 10  # Note, you can customize this per variable.

# 3 layer neural network with hyperbolic tangent activation.
def dnn_tanh(features, target):
    target = tf.one_hot(target, 2, 1.0, 0.0)
    # Organize continuous features.
    final_features = [tf.expand_dims(tf.cast(features[var], tf.float32), 1)
                      for var in continuous_vars]
    # Embed categorical variables into distributed representation.
    for var in categorical_vars:
        feature = learn.ops.categorical_variable(
            features[var + '_ids'],
            len(categorical_var_encoders[var].classes_),
            embedding_size=CATEGORICAL_EMBED_SIZE,
            name=var)
        final_features.append(feature)
    # Concatenate all features into one vector.
    features = tf.concat(1, final_features)
    # Deep Neural Network
    logits = layers.stack(features, layers.fully_connected, [10, 20, 10],
                          activation_fn=tf.tanh)
    prediction, loss = learn.models.logistic_regression(logits, target)
    train_op = layers.optimize_loss(loss,
                                    tf.contrib.framework.get_global_step(),
                                    optimizer='SGD',
                                    learning_rate=0.05)
    return tf.argmax(prediction, dimension=1), loss, train_op

random.seed(42)
classifier = learn.Estimator(model_fn=dnn_tanh)
# Note: not training this almost at all.
classifier.fit(input_fn=pandas_input_fn(X_train, y_train), steps=100)
preds = list(classifier.predict(input_fn=pandas_input_fn(X_test,
                                                         num_epochs=1),
                                as_iterable=True))
print(accuracy_score(y_test, preds))
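
# --- Note (illustrative; not part of the original script) ---
# LabelEncoder.transform raises ValueError on categories that were absent
# from the data it was fitted on, so encoding X_test with encoders fitted
# on X_train alone is only safe when every category also appears in the
# training split (true for these Titanic columns). A common guard is to
# fit on the full column before splitting:
#
#     le = LabelEncoder().fit(X[var])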
{ "content_hash": "5fbeeb0dd2c6138782da84aa153449be", "timestamp": "", "source": "github", "line_count": 84, "max_line_length": 102, "avg_line_length": 38.29761904761905, "alnum_prop": 0.7105999378302766, "repo_name": "ilblackdragon/tf_examples", "id": "2cd6c2493c3b94d0891c400b35f9c8e035e724cd", "size": "3217", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "titanic_all_features.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "31526" } ], "symlink_target": "" }
r""" I/O utils (:mod:`skbio.io.util`) ================================ .. currentmodule:: skbio.io.util This module provides utility functions to deal with files and I/O in general. Functions --------- .. autosummary:: :toctree: generated/ open_file open_files """ # ---------------------------------------------------------------------------- # Copyright (c) 2013--, scikit-bio development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. # ---------------------------------------------------------------------------- from future.builtins import bytes, str from contextlib import contextmanager def _is_string_or_bytes(s): """Returns True if input argument is string (unicode or not) or bytes. """ return isinstance(s, str) or isinstance(s, bytes) def _get_filehandle(filepath_or, *args, **kwargs): """Open file if `filepath_or` looks like a string/unicode/bytes, else pass through. """ if _is_string_or_bytes(filepath_or): fh, own_fh = open(filepath_or, *args, **kwargs), True else: fh, own_fh = filepath_or, False return fh, own_fh @contextmanager def open_file(filepath_or, *args, **kwargs): """Context manager, like ``open``, but lets file handles and file like objects pass untouched. It is useful when implementing a function that can accept both strings and file-like objects (like numpy.loadtxt, etc). Parameters ---------- filepath_or : str/bytes/unicode string or file-like If string, file to be opened using ``open``. Else, it is returned untouched. Other parameters ---------------- args, kwargs : tuple, dict When `filepath_or` is a string, any extra arguments are passed on to the ``open`` builtin. Examples -------- >>> with open_file('filename') as f: # doctest: +SKIP ... pass >>> fh = open('filename') # doctest: +SKIP >>> with open_file(fh) as f: # doctest: +SKIP ... pass >>> fh.closed # doctest: +SKIP False >>> fh.close() # doctest: +SKIP """ fh, own_fh = _get_filehandle(filepath_or, *args, **kwargs) try: yield fh finally: if own_fh: fh.close() @contextmanager def open_files(fp_list, *args, **kwargs): fhs, owns = zip(*[_get_filehandle(f, *args, **kwargs) for f in fp_list]) try: yield fhs finally: for fh, is_own in zip(fhs, owns): if is_own: fh.close()
{ "content_hash": "3093e8ad477426ea6aa225a7de32d7e9", "timestamp": "", "source": "github", "line_count": 99, "max_line_length": 78, "avg_line_length": 26.838383838383837, "alnum_prop": 0.550621001129093, "repo_name": "JWDebelius/scikit-bio", "id": "362d19d264a67118ff15f296e11960d369653ccc", "size": "2657", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "skbio/io/util.py", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
from gui.widgets.frames.notebooks import Notebook
from gui.widgets.frames.tabs import TargetsTab, ExtractionTab, PlotTab
import constants as c


class SameTabsNotebook(Notebook.Notebook):
    def __init__(self, parent, name, row, column, **kwargs):
        Notebook.Notebook.__init__(self, parent, name, row, column, **kwargs)
        self.tab_count = -1
        self.last_tab = None
        self.widget.bind("<<NotebookTabChanged>>", self.tabChangedEvent)

    def tabChangedEvent(self, event):
        if event.widget.index("current") == self.tab_count+1:
            self.plusTabClicked()
            self.tabDefaultValues(-1)

    def addInitialTabs(self):
        self.last_tab = self.addTab("+")
        self.plusTabClicked()

    def newTab(self, deleteTab):
        raise NotImplementedError("newTab not implemented!")

    def addTab(self, text):
        tab = self.newTab(self.deleteTab)
        self.widget.add(tab.widget, text=text)
        return tab

    def tabDefaultValues(self, tab_index):
        self.widgets_list[tab_index].loadDefaultValue()

    def loadDefaultValue(self):
        Notebook.Notebook.loadDefaultValue(self)
        self.addInitialTabs()
        for i in range(self.tab_count+1):
            self.tabDefaultValues(i)

    def save(self, file):
        file.write(str(self.tab_count)+"\n")
        Notebook.Notebook.save(self, file)

    def load(self, file):
        if self.tab_count == -1:
            self.addInitialTabs()
        self.deleteAllTabs()
        tab_count = int(file.readline())
        for i in range(tab_count):
            self.plusTabClicked()
        Notebook.Notebook.load(self, file)

    def deleteAllTabs(self):
        if self.tab_count != -1:
            self.widget.select(0)
            while self.tab_count > 0:
                self.deleteTab()

    def getCurrentTab(self):
        return self.widget.index("current")

    def deleteTab(self):
        current = self.getCurrentTab()
        del self.widgets_list[current]
        self.tab_count -= 1
        if self.tab_count != -1:
            self.changeActiveTab(current)
        self.widget.forget(current)
        return current

    def changeActiveTab(self, current):
        if current == self.tab_count+1:
            self.widget.select(current-1)
        else:
            while current < self.tab_count+2:
                # tab texts come back from Tk as strings, so cast to int
                # before renumbering the remaining tabs
                self.widget.tab(current, text=int(self.widget.tab(current, "text"))-1)
                current += 1

    def plusTabClicked(self):
        self.tab_count += 1
        self.widgets_list.append(self.last_tab)
        self.widget.tab(self.tab_count, text=self.tab_count+1)
        self.last_tab = self.addTab(c.PLUS_TAB)

    def getValue(self):
        return {i+1: widget.getValue() for i, widget in enumerate(self.widgets_list) if not widget.disabled}


class ExtractionNotebook(SameTabsNotebook):
    def __init__(self, parent, row, column, target_notebook_widgets, **kwargs):
        SameTabsNotebook.__init__(self, parent, c.EXTRACTION_NOTEBOOK, row, column, **kwargs)
        self.target_notebook_widgets = target_notebook_widgets

    def newTab(self, deleteTab):
        return ExtractionTab.ExtractionTab(self.widget, deleteTab, self.target_notebook_widgets)


class PlotNotebook(SameTabsNotebook):
    def __init__(self, parent, row, column, **kwargs):
        SameTabsNotebook.__init__(self, parent, c.PLOT_NOTEBOOK, row, column, **kwargs)

    def newTab(self, deleteTab):
        return PlotTab.PlotTab(self.widget, deleteTab)


class TargetNotebook(SameTabsNotebook):
    def __init__(self, parent, row, column, addTarget, removeTarget, disableTarget, enableTarget, getMonitorFreq, **kwargs):
        SameTabsNotebook.__init__(self, parent, c.TARGETS_NOTEBOOK, row, column, **kwargs)
        self.getMonitorFreq = getMonitorFreq
        self.addTarget = addTarget
        self.removeTarget = removeTarget
        self.disableTarget = disableTarget
        self.enableTarget = enableTarget

    def changeFreq(self):
        for widget in self.widgets_list:
            widget.changeFreq()

    def plusTabClicked(self):  # Updates TargetChoosingMenus
        self.addTarget()  # MainNotebook's targetAdded method which calls TargetChoosingMenu's targetAdded
        SameTabsNotebook.plusTabClicked(self)

    def getEnabledTabs(self):
        return list(tab.disabled for tab in self.widgets_list)  # list of disabled flags, one per tab

    def newTab(self, deleteTab):
        return TargetsTab.TargetsTab(self.widget, self.disableTarget, self.enableTarget, self.getMonitorFreq, deleteTab, self.getEnabledTabs, self.getCurrentTab)

    def deleteTab(self):  # Updates TargetChoosingMenus
        deleted_tab = SameTabsNotebook.deleteTab(self)
        self.removeTarget(deleted_tab)  # MainNotebook's targetRemoved method which calls TargetChoosingMenu's targetRemoved
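
# --- Minimal sketch of the "+ tab" pattern used above (illustrative; not
# part of the original module). In plain Tkinter/ttk terms: keep a trailing
# "+" placeholder tab and, when it gets selected, insert a real tab in
# front of it and select that one instead.
#
#     import Tkinter as tk
#     import ttk
#
#     def on_tab_changed(event):
#         nb = event.widget
#         plus = nb.index("end") - 1         # the trailing "+" tab
#         if nb.index("current") == plus:
#             nb.insert(plus, tk.Frame(nb), text=str(plus + 1))
#             nb.select(plus)                # select the newly added tab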
{ "content_hash": "72c42066a1856080abdebdec86ac0ee8", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 161, "avg_line_length": 36.84615384615385, "alnum_prop": 0.6586638830897703, "repo_name": "kahvel/VEP-BCI", "id": "603450369c987507b595bbe1cc43f70f802de0e1", "size": "4790", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/gui/widgets/frames/notebooks/SameTabsNotebook.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "214297" } ], "symlink_target": "" }
import datetime
from typing import Dict, List, Optional, TYPE_CHECKING, Union

from ... import _serialization

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from .. import models as _models


class AddressSpace(_serialization.Model):
    """AddressSpace contains an array of IP address ranges that can be used by subnets of the
    virtual network.

    :ivar address_prefixes: A list of address blocks reserved for this virtual network in CIDR
     notation.
    :vartype address_prefixes: list[str]
    """

    _attribute_map = {
        "address_prefixes": {"key": "addressPrefixes", "type": "[str]"},
    }

    def __init__(self, *, address_prefixes: Optional[List[str]] = None, **kwargs):
        """
        :keyword address_prefixes: A list of address blocks reserved for this virtual network in CIDR
         notation.
        :paramtype address_prefixes: list[str]
        """
        super().__init__(**kwargs)
        self.address_prefixes = address_prefixes


class Resource(_serialization.Model):
    """Common resource representation.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        """
        super().__init__(**kwargs)
        self.id = id
        self.name = None
        self.type = None
        self.location = location
        self.tags = tags


class ApplicationGateway(Resource):  # pylint: disable=too-many-instance-attributes
    """Application gateway resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar zones: A list of availability zones denoting where the resource needs to come from.
    :vartype zones: list[str]
    :ivar sku: SKU of the application gateway resource.
    :vartype sku: ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySku
    :ivar ssl_policy: SSL policy of the application gateway resource.
    :vartype ssl_policy: ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslPolicy
    :ivar operational_state: Operational state of the application gateway resource. Known values
     are: "Stopped", "Starting", "Running", and "Stopping".
    :vartype operational_state: str or
     ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayOperationalState
    :ivar gateway_ip_configurations: Subnets of the application gateway resource.
:vartype gateway_ip_configurations: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayIPConfiguration] :ivar authentication_certificates: Authentication certificates of the application gateway resource. :vartype authentication_certificates: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayAuthenticationCertificate] :ivar ssl_certificates: SSL certificates of the application gateway resource. :vartype ssl_certificates: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslCertificate] :ivar frontend_ip_configurations: Frontend IP addresses of the application gateway resource. :vartype frontend_ip_configurations: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFrontendIPConfiguration] :ivar frontend_ports: Frontend ports of the application gateway resource. :vartype frontend_ports: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFrontendPort] :ivar probes: Probes of the application gateway resource. :vartype probes: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayProbe] :ivar backend_address_pools: Backend address pool of the application gateway resource. :vartype backend_address_pools: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendAddressPool] :ivar backend_http_settings_collection: Backend http settings of the application gateway resource. :vartype backend_http_settings_collection: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendHttpSettings] :ivar http_listeners: Http listeners of the application gateway resource. :vartype http_listeners: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayHttpListener] :ivar url_path_maps: URL path map of the application gateway resource. :vartype url_path_maps: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayUrlPathMap] :ivar request_routing_rules: Request routing rules of the application gateway resource. :vartype request_routing_rules: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayRequestRoutingRule] :ivar redirect_configurations: Redirect configurations of the application gateway resource. :vartype redirect_configurations: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayRedirectConfiguration] :ivar web_application_firewall_configuration: Web application firewall configuration. :vartype web_application_firewall_configuration: ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayWebApplicationFirewallConfiguration :ivar enable_http2: Whether HTTP2 is enabled on the application gateway resource. :vartype enable_http2: bool :ivar enable_fips: Whether FIPS is enabled on the application gateway resource. :vartype enable_fips: bool :ivar autoscale_configuration: Autoscale Configuration. :vartype autoscale_configuration: ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayAutoscaleConfiguration :ivar resource_guid: Resource GUID property of the application gateway resource. :vartype resource_guid: str :ivar provisioning_state: Provisioning state of the application gateway resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. 
:vartype provisioning_state: str """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "operational_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "zones": {"key": "zones", "type": "[str]"}, "sku": {"key": "properties.sku", "type": "ApplicationGatewaySku"}, "ssl_policy": {"key": "properties.sslPolicy", "type": "ApplicationGatewaySslPolicy"}, "operational_state": {"key": "properties.operationalState", "type": "str"}, "gateway_ip_configurations": { "key": "properties.gatewayIPConfigurations", "type": "[ApplicationGatewayIPConfiguration]", }, "authentication_certificates": { "key": "properties.authenticationCertificates", "type": "[ApplicationGatewayAuthenticationCertificate]", }, "ssl_certificates": {"key": "properties.sslCertificates", "type": "[ApplicationGatewaySslCertificate]"}, "frontend_ip_configurations": { "key": "properties.frontendIPConfigurations", "type": "[ApplicationGatewayFrontendIPConfiguration]", }, "frontend_ports": {"key": "properties.frontendPorts", "type": "[ApplicationGatewayFrontendPort]"}, "probes": {"key": "properties.probes", "type": "[ApplicationGatewayProbe]"}, "backend_address_pools": { "key": "properties.backendAddressPools", "type": "[ApplicationGatewayBackendAddressPool]", }, "backend_http_settings_collection": { "key": "properties.backendHttpSettingsCollection", "type": "[ApplicationGatewayBackendHttpSettings]", }, "http_listeners": {"key": "properties.httpListeners", "type": "[ApplicationGatewayHttpListener]"}, "url_path_maps": {"key": "properties.urlPathMaps", "type": "[ApplicationGatewayUrlPathMap]"}, "request_routing_rules": { "key": "properties.requestRoutingRules", "type": "[ApplicationGatewayRequestRoutingRule]", }, "redirect_configurations": { "key": "properties.redirectConfigurations", "type": "[ApplicationGatewayRedirectConfiguration]", }, "web_application_firewall_configuration": { "key": "properties.webApplicationFirewallConfiguration", "type": "ApplicationGatewayWebApplicationFirewallConfiguration", }, "enable_http2": {"key": "properties.enableHttp2", "type": "bool"}, "enable_fips": {"key": "properties.enableFips", "type": "bool"}, "autoscale_configuration": { "key": "properties.autoscaleConfiguration", "type": "ApplicationGatewayAutoscaleConfiguration", }, "resource_guid": {"key": "properties.resourceGuid", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( # pylint: disable=too-many-locals self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, etag: Optional[str] = None, zones: Optional[List[str]] = None, sku: Optional["_models.ApplicationGatewaySku"] = None, ssl_policy: Optional["_models.ApplicationGatewaySslPolicy"] = None, gateway_ip_configurations: Optional[List["_models.ApplicationGatewayIPConfiguration"]] = None, authentication_certificates: Optional[List["_models.ApplicationGatewayAuthenticationCertificate"]] = None, ssl_certificates: Optional[List["_models.ApplicationGatewaySslCertificate"]] = None, frontend_ip_configurations: Optional[List["_models.ApplicationGatewayFrontendIPConfiguration"]] = None, frontend_ports: Optional[List["_models.ApplicationGatewayFrontendPort"]] = None, probes: 
Optional[List["_models.ApplicationGatewayProbe"]] = None,
        backend_address_pools: Optional[List["_models.ApplicationGatewayBackendAddressPool"]] = None,
        backend_http_settings_collection: Optional[List["_models.ApplicationGatewayBackendHttpSettings"]] = None,
        http_listeners: Optional[List["_models.ApplicationGatewayHttpListener"]] = None,
        url_path_maps: Optional[List["_models.ApplicationGatewayUrlPathMap"]] = None,
        request_routing_rules: Optional[List["_models.ApplicationGatewayRequestRoutingRule"]] = None,
        redirect_configurations: Optional[List["_models.ApplicationGatewayRedirectConfiguration"]] = None,
        web_application_firewall_configuration: Optional[
            "_models.ApplicationGatewayWebApplicationFirewallConfiguration"
        ] = None,
        enable_http2: Optional[bool] = None,
        enable_fips: Optional[bool] = None,
        autoscale_configuration: Optional["_models.ApplicationGatewayAutoscaleConfiguration"] = None,
        resource_guid: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword zones: A list of availability zones denoting where the resource needs to come from.
        :paramtype zones: list[str]
        :keyword sku: SKU of the application gateway resource.
        :paramtype sku: ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySku
        :keyword ssl_policy: SSL policy of the application gateway resource.
        :paramtype ssl_policy: ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslPolicy
        :keyword gateway_ip_configurations: Subnets of the application gateway resource.
        :paramtype gateway_ip_configurations:
         list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayIPConfiguration]
        :keyword authentication_certificates: Authentication certificates of the application gateway
         resource.
        :paramtype authentication_certificates:
         list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayAuthenticationCertificate]
        :keyword ssl_certificates: SSL certificates of the application gateway resource.
        :paramtype ssl_certificates:
         list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslCertificate]
        :keyword frontend_ip_configurations: Frontend IP addresses of the application gateway
         resource.
        :paramtype frontend_ip_configurations:
         list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFrontendIPConfiguration]
        :keyword frontend_ports: Frontend ports of the application gateway resource.
        :paramtype frontend_ports:
         list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFrontendPort]
        :keyword probes: Probes of the application gateway resource.
        :paramtype probes: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayProbe]
        :keyword backend_address_pools: Backend address pool of the application gateway resource.
        :paramtype backend_address_pools:
         list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendAddressPool]
        :keyword backend_http_settings_collection: Backend http settings of the application gateway
         resource.
        :paramtype backend_http_settings_collection:
         list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendHttpSettings]
        :keyword http_listeners: Http listeners of the application gateway resource.
        :paramtype http_listeners:
         list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayHttpListener]
        :keyword url_path_maps: URL path map of the application gateway resource.
:paramtype url_path_maps: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayUrlPathMap] :keyword request_routing_rules: Request routing rules of the application gateway resource. :paramtype request_routing_rules: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayRequestRoutingRule] :keyword redirect_configurations: Redirect configurations of the application gateway resource. :paramtype redirect_configurations: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayRedirectConfiguration] :keyword web_application_firewall_configuration: Web application firewall configuration. :paramtype web_application_firewall_configuration: ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayWebApplicationFirewallConfiguration :keyword enable_http2: Whether HTTP2 is enabled on the application gateway resource. :paramtype enable_http2: bool :keyword enable_fips: Whether FIPS is enabled on the application gateway resource. :paramtype enable_fips: bool :keyword autoscale_configuration: Autoscale Configuration. :paramtype autoscale_configuration: ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayAutoscaleConfiguration :keyword resource_guid: Resource GUID property of the application gateway resource. :paramtype resource_guid: str :keyword provisioning_state: Provisioning state of the application gateway resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.etag = etag self.zones = zones self.sku = sku self.ssl_policy = ssl_policy self.operational_state = None self.gateway_ip_configurations = gateway_ip_configurations self.authentication_certificates = authentication_certificates self.ssl_certificates = ssl_certificates self.frontend_ip_configurations = frontend_ip_configurations self.frontend_ports = frontend_ports self.probes = probes self.backend_address_pools = backend_address_pools self.backend_http_settings_collection = backend_http_settings_collection self.http_listeners = http_listeners self.url_path_maps = url_path_maps self.request_routing_rules = request_routing_rules self.redirect_configurations = redirect_configurations self.web_application_firewall_configuration = web_application_firewall_configuration self.enable_http2 = enable_http2 self.enable_fips = enable_fips self.autoscale_configuration = autoscale_configuration self.resource_guid = resource_guid self.provisioning_state = provisioning_state class SubResource(_serialization.Model): """Reference to another subresource. :ivar id: Resource ID. :vartype id: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, } def __init__(self, *, id: Optional[str] = None, **kwargs): # pylint: disable=redefined-builtin """ :keyword id: Resource ID. :paramtype id: str """ super().__init__(**kwargs) self.id = id class ApplicationGatewayAuthenticationCertificate(SubResource): """Authentication certificates of an application gateway. :ivar id: Resource ID. :vartype id: str :ivar name: Name of the authentication certificate that is unique within an Application Gateway. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar type: Type of the resource. :vartype type: str :ivar data: Certificate public data. :vartype data: str :ivar provisioning_state: Provisioning state of the authentication certificate resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. 
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "data": {"key": "properties.data", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        type: Optional[str] = None,
        data: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the authentication certificate that is unique within an Application
         Gateway.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword type: Type of the resource.
        :paramtype type: str
        :keyword data: Certificate public data.
        :paramtype data: str
        :keyword provisioning_state: Provisioning state of the authentication certificate resource.
         Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.type = type
        self.data = data
        self.provisioning_state = provisioning_state


class ApplicationGatewayAutoscaleBounds(_serialization.Model):
    """Application Gateway autoscale bounds on number of Application Gateway instance.

    All required parameters must be populated in order to send to Azure.

    :ivar min: Lower bound on number of Application Gateway instances. Required.
    :vartype min: int
    :ivar max: Upper bound on number of Application Gateway instances. Required.
    :vartype max: int
    """

    _validation = {
        "min": {"required": True},
        "max": {"required": True},
    }

    _attribute_map = {
        "min": {"key": "min", "type": "int"},
        "max": {"key": "max", "type": "int"},
    }

    def __init__(
        self,
        *,
        min: int,  # pylint: disable=redefined-builtin
        max: int,  # pylint: disable=redefined-builtin
        **kwargs
    ):
        """
        :keyword min: Lower bound on number of Application Gateway instances. Required.
        :paramtype min: int
        :keyword max: Upper bound on number of Application Gateway instances. Required.
        :paramtype max: int
        """
        super().__init__(**kwargs)
        self.min = min
        self.max = max


class ApplicationGatewayAutoscaleConfiguration(_serialization.Model):
    """Application Gateway autoscale configuration.

    All required parameters must be populated in order to send to Azure.

    :ivar bounds: Autoscale bounds. Required.
    :vartype bounds: ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayAutoscaleBounds
    """

    _validation = {
        "bounds": {"required": True},
    }

    _attribute_map = {
        "bounds": {"key": "bounds", "type": "ApplicationGatewayAutoscaleBounds"},
    }

    def __init__(self, *, bounds: "_models.ApplicationGatewayAutoscaleBounds", **kwargs):
        """
        :keyword bounds: Autoscale bounds. Required.
        :paramtype bounds: ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayAutoscaleBounds
        """
        super().__init__(**kwargs)
        self.bounds = bounds


class ApplicationGatewayAvailableSslOptions(Resource):
    """Response for ApplicationGatewayAvailableSslOptions API service call.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
:vartype tags: dict[str, str] :ivar predefined_policies: List of available Ssl predefined policy. :vartype predefined_policies: list[~azure.mgmt.network.v2018_07_01.models.SubResource] :ivar default_policy: Name of the Ssl predefined policy applied by default to application gateway. Known values are: "AppGwSslPolicy20150501", "AppGwSslPolicy20170401", and "AppGwSslPolicy20170401S". :vartype default_policy: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslPolicyName :ivar available_cipher_suites: List of available Ssl cipher suites. :vartype available_cipher_suites: list[str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslCipherSuite] :ivar available_protocols: List of available Ssl protocols. :vartype available_protocols: list[str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslProtocol] """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "predefined_policies": {"key": "properties.predefinedPolicies", "type": "[SubResource]"}, "default_policy": {"key": "properties.defaultPolicy", "type": "str"}, "available_cipher_suites": {"key": "properties.availableCipherSuites", "type": "[str]"}, "available_protocols": {"key": "properties.availableProtocols", "type": "[str]"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, predefined_policies: Optional[List["_models.SubResource"]] = None, default_policy: Optional[Union[str, "_models.ApplicationGatewaySslPolicyName"]] = None, available_cipher_suites: Optional[List[Union[str, "_models.ApplicationGatewaySslCipherSuite"]]] = None, available_protocols: Optional[List[Union[str, "_models.ApplicationGatewaySslProtocol"]]] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword predefined_policies: List of available Ssl predefined policy. :paramtype predefined_policies: list[~azure.mgmt.network.v2018_07_01.models.SubResource] :keyword default_policy: Name of the Ssl predefined policy applied by default to application gateway. Known values are: "AppGwSslPolicy20150501", "AppGwSslPolicy20170401", and "AppGwSslPolicy20170401S". :paramtype default_policy: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslPolicyName :keyword available_cipher_suites: List of available Ssl cipher suites. :paramtype available_cipher_suites: list[str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslCipherSuite] :keyword available_protocols: List of available Ssl protocols. :paramtype available_protocols: list[str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslProtocol] """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.predefined_policies = predefined_policies self.default_policy = default_policy self.available_cipher_suites = available_cipher_suites self.available_protocols = available_protocols class ApplicationGatewayAvailableSslPredefinedPolicies(_serialization.Model): """Response for ApplicationGatewayAvailableSslOptions API service call. :ivar value: List of available Ssl predefined policy. 
:vartype value: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslPredefinedPolicy] :ivar next_link: URL to get the next set of results. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[ApplicationGatewaySslPredefinedPolicy]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.ApplicationGatewaySslPredefinedPolicy"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: List of available Ssl predefined policy. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslPredefinedPolicy] :keyword next_link: URL to get the next set of results. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class ApplicationGatewayAvailableWafRuleSetsResult(_serialization.Model): """Response for ApplicationGatewayAvailableWafRuleSets API service call. :ivar value: The list of application gateway rule sets. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFirewallRuleSet] """ _attribute_map = { "value": {"key": "value", "type": "[ApplicationGatewayFirewallRuleSet]"}, } def __init__(self, *, value: Optional[List["_models.ApplicationGatewayFirewallRuleSet"]] = None, **kwargs): """ :keyword value: The list of application gateway rule sets. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFirewallRuleSet] """ super().__init__(**kwargs) self.value = value class ApplicationGatewayBackendAddress(_serialization.Model): """Backend address of an application gateway. :ivar fqdn: Fully qualified domain name (FQDN). :vartype fqdn: str :ivar ip_address: IP address. :vartype ip_address: str """ _attribute_map = { "fqdn": {"key": "fqdn", "type": "str"}, "ip_address": {"key": "ipAddress", "type": "str"}, } def __init__(self, *, fqdn: Optional[str] = None, ip_address: Optional[str] = None, **kwargs): """ :keyword fqdn: Fully qualified domain name (FQDN). :paramtype fqdn: str :keyword ip_address: IP address. :paramtype ip_address: str """ super().__init__(**kwargs) self.fqdn = fqdn self.ip_address = ip_address class ApplicationGatewayBackendAddressPool(SubResource): """Backend Address Pool of an application gateway. :ivar id: Resource ID. :vartype id: str :ivar name: Name of the backend address pool that is unique within an Application Gateway. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar type: Type of the resource. :vartype type: str :ivar backend_ip_configurations: Collection of references to IPs defined in network interfaces. :vartype backend_ip_configurations: list[~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceIPConfiguration] :ivar backend_addresses: Backend addresses. :vartype backend_addresses: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendAddress] :ivar provisioning_state: Provisioning state of the backend address pool resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. 
:vartype provisioning_state: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "type": {"key": "type", "type": "str"}, "backend_ip_configurations": { "key": "properties.backendIPConfigurations", "type": "[NetworkInterfaceIPConfiguration]", }, "backend_addresses": {"key": "properties.backendAddresses", "type": "[ApplicationGatewayBackendAddress]"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, type: Optional[str] = None, backend_ip_configurations: Optional[List["_models.NetworkInterfaceIPConfiguration"]] = None, backend_addresses: Optional[List["_models.ApplicationGatewayBackendAddress"]] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: Name of the backend address pool that is unique within an Application Gateway. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword type: Type of the resource. :paramtype type: str :keyword backend_ip_configurations: Collection of references to IPs defined in network interfaces. :paramtype backend_ip_configurations: list[~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceIPConfiguration] :keyword backend_addresses: Backend addresses. :paramtype backend_addresses: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendAddress] :keyword provisioning_state: Provisioning state of the backend address pool resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.type = type self.backend_ip_configurations = backend_ip_configurations self.backend_addresses = backend_addresses self.provisioning_state = provisioning_state class ApplicationGatewayBackendHealth(_serialization.Model): """List of ApplicationGatewayBackendHealthPool resources. :ivar backend_address_pools: :vartype backend_address_pools: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendHealthPool] """ _attribute_map = { "backend_address_pools": {"key": "backendAddressPools", "type": "[ApplicationGatewayBackendHealthPool]"}, } def __init__( self, *, backend_address_pools: Optional[List["_models.ApplicationGatewayBackendHealthPool"]] = None, **kwargs ): """ :keyword backend_address_pools: :paramtype backend_address_pools: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendHealthPool] """ super().__init__(**kwargs) self.backend_address_pools = backend_address_pools class ApplicationGatewayBackendHealthHttpSettings(_serialization.Model): """Application gateway BackendHealthHttp settings. :ivar backend_http_settings: Reference of an ApplicationGatewayBackendHttpSettings resource. :vartype backend_http_settings: ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendHttpSettings :ivar servers: List of ApplicationGatewayBackendHealthServer resources. 
    :vartype servers:
     list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendHealthServer]
    """

    _attribute_map = {
        "backend_http_settings": {"key": "backendHttpSettings", "type": "ApplicationGatewayBackendHttpSettings"},
        "servers": {"key": "servers", "type": "[ApplicationGatewayBackendHealthServer]"},
    }

    def __init__(
        self,
        *,
        backend_http_settings: Optional["_models.ApplicationGatewayBackendHttpSettings"] = None,
        servers: Optional[List["_models.ApplicationGatewayBackendHealthServer"]] = None,
        **kwargs
    ):
        """
        :keyword backend_http_settings: Reference of an ApplicationGatewayBackendHttpSettings
         resource.
        :paramtype backend_http_settings:
         ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendHttpSettings
        :keyword servers: List of ApplicationGatewayBackendHealthServer resources.
        :paramtype servers:
         list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendHealthServer]
        """
        super().__init__(**kwargs)
        self.backend_http_settings = backend_http_settings
        self.servers = servers


class ApplicationGatewayBackendHealthPool(_serialization.Model):
    """Application gateway BackendHealth pool.

    :ivar backend_address_pool: Reference of an ApplicationGatewayBackendAddressPool resource.
    :vartype backend_address_pool:
     ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendAddressPool
    :ivar backend_http_settings_collection: List of ApplicationGatewayBackendHealthHttpSettings
     resources.
    :vartype backend_http_settings_collection:
     list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendHealthHttpSettings]
    """

    _attribute_map = {
        "backend_address_pool": {"key": "backendAddressPool", "type": "ApplicationGatewayBackendAddressPool"},
        "backend_http_settings_collection": {
            "key": "backendHttpSettingsCollection",
            "type": "[ApplicationGatewayBackendHealthHttpSettings]",
        },
    }

    def __init__(
        self,
        *,
        backend_address_pool: Optional["_models.ApplicationGatewayBackendAddressPool"] = None,
        backend_http_settings_collection: Optional[List["_models.ApplicationGatewayBackendHealthHttpSettings"]] = None,
        **kwargs
    ):
        """
        :keyword backend_address_pool: Reference of an ApplicationGatewayBackendAddressPool
         resource.
        :paramtype backend_address_pool:
         ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendAddressPool
        :keyword backend_http_settings_collection: List of
         ApplicationGatewayBackendHealthHttpSettings resources.
        :paramtype backend_http_settings_collection:
         list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendHealthHttpSettings]
        """
        super().__init__(**kwargs)
        self.backend_address_pool = backend_address_pool
        self.backend_http_settings_collection = backend_http_settings_collection
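

# Illustrative sketch of walking a backend-health result (not part of the
# generated code). ``backend_health`` stands for the resolved result of the
# long-running backend-health operation on the application gateways
# operations group (typically ``begin_backend_health(...)``):
#
#     for pool_health in backend_health.backend_address_pools or []:
#         for settings_health in pool_health.backend_http_settings_collection or []:
#             for server in settings_health.servers or []:
#                 print(server.address, server.health)


class ApplicationGatewayBackendHealthServer(_serialization.Model):
    """Application gateway backend health server.

    :ivar address: IP address or FQDN of backend server.
    :vartype address: str
    :ivar ip_configuration: Reference of IP configuration of backend server.
    :vartype ip_configuration:
     ~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceIPConfiguration
    :ivar health: Health of backend server. Known values are: "Unknown", "Up", "Down", "Partial",
     and "Draining".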
    :vartype health: str or
     ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendHealthServerHealth
    """

    _attribute_map = {
        "address": {"key": "address", "type": "str"},
        "ip_configuration": {"key": "ipConfiguration", "type": "NetworkInterfaceIPConfiguration"},
        "health": {"key": "health", "type": "str"},
    }

    def __init__(
        self,
        *,
        address: Optional[str] = None,
        ip_configuration: Optional["_models.NetworkInterfaceIPConfiguration"] = None,
        health: Optional[Union[str, "_models.ApplicationGatewayBackendHealthServerHealth"]] = None,
        **kwargs
    ):
        """
        :keyword address: IP address or FQDN of backend server.
        :paramtype address: str
        :keyword ip_configuration: Reference of IP configuration of backend server.
        :paramtype ip_configuration:
         ~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceIPConfiguration
        :keyword health: Health of backend server. Known values are: "Unknown", "Up", "Down",
         "Partial", and "Draining".
        :paramtype health: str or
         ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendHealthServerHealth
        """
        super().__init__(**kwargs)
        self.address = address
        self.ip_configuration = ip_configuration
        self.health = health


class ApplicationGatewayBackendHttpSettings(SubResource):  # pylint: disable=too-many-instance-attributes
    """Backend address pool settings of an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the backend http settings that is unique within an Application Gateway.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar type: Type of the resource.
    :vartype type: str
    :ivar port: The destination port on the backend.
    :vartype port: int
    :ivar protocol: The protocol used to communicate with the backend. Possible values are 'Http'
     and 'Https'. Known values are: "Http" and "Https".
    :vartype protocol: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayProtocol
    :ivar cookie_based_affinity: Cookie based affinity. Known values are: "Enabled" and
     "Disabled".
    :vartype cookie_based_affinity: str or
     ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayCookieBasedAffinity
    :ivar request_timeout: Request timeout in seconds. Application Gateway will fail the request
     if response is not received within RequestTimeout. Acceptable values are from 1 second to
     86400 seconds.
    :vartype request_timeout: int
    :ivar probe: Probe resource of an application gateway.
    :vartype probe: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar authentication_certificates: Array of references to application gateway authentication
     certificates.
    :vartype authentication_certificates:
     list[~azure.mgmt.network.v2018_07_01.models.SubResource]
    :ivar connection_draining: Connection draining of the backend http settings resource.
    :vartype connection_draining:
     ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayConnectionDraining
    :ivar host_name: Host header to be sent to the backend servers.
    :vartype host_name: str
    :ivar pick_host_name_from_backend_address: Whether the host header should be picked from the
     host name of the backend server. Default value is false.
    :vartype pick_host_name_from_backend_address: bool
    :ivar affinity_cookie_name: Cookie name to use for the affinity cookie.
    :vartype affinity_cookie_name: str
    :ivar probe_enabled: Whether the probe is enabled. Default value is false.
    :vartype probe_enabled: bool
    :ivar path: Path which should be used as a prefix for all HTTP requests. Null means no path
     will be prefixed. Default value is null.
:vartype path: str :ivar provisioning_state: Provisioning state of the backend http settings resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "type": {"key": "type", "type": "str"}, "port": {"key": "properties.port", "type": "int"}, "protocol": {"key": "properties.protocol", "type": "str"}, "cookie_based_affinity": {"key": "properties.cookieBasedAffinity", "type": "str"}, "request_timeout": {"key": "properties.requestTimeout", "type": "int"}, "probe": {"key": "properties.probe", "type": "SubResource"}, "authentication_certificates": {"key": "properties.authenticationCertificates", "type": "[SubResource]"}, "connection_draining": {"key": "properties.connectionDraining", "type": "ApplicationGatewayConnectionDraining"}, "host_name": {"key": "properties.hostName", "type": "str"}, "pick_host_name_from_backend_address": {"key": "properties.pickHostNameFromBackendAddress", "type": "bool"}, "affinity_cookie_name": {"key": "properties.affinityCookieName", "type": "str"}, "probe_enabled": {"key": "properties.probeEnabled", "type": "bool"}, "path": {"key": "properties.path", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, type: Optional[str] = None, port: Optional[int] = None, protocol: Optional[Union[str, "_models.ApplicationGatewayProtocol"]] = None, cookie_based_affinity: Optional[Union[str, "_models.ApplicationGatewayCookieBasedAffinity"]] = None, request_timeout: Optional[int] = None, probe: Optional["_models.SubResource"] = None, authentication_certificates: Optional[List["_models.SubResource"]] = None, connection_draining: Optional["_models.ApplicationGatewayConnectionDraining"] = None, host_name: Optional[str] = None, pick_host_name_from_backend_address: Optional[bool] = None, affinity_cookie_name: Optional[str] = None, probe_enabled: Optional[bool] = None, path: Optional[str] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: Name of the backend http settings that is unique within an Application Gateway. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword type: Type of the resource. :paramtype type: str :keyword port: The destination port on the backend. :paramtype port: int :keyword protocol: The protocol used to communicate with the backend. Possible values are 'Http' and 'Https'. Known values are: "Http" and "Https". :paramtype protocol: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayProtocol :keyword cookie_based_affinity: Cookie based affinity. Known values are: "Enabled" and "Disabled". :paramtype cookie_based_affinity: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayCookieBasedAffinity :keyword request_timeout: Request timeout in seconds. Application Gateway will fail the request if response is not received within RequestTimeout. Acceptable values are from 1 second to 86400 seconds. :paramtype request_timeout: int :keyword probe: Probe resource of an application gateway. 
        :paramtype probe: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword authentication_certificates: Array of references to application gateway
         authentication certificates.
        :paramtype authentication_certificates:
         list[~azure.mgmt.network.v2018_07_01.models.SubResource]
        :keyword connection_draining: Connection draining of the backend http settings resource.
        :paramtype connection_draining:
         ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayConnectionDraining
        :keyword host_name: Host header to be sent to the backend servers.
        :paramtype host_name: str
        :keyword pick_host_name_from_backend_address: Whether the host header should be picked
         from the host name of the backend server. Default value is false.
        :paramtype pick_host_name_from_backend_address: bool
        :keyword affinity_cookie_name: Cookie name to use for the affinity cookie.
        :paramtype affinity_cookie_name: str
        :keyword probe_enabled: Whether the probe is enabled. Default value is false.
        :paramtype probe_enabled: bool
        :keyword path: Path which should be used as a prefix for all HTTP requests. Null means no
         path will be prefixed. Default value is null.
        :paramtype path: str
        :keyword provisioning_state: Provisioning state of the backend http settings resource.
         Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.type = type
        self.port = port
        self.protocol = protocol
        self.cookie_based_affinity = cookie_based_affinity
        self.request_timeout = request_timeout
        self.probe = probe
        self.authentication_certificates = authentication_certificates
        self.connection_draining = connection_draining
        self.host_name = host_name
        self.pick_host_name_from_backend_address = pick_host_name_from_backend_address
        self.affinity_cookie_name = affinity_cookie_name
        self.probe_enabled = probe_enabled
        self.path = path
        self.provisioning_state = provisioning_state


class ApplicationGatewayConnectionDraining(_serialization.Model):
    """Connection draining allows open connections to a backend server to be active for a
    specified time after the backend server got removed from the configuration.

    All required parameters must be populated in order to send to Azure.

    :ivar enabled: Whether connection draining is enabled or not. Required.
    :vartype enabled: bool
    :ivar drain_timeout_in_sec: The number of seconds connection draining is active. Acceptable
     values are from 1 second to 3600 seconds. Required.
    :vartype drain_timeout_in_sec: int
    """

    _validation = {
        "enabled": {"required": True},
        "drain_timeout_in_sec": {"required": True, "maximum": 3600, "minimum": 1},
    }

    _attribute_map = {
        "enabled": {"key": "enabled", "type": "bool"},
        "drain_timeout_in_sec": {"key": "drainTimeoutInSec", "type": "int"},
    }

    def __init__(self, *, enabled: bool, drain_timeout_in_sec: int, **kwargs):
        """
        :keyword enabled: Whether connection draining is enabled or not. Required.
        :paramtype enabled: bool
        :keyword drain_timeout_in_sec: The number of seconds connection draining is active.
         Acceptable values are from 1 second to 3600 seconds. Required.
        :paramtype drain_timeout_in_sec: int
        """
        super().__init__(**kwargs)
        self.enabled = enabled
        self.drain_timeout_in_sec = drain_timeout_in_sec
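

# Illustrative sketch (not part of the generated code; resource names are
# placeholders, and the protocol/affinity values are the documented string
# literals). ``drain_timeout_in_sec`` is validated against the inclusive
# range 1-3600 declared in ``_validation`` above:
#
#     from azure.mgmt.network.v2018_07_01 import models
#
#     settings = models.ApplicationGatewayBackendHttpSettings(
#         name="appGatewayBackendHttpSettings",
#         port=80,
#         protocol="Http",
#         cookie_based_affinity="Disabled",
#         connection_draining=models.ApplicationGatewayConnectionDraining(
#             enabled=True, drain_timeout_in_sec=60
#         ),
#     )


class ApplicationGatewayFirewallDisabledRuleGroup(_serialization.Model):
    """Allows disabling rules within a rule group or an entire rule group.

    All required parameters must be populated in order to send to Azure.

    :ivar rule_group_name: The name of the rule group that will be disabled. Required.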
:vartype rule_group_name: str :ivar rules: The list of rules that will be disabled. If null, all rules of the rule group will be disabled. :vartype rules: list[int] """ _validation = { "rule_group_name": {"required": True}, } _attribute_map = { "rule_group_name": {"key": "ruleGroupName", "type": "str"}, "rules": {"key": "rules", "type": "[int]"}, } def __init__(self, *, rule_group_name: str, rules: Optional[List[int]] = None, **kwargs): """ :keyword rule_group_name: The name of the rule group that will be disabled. Required. :paramtype rule_group_name: str :keyword rules: The list of rules that will be disabled. If null, all rules of the rule group will be disabled. :paramtype rules: list[int] """ super().__init__(**kwargs) self.rule_group_name = rule_group_name self.rules = rules class ApplicationGatewayFirewallRule(_serialization.Model): """A web application firewall rule. All required parameters must be populated in order to send to Azure. :ivar rule_id: The identifier of the web application firewall rule. Required. :vartype rule_id: int :ivar description: The description of the web application firewall rule. :vartype description: str """ _validation = { "rule_id": {"required": True}, } _attribute_map = { "rule_id": {"key": "ruleId", "type": "int"}, "description": {"key": "description", "type": "str"}, } def __init__(self, *, rule_id: int, description: Optional[str] = None, **kwargs): """ :keyword rule_id: The identifier of the web application firewall rule. Required. :paramtype rule_id: int :keyword description: The description of the web application firewall rule. :paramtype description: str """ super().__init__(**kwargs) self.rule_id = rule_id self.description = description class ApplicationGatewayFirewallRuleGroup(_serialization.Model): """A web application firewall rule group. All required parameters must be populated in order to send to Azure. :ivar rule_group_name: The name of the web application firewall rule group. Required. :vartype rule_group_name: str :ivar description: The description of the web application firewall rule group. :vartype description: str :ivar rules: The rules of the web application firewall rule group. Required. :vartype rules: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFirewallRule] """ _validation = { "rule_group_name": {"required": True}, "rules": {"required": True}, } _attribute_map = { "rule_group_name": {"key": "ruleGroupName", "type": "str"}, "description": {"key": "description", "type": "str"}, "rules": {"key": "rules", "type": "[ApplicationGatewayFirewallRule]"}, } def __init__( self, *, rule_group_name: str, rules: List["_models.ApplicationGatewayFirewallRule"], description: Optional[str] = None, **kwargs ): """ :keyword rule_group_name: The name of the web application firewall rule group. Required. :paramtype rule_group_name: str :keyword description: The description of the web application firewall rule group. :paramtype description: str :keyword rules: The rules of the web application firewall rule group. Required. :paramtype rules: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFirewallRule] """ super().__init__(**kwargs) self.rule_group_name = rule_group_name self.description = description self.rules = rules class ApplicationGatewayFirewallRuleSet(Resource): """A web application firewall rule set. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. 
:vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar provisioning_state: The provisioning state of the web application firewall rule set. :vartype provisioning_state: str :ivar rule_set_type: The type of the web application firewall rule set. :vartype rule_set_type: str :ivar rule_set_version: The version of the web application firewall rule set type. :vartype rule_set_version: str :ivar rule_groups: The rule groups of the web application firewall rule set. :vartype rule_groups: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFirewallRuleGroup] """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "rule_set_type": {"key": "properties.ruleSetType", "type": "str"}, "rule_set_version": {"key": "properties.ruleSetVersion", "type": "str"}, "rule_groups": {"key": "properties.ruleGroups", "type": "[ApplicationGatewayFirewallRuleGroup]"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, provisioning_state: Optional[str] = None, rule_set_type: Optional[str] = None, rule_set_version: Optional[str] = None, rule_groups: Optional[List["_models.ApplicationGatewayFirewallRuleGroup"]] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword provisioning_state: The provisioning state of the web application firewall rule set. :paramtype provisioning_state: str :keyword rule_set_type: The type of the web application firewall rule set. :paramtype rule_set_type: str :keyword rule_set_version: The version of the web application firewall rule set type. :paramtype rule_set_version: str :keyword rule_groups: The rule groups of the web application firewall rule set. :paramtype rule_groups: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFirewallRuleGroup] """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.provisioning_state = provisioning_state self.rule_set_type = rule_set_type self.rule_set_version = rule_set_version self.rule_groups = rule_groups class ApplicationGatewayFrontendIPConfiguration(SubResource): """Frontend IP configuration of an application gateway. :ivar id: Resource ID. :vartype id: str :ivar name: Name of the frontend IP configuration that is unique within an Application Gateway. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar type: Type of the resource. :vartype type: str :ivar private_ip_address: PrivateIPAddress of the network interface IP Configuration. :vartype private_ip_address: str :ivar private_ip_allocation_method: PrivateIP allocation method. Known values are: "Static" and "Dynamic". :vartype private_ip_allocation_method: str or ~azure.mgmt.network.v2018_07_01.models.IPAllocationMethod :ivar subnet: Reference of the subnet resource. :vartype subnet: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar public_ip_address: Reference of the PublicIP resource. 
:vartype public_ip_address: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar provisioning_state: Provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "type": {"key": "type", "type": "str"}, "private_ip_address": {"key": "properties.privateIPAddress", "type": "str"}, "private_ip_allocation_method": {"key": "properties.privateIPAllocationMethod", "type": "str"}, "subnet": {"key": "properties.subnet", "type": "SubResource"}, "public_ip_address": {"key": "properties.publicIPAddress", "type": "SubResource"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, type: Optional[str] = None, private_ip_address: Optional[str] = None, private_ip_allocation_method: Optional[Union[str, "_models.IPAllocationMethod"]] = None, subnet: Optional["_models.SubResource"] = None, public_ip_address: Optional["_models.SubResource"] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: Name of the frontend IP configuration that is unique within an Application Gateway. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword type: Type of the resource. :paramtype type: str :keyword private_ip_address: PrivateIPAddress of the network interface IP Configuration. :paramtype private_ip_address: str :keyword private_ip_allocation_method: PrivateIP allocation method. Known values are: "Static" and "Dynamic". :paramtype private_ip_allocation_method: str or ~azure.mgmt.network.v2018_07_01.models.IPAllocationMethod :keyword subnet: Reference of the subnet resource. :paramtype subnet: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword public_ip_address: Reference of the PublicIP resource. :paramtype public_ip_address: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword provisioning_state: Provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.type = type self.private_ip_address = private_ip_address self.private_ip_allocation_method = private_ip_allocation_method self.subnet = subnet self.public_ip_address = public_ip_address self.provisioning_state = provisioning_state class ApplicationGatewayFrontendPort(SubResource): """Frontend port of an application gateway. :ivar id: Resource ID. :vartype id: str :ivar name: Name of the frontend port that is unique within an Application Gateway. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar type: Type of the resource. :vartype type: str :ivar port: Frontend port. :vartype port: int :ivar provisioning_state: Provisioning state of the frontend port resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. 
:vartype provisioning_state: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "type": {"key": "type", "type": "str"}, "port": {"key": "properties.port", "type": "int"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, type: Optional[str] = None, port: Optional[int] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: Name of the frontend port that is unique within an Application Gateway. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword type: Type of the resource. :paramtype type: str :keyword port: Frontend port. :paramtype port: int :keyword provisioning_state: Provisioning state of the frontend port resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.type = type self.port = port self.provisioning_state = provisioning_state class ApplicationGatewayHttpListener(SubResource): # pylint: disable=too-many-instance-attributes """Http listener of an application gateway. :ivar id: Resource ID. :vartype id: str :ivar name: Name of the HTTP listener that is unique within an Application Gateway. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar type: Type of the resource. :vartype type: str :ivar frontend_ip_configuration: Frontend IP configuration resource of an application gateway. :vartype frontend_ip_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar frontend_port: Frontend port resource of an application gateway. :vartype frontend_port: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar protocol: Protocol of the HTTP listener. Possible values are 'Http' and 'Https'. Known values are: "Http" and "Https". :vartype protocol: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayProtocol :ivar host_name: Host name of HTTP listener. :vartype host_name: str :ivar ssl_certificate: SSL certificate resource of an application gateway. :vartype ssl_certificate: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar require_server_name_indication: Applicable only if protocol is https. Enables SNI for multi-hosting. :vartype require_server_name_indication: bool :ivar provisioning_state: Provisioning state of the HTTP listener resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. 
:vartype provisioning_state: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "type": {"key": "type", "type": "str"}, "frontend_ip_configuration": {"key": "properties.frontendIPConfiguration", "type": "SubResource"}, "frontend_port": {"key": "properties.frontendPort", "type": "SubResource"}, "protocol": {"key": "properties.protocol", "type": "str"}, "host_name": {"key": "properties.hostName", "type": "str"}, "ssl_certificate": {"key": "properties.sslCertificate", "type": "SubResource"}, "require_server_name_indication": {"key": "properties.requireServerNameIndication", "type": "bool"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, type: Optional[str] = None, frontend_ip_configuration: Optional["_models.SubResource"] = None, frontend_port: Optional["_models.SubResource"] = None, protocol: Optional[Union[str, "_models.ApplicationGatewayProtocol"]] = None, host_name: Optional[str] = None, ssl_certificate: Optional["_models.SubResource"] = None, require_server_name_indication: Optional[bool] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: Name of the HTTP listener that is unique within an Application Gateway. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword type: Type of the resource. :paramtype type: str :keyword frontend_ip_configuration: Frontend IP configuration resource of an application gateway. :paramtype frontend_ip_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword frontend_port: Frontend port resource of an application gateway. :paramtype frontend_port: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword protocol: Protocol of the HTTP listener. Possible values are 'Http' and 'Https'. Known values are: "Http" and "Https". :paramtype protocol: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayProtocol :keyword host_name: Host name of HTTP listener. :paramtype host_name: str :keyword ssl_certificate: SSL certificate resource of an application gateway. :paramtype ssl_certificate: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword require_server_name_indication: Applicable only if protocol is https. Enables SNI for multi-hosting. :paramtype require_server_name_indication: bool :keyword provisioning_state: Provisioning state of the HTTP listener resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.type = type self.frontend_ip_configuration = frontend_ip_configuration self.frontend_port = frontend_port self.protocol = protocol self.host_name = host_name self.ssl_certificate = ssl_certificate self.require_server_name_indication = require_server_name_indication self.provisioning_state = provisioning_state class ApplicationGatewayIPConfiguration(SubResource): """IP configuration of an application gateway. Currently 1 public and 1 private IP configuration is allowed. :ivar id: Resource ID. :vartype id: str :ivar name: Name of the IP configuration that is unique within an Application Gateway. 
:vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar type: Type of the resource. :vartype type: str :ivar subnet: Reference of the subnet resource. A subnet from where application gateway gets its private address. :vartype subnet: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar provisioning_state: Provisioning state of the application gateway subnet resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "type": {"key": "type", "type": "str"}, "subnet": {"key": "properties.subnet", "type": "SubResource"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, type: Optional[str] = None, subnet: Optional["_models.SubResource"] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: Name of the IP configuration that is unique within an Application Gateway. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword type: Type of the resource. :paramtype type: str :keyword subnet: Reference of the subnet resource. A subnet from where application gateway gets its private address. :paramtype subnet: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword provisioning_state: Provisioning state of the application gateway subnet resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.type = type self.subnet = subnet self.provisioning_state = provisioning_state class ApplicationGatewayListResult(_serialization.Model): """Response for ListApplicationGateways API service call. :ivar value: List of an application gateways in a resource group. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGateway] :ivar next_link: URL to get the next set of results. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[ApplicationGateway]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.ApplicationGateway"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: List of an application gateways in a resource group. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGateway] :keyword next_link: URL to get the next set of results. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class ApplicationGatewayPathRule(SubResource): """Path rule of URL path map of an application gateway. :ivar id: Resource ID. :vartype id: str :ivar name: Name of the path rule that is unique within an Application Gateway. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar type: Type of the resource. :vartype type: str :ivar paths: Path rules of URL path map. :vartype paths: list[str] :ivar backend_address_pool: Backend address pool resource of URL path map path rule. 
    :vartype backend_address_pool: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar backend_http_settings: Backend http settings resource of URL path map path rule.
    :vartype backend_http_settings: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar redirect_configuration: Redirect configuration resource of URL path map path rule.
    :vartype redirect_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar provisioning_state: Provisioning state of the path rule resource. Possible values are:
     'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "paths": {"key": "properties.paths", "type": "[str]"},
        "backend_address_pool": {"key": "properties.backendAddressPool", "type": "SubResource"},
        "backend_http_settings": {"key": "properties.backendHttpSettings", "type": "SubResource"},
        "redirect_configuration": {"key": "properties.redirectConfiguration", "type": "SubResource"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        type: Optional[str] = None,
        paths: Optional[List[str]] = None,
        backend_address_pool: Optional["_models.SubResource"] = None,
        backend_http_settings: Optional["_models.SubResource"] = None,
        redirect_configuration: Optional["_models.SubResource"] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the path rule that is unique within an Application Gateway.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword type: Type of the resource.
        :paramtype type: str
        :keyword paths: Path rules of URL path map.
        :paramtype paths: list[str]
        :keyword backend_address_pool: Backend address pool resource of URL path map path rule.
        :paramtype backend_address_pool: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword backend_http_settings: Backend http settings resource of URL path map path rule.
        :paramtype backend_http_settings: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword redirect_configuration: Redirect configuration resource of URL path map path
         rule.
        :paramtype redirect_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword provisioning_state: Provisioning state of the path rule resource. Possible values
         are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.type = type
        self.paths = paths
        self.backend_address_pool = backend_address_pool
        self.backend_http_settings = backend_http_settings
        self.redirect_configuration = redirect_configuration
        self.provisioning_state = provisioning_state
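

# Illustrative sketch (not part of the generated code; the resource IDs are
# truncated placeholders). A path rule references its pool and settings by
# SubResource ID:
#
#     from azure.mgmt.network.v2018_07_01 import models
#
#     rule = models.ApplicationGatewayPathRule(
#         name="imagesRule",
#         paths=["/images/*"],
#         backend_address_pool=models.SubResource(id=".../backendAddressPools/pool1"),
#         backend_http_settings=models.SubResource(id=".../backendHttpSettingsCollection/settings1"),
#     )


class ApplicationGatewayProbe(SubResource):  # pylint: disable=too-many-instance-attributes
    """Probe of the application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the probe that is unique within an Application Gateway.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar type: Type of the resource.
    :vartype type: str
    :ivar protocol: The protocol used for the probe. Possible values are 'Http' and 'Https'.
     Known values are: "Http" and "Https".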
    :vartype protocol: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayProtocol
    :ivar host: Host name to send the probe to.
    :vartype host: str
    :ivar path: Relative path of probe. Valid path starts from '/'. Probe is sent to
     :code:`<Protocol>`://:code:`<host>`::code:`<port>`:code:`<path>`.
    :vartype path: str
    :ivar interval: The probing interval in seconds. This is the time interval between two
     consecutive probes. Acceptable values are from 1 second to 86400 seconds.
    :vartype interval: int
    :ivar timeout: The probe timeout in seconds. Probe marked as failed if valid response is not
     received within this timeout period. Acceptable values are from 1 second to 86400 seconds.
    :vartype timeout: int
    :ivar unhealthy_threshold: The probe retry count. Backend server is marked down after
     consecutive probe failure count reaches UnhealthyThreshold. Acceptable values are from 1 to
     20.
    :vartype unhealthy_threshold: int
    :ivar pick_host_name_from_backend_http_settings: Whether the host header should be picked
     from the backend http settings. Default value is false.
    :vartype pick_host_name_from_backend_http_settings: bool
    :ivar min_servers: Minimum number of servers that are always marked healthy. Default value is
     0.
    :vartype min_servers: int
    :ivar match: Criterion for classifying a healthy probe response.
    :vartype match:
     ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayProbeHealthResponseMatch
    :ivar provisioning_state: Provisioning state of the probe resource. Possible values are:
     'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "protocol": {"key": "properties.protocol", "type": "str"},
        "host": {"key": "properties.host", "type": "str"},
        "path": {"key": "properties.path", "type": "str"},
        "interval": {"key": "properties.interval", "type": "int"},
        "timeout": {"key": "properties.timeout", "type": "int"},
        "unhealthy_threshold": {"key": "properties.unhealthyThreshold", "type": "int"},
        "pick_host_name_from_backend_http_settings": {
            "key": "properties.pickHostNameFromBackendHttpSettings",
            "type": "bool",
        },
        "min_servers": {"key": "properties.minServers", "type": "int"},
        "match": {"key": "properties.match", "type": "ApplicationGatewayProbeHealthResponseMatch"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        type: Optional[str] = None,
        protocol: Optional[Union[str, "_models.ApplicationGatewayProtocol"]] = None,
        host: Optional[str] = None,
        path: Optional[str] = None,
        interval: Optional[int] = None,
        timeout: Optional[int] = None,
        unhealthy_threshold: Optional[int] = None,
        pick_host_name_from_backend_http_settings: Optional[bool] = None,
        min_servers: Optional[int] = None,
        match: Optional["_models.ApplicationGatewayProbeHealthResponseMatch"] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the probe that is unique within an Application Gateway.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword type: Type of the resource.
        :paramtype type: str
        :keyword protocol: The protocol used for the probe. Possible values are 'Http' and 'Https'.
         Known values are: "Http" and "Https".
        :paramtype protocol: str or
         ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayProtocol
        :keyword host: Host name to send the probe to.
        :paramtype host: str
        :keyword path: Relative path of probe. Valid path starts from '/'. Probe is sent to
         :code:`<Protocol>`://:code:`<host>`::code:`<port>`:code:`<path>`.
        :paramtype path: str
        :keyword interval: The probing interval in seconds. This is the time interval between two
         consecutive probes. Acceptable values are from 1 second to 86400 seconds.
        :paramtype interval: int
        :keyword timeout: The probe timeout in seconds. Probe marked as failed if valid response
         is not received within this timeout period. Acceptable values are from 1 second to 86400
         seconds.
        :paramtype timeout: int
        :keyword unhealthy_threshold: The probe retry count. Backend server is marked down after
         consecutive probe failure count reaches UnhealthyThreshold. Acceptable values are from 1
         to 20.
        :paramtype unhealthy_threshold: int
        :keyword pick_host_name_from_backend_http_settings: Whether the host header should be
         picked from the backend http settings. Default value is false.
        :paramtype pick_host_name_from_backend_http_settings: bool
        :keyword min_servers: Minimum number of servers that are always marked healthy. Default
         value is 0.
        :paramtype min_servers: int
        :keyword match: Criterion for classifying a healthy probe response.
        :paramtype match:
         ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayProbeHealthResponseMatch
        :keyword provisioning_state: Provisioning state of the probe resource. Possible values
         are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.type = type
        self.protocol = protocol
        self.host = host
        self.path = path
        self.interval = interval
        self.timeout = timeout
        self.unhealthy_threshold = unhealthy_threshold
        self.pick_host_name_from_backend_http_settings = pick_host_name_from_backend_http_settings
        self.min_servers = min_servers
        self.match = match
        self.provisioning_state = provisioning_state


class ApplicationGatewayProbeHealthResponseMatch(_serialization.Model):
    """Application gateway probe health response match.

    :ivar body: Body that must be contained in the health response. Default value is empty.
    :vartype body: str
    :ivar status_codes: Allowed ranges of healthy status codes. Default range of healthy status
     codes is 200-399.
    :vartype status_codes: list[str]
    """

    _attribute_map = {
        "body": {"key": "body", "type": "str"},
        "status_codes": {"key": "statusCodes", "type": "[str]"},
    }

    def __init__(self, *, body: Optional[str] = None, status_codes: Optional[List[str]] = None, **kwargs):
        """
        :keyword body: Body that must be contained in the health response. Default value is empty.
        :paramtype body: str
        :keyword status_codes: Allowed ranges of healthy status codes. Default range of healthy
         status codes is 200-399.
        :paramtype status_codes: list[str]
        """
        super().__init__(**kwargs)
        self.body = body
        self.status_codes = status_codes
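

# Illustrative sketch (not part of the generated code; the probe name and path
# are placeholders). A custom HTTPS probe that accepts any 2xx/3xx status code
# via a health response match:
#
#     from azure.mgmt.network.v2018_07_01 import models
#
#     probe = models.ApplicationGatewayProbe(
#         name="httpsProbe",
#         protocol="Https",
#         path="/health",
#         interval=30,
#         timeout=30,
#         unhealthy_threshold=3,
#         pick_host_name_from_backend_http_settings=True,
#         match=models.ApplicationGatewayProbeHealthResponseMatch(status_codes=["200-399"]),
#     )


class ApplicationGatewayRedirectConfiguration(SubResource):  # pylint: disable=too-many-instance-attributes
    """Redirect configuration of an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the redirect configuration that is unique within an Application Gateway.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar type: Type of the resource.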
    :vartype type: str
    :ivar redirect_type: Supported http redirection types - Permanent, Temporary, Found, SeeOther.
     Known values are: "Permanent", "Found", "SeeOther", and "Temporary".
    :vartype redirect_type: str or
     ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayRedirectType
    :ivar target_listener: Reference to a listener to redirect the request to.
    :vartype target_listener: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar target_url: Url to redirect the request to.
    :vartype target_url: str
    :ivar include_path: Include path in the redirected url.
    :vartype include_path: bool
    :ivar include_query_string: Include query string in the redirected url.
    :vartype include_query_string: bool
    :ivar request_routing_rules: Request routing rules specifying redirect configuration.
    :vartype request_routing_rules: list[~azure.mgmt.network.v2018_07_01.models.SubResource]
    :ivar url_path_maps: Url path maps specifying default redirect configuration.
    :vartype url_path_maps: list[~azure.mgmt.network.v2018_07_01.models.SubResource]
    :ivar path_rules: Path rules specifying redirect configuration.
    :vartype path_rules: list[~azure.mgmt.network.v2018_07_01.models.SubResource]
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "redirect_type": {"key": "properties.redirectType", "type": "str"},
        "target_listener": {"key": "properties.targetListener", "type": "SubResource"},
        "target_url": {"key": "properties.targetUrl", "type": "str"},
        "include_path": {"key": "properties.includePath", "type": "bool"},
        "include_query_string": {"key": "properties.includeQueryString", "type": "bool"},
        "request_routing_rules": {"key": "properties.requestRoutingRules", "type": "[SubResource]"},
        "url_path_maps": {"key": "properties.urlPathMaps", "type": "[SubResource]"},
        "path_rules": {"key": "properties.pathRules", "type": "[SubResource]"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        type: Optional[str] = None,
        redirect_type: Optional[Union[str, "_models.ApplicationGatewayRedirectType"]] = None,
        target_listener: Optional["_models.SubResource"] = None,
        target_url: Optional[str] = None,
        include_path: Optional[bool] = None,
        include_query_string: Optional[bool] = None,
        request_routing_rules: Optional[List["_models.SubResource"]] = None,
        url_path_maps: Optional[List["_models.SubResource"]] = None,
        path_rules: Optional[List["_models.SubResource"]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the redirect configuration that is unique within an Application
         Gateway.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword type: Type of the resource.
        :paramtype type: str
        :keyword redirect_type: Supported http redirection types - Permanent, Temporary, Found,
         SeeOther. Known values are: "Permanent", "Found", "SeeOther", and "Temporary".
        :paramtype redirect_type: str or
         ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayRedirectType
        :keyword target_listener: Reference to a listener to redirect the request to.
        :paramtype target_listener: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword target_url: Url to redirect the request to.
        :paramtype target_url: str
        :keyword include_path: Include path in the redirected url.
        :paramtype include_path: bool
        :keyword include_query_string: Include query string in the redirected url.
        :paramtype include_query_string: bool
        :keyword request_routing_rules: Request routing rules specifying redirect configuration.
        :paramtype request_routing_rules: list[~azure.mgmt.network.v2018_07_01.models.SubResource]
        :keyword url_path_maps: Url path maps specifying default redirect configuration.
        :paramtype url_path_maps: list[~azure.mgmt.network.v2018_07_01.models.SubResource]
        :keyword path_rules: Path rules specifying redirect configuration.
        :paramtype path_rules: list[~azure.mgmt.network.v2018_07_01.models.SubResource]
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.type = type
        self.redirect_type = redirect_type
        self.target_listener = target_listener
        self.target_url = target_url
        self.include_path = include_path
        self.include_query_string = include_query_string
        self.request_routing_rules = request_routing_rules
        self.url_path_maps = url_path_maps
        self.path_rules = path_rules
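

# Illustrative sketch (not part of the generated code; the listener ID is a
# truncated placeholder). A permanent HTTP-to-HTTPS redirect that keeps the
# original path and query string:
#
#     from azure.mgmt.network.v2018_07_01 import models
#
#     redirect = models.ApplicationGatewayRedirectConfiguration(
#         name="httpToHttps",
#         redirect_type="Permanent",
#         target_listener=models.SubResource(id=".../httpListeners/httpsListener"),
#         include_path=True,
#         include_query_string=True,
#     )


class ApplicationGatewayRequestRoutingRule(SubResource):  # pylint: disable=too-many-instance-attributes
    """Request routing rule of an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the request routing rule that is unique within an Application Gateway.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar type: Type of the resource.
    :vartype type: str
    :ivar rule_type: Rule type. Known values are: "Basic" and "PathBasedRouting".
    :vartype rule_type: str or
     ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayRequestRoutingRuleType
    :ivar backend_address_pool: Backend address pool resource of the application gateway.
    :vartype backend_address_pool: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar backend_http_settings: Backend http settings resource of the application gateway.
    :vartype backend_http_settings: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar http_listener: Http listener resource of the application gateway.
    :vartype http_listener: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar url_path_map: URL path map resource of the application gateway.
    :vartype url_path_map: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar redirect_configuration: Redirect configuration resource of the application gateway.
    :vartype redirect_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar provisioning_state: Provisioning state of the request routing rule resource. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.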
:vartype provisioning_state: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "type": {"key": "type", "type": "str"}, "rule_type": {"key": "properties.ruleType", "type": "str"}, "backend_address_pool": {"key": "properties.backendAddressPool", "type": "SubResource"}, "backend_http_settings": {"key": "properties.backendHttpSettings", "type": "SubResource"}, "http_listener": {"key": "properties.httpListener", "type": "SubResource"}, "url_path_map": {"key": "properties.urlPathMap", "type": "SubResource"}, "redirect_configuration": {"key": "properties.redirectConfiguration", "type": "SubResource"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, type: Optional[str] = None, rule_type: Optional[Union[str, "_models.ApplicationGatewayRequestRoutingRuleType"]] = None, backend_address_pool: Optional["_models.SubResource"] = None, backend_http_settings: Optional["_models.SubResource"] = None, http_listener: Optional["_models.SubResource"] = None, url_path_map: Optional["_models.SubResource"] = None, redirect_configuration: Optional["_models.SubResource"] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: Name of the request routing rule that is unique within an Application Gateway. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword type: Type of the resource. :paramtype type: str :keyword rule_type: Rule type. Known values are: "Basic" and "PathBasedRouting". :paramtype rule_type: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayRequestRoutingRuleType :keyword backend_address_pool: Backend address pool resource of the application gateway. :paramtype backend_address_pool: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword backend_http_settings: Backend http settings resource of the application gateway. :paramtype backend_http_settings: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword http_listener: Http listener resource of the application gateway. :paramtype http_listener: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword url_path_map: URL path map resource of the application gateway. :paramtype url_path_map: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword redirect_configuration: Redirect configuration resource of the application gateway. :paramtype redirect_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword provisioning_state: Provisioning state of the request routing rule resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.type = type self.rule_type = rule_type self.backend_address_pool = backend_address_pool self.backend_http_settings = backend_http_settings self.http_listener = http_listener self.url_path_map = url_path_map self.redirect_configuration = redirect_configuration self.provisioning_state = provisioning_state class ApplicationGatewaySku(_serialization.Model): """SKU of an application gateway. :ivar name: Name of an application gateway SKU. 
     Known values are: "Standard_Small", "Standard_Medium", "Standard_Large", "WAF_Medium",
     "WAF_Large", "Standard_v2", and "WAF_v2".
    :vartype name: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySkuName
    :ivar tier: Tier of an application gateway. Known values are: "Standard", "WAF",
     "Standard_v2", and "WAF_v2".
    :vartype tier: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayTier
    :ivar capacity: Capacity (instance count) of an application gateway.
    :vartype capacity: int
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "tier": {"key": "tier", "type": "str"},
        "capacity": {"key": "capacity", "type": "int"},
    }

    def __init__(
        self,
        *,
        name: Optional[Union[str, "_models.ApplicationGatewaySkuName"]] = None,
        tier: Optional[Union[str, "_models.ApplicationGatewayTier"]] = None,
        capacity: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword name: Name of an application gateway SKU. Known values are: "Standard_Small",
         "Standard_Medium", "Standard_Large", "WAF_Medium", "WAF_Large", "Standard_v2", and
         "WAF_v2".
        :paramtype name: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySkuName
        :keyword tier: Tier of an application gateway. Known values are: "Standard", "WAF",
         "Standard_v2", and "WAF_v2".
        :paramtype tier: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayTier
        :keyword capacity: Capacity (instance count) of an application gateway.
        :paramtype capacity: int
        """
        super().__init__(**kwargs)
        self.name = name
        self.tier = tier
        self.capacity = capacity


class ApplicationGatewaySslCertificate(SubResource):
    """SSL certificates of an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the SSL certificate that is unique within an Application Gateway.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar type: Type of the resource.
    :vartype type: str
    :ivar data: Base-64 encoded pfx certificate. Only applicable in PUT Request.
    :vartype data: str
    :ivar password: Password for the pfx file specified in data. Only applicable in PUT request.
    :vartype password: str
    :ivar public_cert_data: Base-64 encoded Public cert data corresponding to pfx specified in
     data. Only applicable in GET request.
    :vartype public_cert_data: str
    :ivar provisioning_state: Provisioning state of the SSL certificate resource. Possible values
     are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "data": {"key": "properties.data", "type": "str"},
        "password": {"key": "properties.password", "type": "str"},
        "public_cert_data": {"key": "properties.publicCertData", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        type: Optional[str] = None,
        data: Optional[str] = None,
        password: Optional[str] = None,
        public_cert_data: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the SSL certificate that is unique within an Application Gateway.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword type: Type of the resource.
:paramtype type: str :keyword data: Base-64 encoded pfx certificate. Only applicable in PUT Request. :paramtype data: str :keyword password: Password for the pfx file specified in data. Only applicable in PUT request. :paramtype password: str :keyword public_cert_data: Base-64 encoded Public cert data corresponding to pfx specified in data. Only applicable in GET request. :paramtype public_cert_data: str :keyword provisioning_state: Provisioning state of the SSL certificate resource Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.type = type self.data = data self.password = password self.public_cert_data = public_cert_data self.provisioning_state = provisioning_state class ApplicationGatewaySslPolicy(_serialization.Model): """Application Gateway Ssl policy. :ivar disabled_ssl_protocols: Ssl protocols to be disabled on application gateway. :vartype disabled_ssl_protocols: list[str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslProtocol] :ivar policy_type: Type of Ssl Policy. Known values are: "Predefined" and "Custom". :vartype policy_type: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslPolicyType :ivar policy_name: Name of Ssl predefined policy. Known values are: "AppGwSslPolicy20150501", "AppGwSslPolicy20170401", and "AppGwSslPolicy20170401S". :vartype policy_name: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslPolicyName :ivar cipher_suites: Ssl cipher suites to be enabled in the specified order to application gateway. :vartype cipher_suites: list[str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslCipherSuite] :ivar min_protocol_version: Minimum version of Ssl protocol to be supported on application gateway. Known values are: "TLSv1_0", "TLSv1_1", and "TLSv1_2". :vartype min_protocol_version: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslProtocol """ _attribute_map = { "disabled_ssl_protocols": {"key": "disabledSslProtocols", "type": "[str]"}, "policy_type": {"key": "policyType", "type": "str"}, "policy_name": {"key": "policyName", "type": "str"}, "cipher_suites": {"key": "cipherSuites", "type": "[str]"}, "min_protocol_version": {"key": "minProtocolVersion", "type": "str"}, } def __init__( self, *, disabled_ssl_protocols: Optional[List[Union[str, "_models.ApplicationGatewaySslProtocol"]]] = None, policy_type: Optional[Union[str, "_models.ApplicationGatewaySslPolicyType"]] = None, policy_name: Optional[Union[str, "_models.ApplicationGatewaySslPolicyName"]] = None, cipher_suites: Optional[List[Union[str, "_models.ApplicationGatewaySslCipherSuite"]]] = None, min_protocol_version: Optional[Union[str, "_models.ApplicationGatewaySslProtocol"]] = None, **kwargs ): """ :keyword disabled_ssl_protocols: Ssl protocols to be disabled on application gateway. :paramtype disabled_ssl_protocols: list[str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslProtocol] :keyword policy_type: Type of Ssl Policy. Known values are: "Predefined" and "Custom". :paramtype policy_type: str or ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslPolicyType :keyword policy_name: Name of Ssl predefined policy. Known values are: "AppGwSslPolicy20150501", "AppGwSslPolicy20170401", and "AppGwSslPolicy20170401S". 
        :paramtype policy_name: str or
         ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslPolicyName
        :keyword cipher_suites: Ssl cipher suites to be enabled in the specified order to
         application gateway.
        :paramtype cipher_suites: list[str or
         ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslCipherSuite]
        :keyword min_protocol_version: Minimum version of Ssl protocol to be supported on
         application gateway. Known values are: "TLSv1_0", "TLSv1_1", and "TLSv1_2".
        :paramtype min_protocol_version: str or
         ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslProtocol
        """
        super().__init__(**kwargs)
        self.disabled_ssl_protocols = disabled_ssl_protocols
        self.policy_type = policy_type
        self.policy_name = policy_name
        self.cipher_suites = cipher_suites
        self.min_protocol_version = min_protocol_version


class ApplicationGatewaySslPredefinedPolicy(SubResource):
    """An Ssl predefined policy.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the Ssl predefined policy.
    :vartype name: str
    :ivar cipher_suites: Ssl cipher suites to be enabled in the specified order for application
     gateway.
    :vartype cipher_suites: list[str or
     ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslCipherSuite]
    :ivar min_protocol_version: Minimum version of Ssl protocol to be supported on application
     gateway. Known values are: "TLSv1_0", "TLSv1_1", and "TLSv1_2".
    :vartype min_protocol_version: str or
     ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslProtocol
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "cipher_suites": {"key": "properties.cipherSuites", "type": "[str]"},
        "min_protocol_version": {"key": "properties.minProtocolVersion", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        cipher_suites: Optional[List[Union[str, "_models.ApplicationGatewaySslCipherSuite"]]] = None,
        min_protocol_version: Optional[Union[str, "_models.ApplicationGatewaySslProtocol"]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the Ssl predefined policy.
        :paramtype name: str
        :keyword cipher_suites: Ssl cipher suites to be enabled in the specified order for
         application gateway.
        :paramtype cipher_suites: list[str or
         ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslCipherSuite]
        :keyword min_protocol_version: Minimum version of Ssl protocol to be supported on
         application gateway. Known values are: "TLSv1_0", "TLSv1_1", and "TLSv1_2".
        :paramtype min_protocol_version: str or
         ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewaySslProtocol
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.cipher_suites = cipher_suites
        self.min_protocol_version = min_protocol_version


class ApplicationGatewayUrlPathMap(SubResource):
    """UrlPathMaps give a URL path to the backend mapping information for PathBasedRouting.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the URL path map that is unique within an Application Gateway.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar type: Type of the resource.
    :vartype type: str
    :ivar default_backend_address_pool: Default backend address pool resource of URL path map.
    :vartype default_backend_address_pool: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar default_backend_http_settings: Default backend http settings resource of URL path map.
    :vartype default_backend_http_settings: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar default_redirect_configuration: Default redirect configuration resource of URL path map.
    :vartype default_redirect_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar path_rules: Path rule of URL path map resource.
    :vartype path_rules: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayPathRule]
    :ivar provisioning_state: Provisioning state of the URL path map resource. Possible values
     are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "default_backend_address_pool": {"key": "properties.defaultBackendAddressPool", "type": "SubResource"},
        "default_backend_http_settings": {"key": "properties.defaultBackendHttpSettings", "type": "SubResource"},
        "default_redirect_configuration": {"key": "properties.defaultRedirectConfiguration", "type": "SubResource"},
        "path_rules": {"key": "properties.pathRules", "type": "[ApplicationGatewayPathRule]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        type: Optional[str] = None,
        default_backend_address_pool: Optional["_models.SubResource"] = None,
        default_backend_http_settings: Optional["_models.SubResource"] = None,
        default_redirect_configuration: Optional["_models.SubResource"] = None,
        path_rules: Optional[List["_models.ApplicationGatewayPathRule"]] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the URL path map that is unique within an Application Gateway.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword type: Type of the resource.
        :paramtype type: str
        :keyword default_backend_address_pool: Default backend address pool resource of URL path
         map.
        :paramtype default_backend_address_pool: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword default_backend_http_settings: Default backend http settings resource of URL path
         map.
        :paramtype default_backend_http_settings: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword default_redirect_configuration: Default redirect configuration resource of URL
         path map.
        :paramtype default_redirect_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword path_rules: Path rule of URL path map resource.
        :paramtype path_rules:
         list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayPathRule]
        :keyword provisioning_state: Provisioning state of the URL path map resource. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.type = type
        self.default_backend_address_pool = default_backend_address_pool
        self.default_backend_http_settings = default_backend_http_settings
        self.default_redirect_configuration = default_redirect_configuration
        self.path_rules = path_rules
        self.provisioning_state = provisioning_state


class ApplicationGatewayWebApplicationFirewallConfiguration(_serialization.Model):
    """Application gateway web application firewall configuration.
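
    A minimal construction sketch (illustrative only; the mode and rule set values
    shown below are example inputs, not defaults of this model)::

        waf_config = ApplicationGatewayWebApplicationFirewallConfiguration(
            enabled=True,
            firewall_mode="Prevention",
            rule_set_type="OWASP",
            rule_set_version="3.0",
        )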
    All required parameters must be populated in order to send to Azure.

    :ivar enabled: Whether the web application firewall is enabled or not. Required.
    :vartype enabled: bool
    :ivar firewall_mode: Web application firewall mode. Required. Known values are: "Detection"
     and "Prevention".
    :vartype firewall_mode: str or
     ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFirewallMode
    :ivar rule_set_type: The type of the web application firewall rule set. Possible values are:
     'OWASP'. Required.
    :vartype rule_set_type: str
    :ivar rule_set_version: The version of the rule set type. Required.
    :vartype rule_set_version: str
    :ivar disabled_rule_groups: The disabled rule groups.
    :vartype disabled_rule_groups:
     list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFirewallDisabledRuleGroup]
    :ivar request_body_check: Whether to allow WAF to check the request body.
    :vartype request_body_check: bool
    :ivar max_request_body_size: Maximum request body size for WAF.
    :vartype max_request_body_size: int
    """

    _validation = {
        "enabled": {"required": True},
        "firewall_mode": {"required": True},
        "rule_set_type": {"required": True},
        "rule_set_version": {"required": True},
        "max_request_body_size": {"maximum": 128, "minimum": 8},
    }

    _attribute_map = {
        "enabled": {"key": "enabled", "type": "bool"},
        "firewall_mode": {"key": "firewallMode", "type": "str"},
        "rule_set_type": {"key": "ruleSetType", "type": "str"},
        "rule_set_version": {"key": "ruleSetVersion", "type": "str"},
        "disabled_rule_groups": {"key": "disabledRuleGroups", "type": "[ApplicationGatewayFirewallDisabledRuleGroup]"},
        "request_body_check": {"key": "requestBodyCheck", "type": "bool"},
        "max_request_body_size": {"key": "maxRequestBodySize", "type": "int"},
    }

    def __init__(
        self,
        *,
        enabled: bool,
        firewall_mode: Union[str, "_models.ApplicationGatewayFirewallMode"],
        rule_set_type: str,
        rule_set_version: str,
        disabled_rule_groups: Optional[List["_models.ApplicationGatewayFirewallDisabledRuleGroup"]] = None,
        request_body_check: Optional[bool] = None,
        max_request_body_size: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword enabled: Whether the web application firewall is enabled or not. Required.
        :paramtype enabled: bool
        :keyword firewall_mode: Web application firewall mode. Required. Known values are:
         "Detection" and "Prevention".
        :paramtype firewall_mode: str or
         ~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFirewallMode
        :keyword rule_set_type: The type of the web application firewall rule set. Possible values
         are: 'OWASP'. Required.
        :paramtype rule_set_type: str
        :keyword rule_set_version: The version of the rule set type. Required.
        :paramtype rule_set_version: str
        :keyword disabled_rule_groups: The disabled rule groups.
        :paramtype disabled_rule_groups:
         list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayFirewallDisabledRuleGroup]
        :keyword request_body_check: Whether to allow WAF to check the request body.
        :paramtype request_body_check: bool
        :keyword max_request_body_size: Maximum request body size for WAF.
        :paramtype max_request_body_size: int
        """
        super().__init__(**kwargs)
        self.enabled = enabled
        self.firewall_mode = firewall_mode
        self.rule_set_type = rule_set_type
        self.rule_set_version = rule_set_version
        self.disabled_rule_groups = disabled_rule_groups
        self.request_body_check = request_body_check
        self.max_request_body_size = max_request_body_size


class ApplicationSecurityGroup(Resource):
    """An application security group in a resource group.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar resource_guid: The resource GUID property of the application security group resource. It
     uniquely identifies a resource, even if the user changes its name or migrates the resource
     across subscriptions or resource groups.
    :vartype resource_guid: str
    :ivar provisioning_state: The provisioning state of the application security group resource.
     Possible values are: 'Succeeded', 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "etag": {"readonly": True},
        "resource_guid": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "resource_guid": {"key": "properties.resourceGuid", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = None
        self.resource_guid = None
        self.provisioning_state = None


class ApplicationSecurityGroupListResult(_serialization.Model):
    """A list of application security groups.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: A list of application security groups.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ApplicationSecurityGroup]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _validation = {
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[ApplicationSecurityGroup]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.ApplicationSecurityGroup"]] = None, **kwargs):
        """
        :keyword value: A list of application security groups.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ApplicationSecurityGroup]
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = None


class AuthorizationListResult(_serialization.Model):
    """Response for ListAuthorizations API service call retrieves all authorizations that belong to
    an ExpressRouteCircuit.

    :ivar value: The authorizations in an ExpressRoute Circuit.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitAuthorization]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[ExpressRouteCircuitAuthorization]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.ExpressRouteCircuitAuthorization"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: The authorizations in an ExpressRoute Circuit.
:paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitAuthorization] :keyword next_link: The URL to get the next set of results. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class Availability(_serialization.Model): """Availability of the metric. :ivar time_grain: The time grain of the availability. :vartype time_grain: str :ivar retention: The retention of the availability. :vartype retention: str :ivar blob_duration: Duration of the availability blob. :vartype blob_duration: str """ _attribute_map = { "time_grain": {"key": "timeGrain", "type": "str"}, "retention": {"key": "retention", "type": "str"}, "blob_duration": {"key": "blobDuration", "type": "str"}, } def __init__( self, *, time_grain: Optional[str] = None, retention: Optional[str] = None, blob_duration: Optional[str] = None, **kwargs ): """ :keyword time_grain: The time grain of the availability. :paramtype time_grain: str :keyword retention: The retention of the availability. :paramtype retention: str :keyword blob_duration: Duration of the availability blob. :paramtype blob_duration: str """ super().__init__(**kwargs) self.time_grain = time_grain self.retention = retention self.blob_duration = blob_duration class AvailableProvidersList(_serialization.Model): """List of available countries with details. All required parameters must be populated in order to send to Azure. :ivar countries: List of available countries. Required. :vartype countries: list[~azure.mgmt.network.v2018_07_01.models.AvailableProvidersListCountry] """ _validation = { "countries": {"required": True}, } _attribute_map = { "countries": {"key": "countries", "type": "[AvailableProvidersListCountry]"}, } def __init__(self, *, countries: List["_models.AvailableProvidersListCountry"], **kwargs): """ :keyword countries: List of available countries. Required. :paramtype countries: list[~azure.mgmt.network.v2018_07_01.models.AvailableProvidersListCountry] """ super().__init__(**kwargs) self.countries = countries class AvailableProvidersListCity(_serialization.Model): """City or town details. :ivar city_name: The city or town name. :vartype city_name: str :ivar providers: A list of Internet service providers. :vartype providers: list[str] """ _attribute_map = { "city_name": {"key": "cityName", "type": "str"}, "providers": {"key": "providers", "type": "[str]"}, } def __init__(self, *, city_name: Optional[str] = None, providers: Optional[List[str]] = None, **kwargs): """ :keyword city_name: The city or town name. :paramtype city_name: str :keyword providers: A list of Internet service providers. :paramtype providers: list[str] """ super().__init__(**kwargs) self.city_name = city_name self.providers = providers class AvailableProvidersListCountry(_serialization.Model): """Country details. :ivar country_name: The country name. :vartype country_name: str :ivar providers: A list of Internet service providers. :vartype providers: list[str] :ivar states: List of available states in the country. 
:vartype states: list[~azure.mgmt.network.v2018_07_01.models.AvailableProvidersListState] """ _attribute_map = { "country_name": {"key": "countryName", "type": "str"}, "providers": {"key": "providers", "type": "[str]"}, "states": {"key": "states", "type": "[AvailableProvidersListState]"}, } def __init__( self, *, country_name: Optional[str] = None, providers: Optional[List[str]] = None, states: Optional[List["_models.AvailableProvidersListState"]] = None, **kwargs ): """ :keyword country_name: The country name. :paramtype country_name: str :keyword providers: A list of Internet service providers. :paramtype providers: list[str] :keyword states: List of available states in the country. :paramtype states: list[~azure.mgmt.network.v2018_07_01.models.AvailableProvidersListState] """ super().__init__(**kwargs) self.country_name = country_name self.providers = providers self.states = states class AvailableProvidersListParameters(_serialization.Model): """Constraints that determine the list of available Internet service providers. :ivar azure_locations: A list of Azure regions. :vartype azure_locations: list[str] :ivar country: The country for available providers list. :vartype country: str :ivar state: The state for available providers list. :vartype state: str :ivar city: The city or town for available providers list. :vartype city: str """ _attribute_map = { "azure_locations": {"key": "azureLocations", "type": "[str]"}, "country": {"key": "country", "type": "str"}, "state": {"key": "state", "type": "str"}, "city": {"key": "city", "type": "str"}, } def __init__( self, *, azure_locations: Optional[List[str]] = None, country: Optional[str] = None, state: Optional[str] = None, city: Optional[str] = None, **kwargs ): """ :keyword azure_locations: A list of Azure regions. :paramtype azure_locations: list[str] :keyword country: The country for available providers list. :paramtype country: str :keyword state: The state for available providers list. :paramtype state: str :keyword city: The city or town for available providers list. :paramtype city: str """ super().__init__(**kwargs) self.azure_locations = azure_locations self.country = country self.state = state self.city = city class AvailableProvidersListState(_serialization.Model): """State details. :ivar state_name: The state name. :vartype state_name: str :ivar providers: A list of Internet service providers. :vartype providers: list[str] :ivar cities: List of available cities or towns in the state. :vartype cities: list[~azure.mgmt.network.v2018_07_01.models.AvailableProvidersListCity] """ _attribute_map = { "state_name": {"key": "stateName", "type": "str"}, "providers": {"key": "providers", "type": "[str]"}, "cities": {"key": "cities", "type": "[AvailableProvidersListCity]"}, } def __init__( self, *, state_name: Optional[str] = None, providers: Optional[List[str]] = None, cities: Optional[List["_models.AvailableProvidersListCity"]] = None, **kwargs ): """ :keyword state_name: The state name. :paramtype state_name: str :keyword providers: A list of Internet service providers. :paramtype providers: list[str] :keyword cities: List of available cities or towns in the state. 
        :paramtype cities: list[~azure.mgmt.network.v2018_07_01.models.AvailableProvidersListCity]
        """
        super().__init__(**kwargs)
        self.state_name = state_name
        self.providers = providers
        self.cities = cities


class AzureAsyncOperationResult(_serialization.Model):
    """The response body contains the status of the specified asynchronous operation, indicating
    whether it has succeeded, is in progress, or has failed. Note that this status is distinct
    from the HTTP status code returned for the Get Operation Status operation itself. If the
    asynchronous operation succeeded, the response body includes the HTTP status code for the
    successful request. If the asynchronous operation failed, the response body includes the HTTP
    status code for the failed request and error information regarding the failure.

    :ivar status: Status of the Azure async operation. Possible values are: 'InProgress',
     'Succeeded', and 'Failed'. Known values are: "InProgress", "Succeeded", and "Failed".
    :vartype status: str or ~azure.mgmt.network.v2018_07_01.models.NetworkOperationStatus
    :ivar error:
    :vartype error: ~azure.mgmt.network.v2018_07_01.models.Error
    """

    _attribute_map = {
        "status": {"key": "status", "type": "str"},
        "error": {"key": "error", "type": "Error"},
    }

    def __init__(
        self,
        *,
        status: Optional[Union[str, "_models.NetworkOperationStatus"]] = None,
        error: Optional["_models.Error"] = None,
        **kwargs
    ):
        """
        :keyword status: Status of the Azure async operation. Possible values are: 'InProgress',
         'Succeeded', and 'Failed'. Known values are: "InProgress", "Succeeded", and "Failed".
        :paramtype status: str or ~azure.mgmt.network.v2018_07_01.models.NetworkOperationStatus
        :keyword error:
        :paramtype error: ~azure.mgmt.network.v2018_07_01.models.Error
        """
        super().__init__(**kwargs)
        self.status = status
        self.error = error


class AzureFirewall(Resource):
    """Azure Firewall resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar application_rule_collections: Collection of application rule collections used by an
     Azure Firewall.
    :vartype application_rule_collections:
     list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallApplicationRuleCollection]
    :ivar network_rule_collections: Collection of network rule collections used by an Azure
     Firewall.
    :vartype network_rule_collections:
     list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallNetworkRuleCollection]
    :ivar ip_configurations: IP configuration of the Azure Firewall resource.
    :vartype ip_configurations:
     list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallIPConfiguration]
    :ivar provisioning_state: The provisioning state of the resource. Known values are:
     "Succeeded", "Updating", "Deleting", and "Failed".
    :vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "etag": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "application_rule_collections": {
            "key": "properties.applicationRuleCollections",
            "type": "[AzureFirewallApplicationRuleCollection]",
        },
        "network_rule_collections": {
            "key": "properties.networkRuleCollections",
            "type": "[AzureFirewallNetworkRuleCollection]",
        },
        "ip_configurations": {"key": "properties.ipConfigurations", "type": "[AzureFirewallIPConfiguration]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        application_rule_collections: Optional[List["_models.AzureFirewallApplicationRuleCollection"]] = None,
        network_rule_collections: Optional[List["_models.AzureFirewallNetworkRuleCollection"]] = None,
        ip_configurations: Optional[List["_models.AzureFirewallIPConfiguration"]] = None,
        provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword application_rule_collections: Collection of application rule collections used by
         an Azure Firewall.
        :paramtype application_rule_collections:
         list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallApplicationRuleCollection]
        :keyword network_rule_collections: Collection of network rule collections used by an Azure
         Firewall.
        :paramtype network_rule_collections:
         list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallNetworkRuleCollection]
        :keyword ip_configurations: IP configuration of the Azure Firewall resource.
        :paramtype ip_configurations:
         list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallIPConfiguration]
        :keyword provisioning_state: The provisioning state of the resource. Known values are:
         "Succeeded", "Updating", "Deleting", and "Failed".
        :paramtype provisioning_state: str or
         ~azure.mgmt.network.v2018_07_01.models.ProvisioningState
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = None
        self.application_rule_collections = application_rule_collections
        self.network_rule_collections = network_rule_collections
        self.ip_configurations = ip_configurations
        self.provisioning_state = provisioning_state


class AzureFirewallApplicationRule(_serialization.Model):
    """Properties of an application rule.

    :ivar name: Name of the application rule.
    :vartype name: str
    :ivar description: Description of the rule.
    :vartype description: str
    :ivar source_addresses: List of source IP addresses for this rule.
    :vartype source_addresses: list[str]
    :ivar protocols: Array of ApplicationRuleProtocols.
    :vartype protocols:
     list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallApplicationRuleProtocol]
    :ivar target_urls: List of URLs for this rule.
    :vartype target_urls: list[str]
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "source_addresses": {"key": "sourceAddresses", "type": "[str]"},
        "protocols": {"key": "protocols", "type": "[AzureFirewallApplicationRuleProtocol]"},
        "target_urls": {"key": "targetUrls", "type": "[str]"},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        description: Optional[str] = None,
        source_addresses: Optional[List[str]] = None,
        protocols: Optional[List["_models.AzureFirewallApplicationRuleProtocol"]] = None,
        target_urls: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword name: Name of the application rule.
        :paramtype name: str
        :keyword description: Description of the rule.
        :paramtype description: str
        :keyword source_addresses: List of source IP addresses for this rule.
        :paramtype source_addresses: list[str]
        :keyword protocols: Array of ApplicationRuleProtocols.
        :paramtype protocols:
         list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallApplicationRuleProtocol]
        :keyword target_urls: List of URLs for this rule.
        :paramtype target_urls: list[str]
        """
        super().__init__(**kwargs)
        self.name = name
        self.description = description
        self.source_addresses = source_addresses
        self.protocols = protocols
        self.target_urls = target_urls


class AzureFirewallApplicationRuleCollection(SubResource):
    """Application rule collection resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Gets name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar priority: Priority of the application rule collection resource.
    :vartype priority: int
    :ivar action: The action type of a rule collection.
    :vartype action: ~azure.mgmt.network.v2018_07_01.models.AzureFirewallRCAction
    :ivar rules: Collection of rules used by an application rule collection.
    :vartype rules: list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallApplicationRule]
    :ivar provisioning_state: The provisioning state of the resource. Known values are:
     "Succeeded", "Updating", "Deleting", and "Failed".
    :vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState
    """

    _validation = {
        "etag": {"readonly": True},
        "priority": {"maximum": 65000, "minimum": 100},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "priority": {"key": "properties.priority", "type": "int"},
        "action": {"key": "properties.action", "type": "AzureFirewallRCAction"},
        "rules": {"key": "properties.rules", "type": "[AzureFirewallApplicationRule]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        priority: Optional[int] = None,
        action: Optional["_models.AzureFirewallRCAction"] = None,
        rules: Optional[List["_models.AzureFirewallApplicationRule"]] = None,
        provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Gets name of the resource that is unique within a resource group. This name
         can be used to access the resource.
        :paramtype name: str
        :keyword priority: Priority of the application rule collection resource.
        :paramtype priority: int
        :keyword action: The action type of a rule collection.
        :paramtype action: ~azure.mgmt.network.v2018_07_01.models.AzureFirewallRCAction
        :keyword rules: Collection of rules used by an application rule collection.
        :paramtype rules: list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallApplicationRule]
        :keyword provisioning_state: The provisioning state of the resource. Known values are:
         "Succeeded", "Updating", "Deleting", and "Failed".
        :paramtype provisioning_state: str or
         ~azure.mgmt.network.v2018_07_01.models.ProvisioningState
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = None
        self.priority = priority
        self.action = action
        self.rules = rules
        self.provisioning_state = provisioning_state


class AzureFirewallApplicationRuleProtocol(_serialization.Model):
    """Properties of the application rule protocol.

    :ivar protocol_type: Protocol type. Known values are: "Http" and "Https".
    :vartype protocol_type: str or
     ~azure.mgmt.network.v2018_07_01.models.AzureFirewallApplicationRuleProtocolType
    :ivar port: Port number for the protocol; it cannot be greater than 64000. This field is
     optional.
    :vartype port: int
    """

    _validation = {
        "port": {"maximum": 64000, "minimum": 0},
    }

    _attribute_map = {
        "protocol_type": {"key": "protocolType", "type": "str"},
        "port": {"key": "port", "type": "int"},
    }

    def __init__(
        self,
        *,
        protocol_type: Optional[Union[str, "_models.AzureFirewallApplicationRuleProtocolType"]] = None,
        port: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword protocol_type: Protocol type. Known values are: "Http" and "Https".
        :paramtype protocol_type: str or
         ~azure.mgmt.network.v2018_07_01.models.AzureFirewallApplicationRuleProtocolType
        :keyword port: Port number for the protocol; it cannot be greater than 64000. This field
         is optional.
        :paramtype port: int
        """
        super().__init__(**kwargs)
        self.protocol_type = protocol_type
        self.port = port


class AzureFirewallIPConfiguration(SubResource):
    """IP configuration of an Azure Firewall.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource that is unique within a resource group. This name can be used
     to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar private_ip_address: The Firewall Internal Load Balancer IP to be used as the next hop in
     User Defined Routes.
    :vartype private_ip_address: str
    :ivar subnet: Reference of the subnet resource. This resource must be named
     'AzureFirewallSubnet'.
    :vartype subnet: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar internal_public_ip_address: Reference of the PublicIP resource. This field is a
     mandatory input.
    :vartype internal_public_ip_address: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar public_ip_address: Reference of the PublicIP resource. This field is populated in the
     output.
    :vartype public_ip_address: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar provisioning_state: The provisioning state of the resource. Known values are:
     "Succeeded", "Updating", "Deleting", and "Failed".
    :vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "private_ip_address": {"key": "properties.privateIPAddress", "type": "str"},
        "subnet": {"key": "properties.subnet", "type": "SubResource"},
        "internal_public_ip_address": {"key": "properties.internalPublicIpAddress", "type": "SubResource"},
        "public_ip_address": {"key": "properties.publicIPAddress", "type": "SubResource"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        private_ip_address: Optional[str] = None,
        subnet: Optional["_models.SubResource"] = None,
        internal_public_ip_address: Optional["_models.SubResource"] = None,
        public_ip_address: Optional["_models.SubResource"] = None,
        provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource that is unique within a resource group. This name can
         be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword private_ip_address: The Firewall Internal Load Balancer IP to be used as the next
         hop in User Defined Routes.
        :paramtype private_ip_address: str
        :keyword subnet: Reference of the subnet resource. This resource must be named
         'AzureFirewallSubnet'.
        :paramtype subnet: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword internal_public_ip_address: Reference of the PublicIP resource. This field is a
         mandatory input.
        :paramtype internal_public_ip_address: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword public_ip_address: Reference of the PublicIP resource. This field is populated in
         the output.
        :paramtype public_ip_address: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword provisioning_state: The provisioning state of the resource. Known values are:
         "Succeeded", "Updating", "Deleting", and "Failed".
        :paramtype provisioning_state: str or
         ~azure.mgmt.network.v2018_07_01.models.ProvisioningState
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.private_ip_address = private_ip_address
        self.subnet = subnet
        self.internal_public_ip_address = internal_public_ip_address
        self.public_ip_address = public_ip_address
        self.provisioning_state = provisioning_state


class AzureFirewallListResult(_serialization.Model):
    """Response for ListAzureFirewalls API service call.

    :ivar value: List of Azure Firewalls in a resource group.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.AzureFirewall]
    :ivar next_link: URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[AzureFirewall]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.AzureFirewall"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: List of Azure Firewalls in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.AzureFirewall]
        :keyword next_link: URL to get the next set of results.
:paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class AzureFirewallNetworkRule(_serialization.Model): """Properties of the network rule. :ivar name: Name of the network rule. :vartype name: str :ivar description: Description of the rule. :vartype description: str :ivar protocols: Array of AzureFirewallNetworkRuleProtocols. :vartype protocols: list[str or ~azure.mgmt.network.v2018_07_01.models.AzureFirewallNetworkRuleProtocol] :ivar source_addresses: List of source IP addresses for this rule. :vartype source_addresses: list[str] :ivar destination_addresses: List of destination IP addresses. :vartype destination_addresses: list[str] :ivar destination_ports: List of destination ports. :vartype destination_ports: list[str] """ _attribute_map = { "name": {"key": "name", "type": "str"}, "description": {"key": "description", "type": "str"}, "protocols": {"key": "protocols", "type": "[str]"}, "source_addresses": {"key": "sourceAddresses", "type": "[str]"}, "destination_addresses": {"key": "destinationAddresses", "type": "[str]"}, "destination_ports": {"key": "destinationPorts", "type": "[str]"}, } def __init__( self, *, name: Optional[str] = None, description: Optional[str] = None, protocols: Optional[List[Union[str, "_models.AzureFirewallNetworkRuleProtocol"]]] = None, source_addresses: Optional[List[str]] = None, destination_addresses: Optional[List[str]] = None, destination_ports: Optional[List[str]] = None, **kwargs ): """ :keyword name: Name of the network rule. :paramtype name: str :keyword description: Description of the rule. :paramtype description: str :keyword protocols: Array of AzureFirewallNetworkRuleProtocols. :paramtype protocols: list[str or ~azure.mgmt.network.v2018_07_01.models.AzureFirewallNetworkRuleProtocol] :keyword source_addresses: List of source IP addresses for this rule. :paramtype source_addresses: list[str] :keyword destination_addresses: List of destination IP addresses. :paramtype destination_addresses: list[str] :keyword destination_ports: List of destination ports. :paramtype destination_ports: list[str] """ super().__init__(**kwargs) self.name = name self.description = description self.protocols = protocols self.source_addresses = source_addresses self.destination_addresses = destination_addresses self.destination_ports = destination_ports class AzureFirewallNetworkRuleCollection(SubResource): """Network rule collection resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar priority: Priority of the network rule collection resource. :vartype priority: int :ivar action: The action type of a rule collection. :vartype action: ~azure.mgmt.network.v2018_07_01.models.AzureFirewallRCAction :ivar rules: Collection of rules used by a network rule collection. :vartype rules: list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallNetworkRule] :ivar provisioning_state: The provisioning state of the resource. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". 
:vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState """ _validation = { "etag": {"readonly": True}, "priority": {"maximum": 65000, "minimum": 100}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "priority": {"key": "properties.priority", "type": "int"}, "action": {"key": "properties.action", "type": "AzureFirewallRCAction"}, "rules": {"key": "properties.rules", "type": "[AzureFirewallNetworkRule]"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, priority: Optional[int] = None, action: Optional["_models.AzureFirewallRCAction"] = None, rules: Optional[List["_models.AzureFirewallNetworkRule"]] = None, provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword priority: Priority of the network rule collection resource. :paramtype priority: int :keyword action: The action type of a rule collection. :paramtype action: ~azure.mgmt.network.v2018_07_01.models.AzureFirewallRCAction :keyword rules: Collection of rules used by a network rule collection. :paramtype rules: list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallNetworkRule] :keyword provisioning_state: The provisioning state of the resource. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". :paramtype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState """ super().__init__(id=id, **kwargs) self.name = name self.etag = None self.priority = priority self.action = action self.rules = rules self.provisioning_state = provisioning_state class AzureFirewallRCAction(_serialization.Model): """Properties of the AzureFirewallRCAction. :ivar type: The type of action. Known values are: "Allow" and "Deny". :vartype type: str or ~azure.mgmt.network.v2018_07_01.models.AzureFirewallRCActionType """ _attribute_map = { "type": {"key": "type", "type": "str"}, } def __init__(self, *, type: Optional[Union[str, "_models.AzureFirewallRCActionType"]] = None, **kwargs): """ :keyword type: The type of action. Known values are: "Allow" and "Deny". :paramtype type: str or ~azure.mgmt.network.v2018_07_01.models.AzureFirewallRCActionType """ super().__init__(**kwargs) self.type = type class AzureReachabilityReport(_serialization.Model): """Azure reachability report details. All required parameters must be populated in order to send to Azure. :ivar aggregation_level: The aggregation level of Azure reachability report. Can be Country, State or City. Required. :vartype aggregation_level: str :ivar provider_location: Parameters that define a geographic location. Required. :vartype provider_location: ~azure.mgmt.network.v2018_07_01.models.AzureReachabilityReportLocation :ivar reachability_report: List of Azure reachability report items. Required. 
:vartype reachability_report: list[~azure.mgmt.network.v2018_07_01.models.AzureReachabilityReportItem] """ _validation = { "aggregation_level": {"required": True}, "provider_location": {"required": True}, "reachability_report": {"required": True}, } _attribute_map = { "aggregation_level": {"key": "aggregationLevel", "type": "str"}, "provider_location": {"key": "providerLocation", "type": "AzureReachabilityReportLocation"}, "reachability_report": {"key": "reachabilityReport", "type": "[AzureReachabilityReportItem]"}, } def __init__( self, *, aggregation_level: str, provider_location: "_models.AzureReachabilityReportLocation", reachability_report: List["_models.AzureReachabilityReportItem"], **kwargs ): """ :keyword aggregation_level: The aggregation level of Azure reachability report. Can be Country, State or City. Required. :paramtype aggregation_level: str :keyword provider_location: Parameters that define a geographic location. Required. :paramtype provider_location: ~azure.mgmt.network.v2018_07_01.models.AzureReachabilityReportLocation :keyword reachability_report: List of Azure reachability report items. Required. :paramtype reachability_report: list[~azure.mgmt.network.v2018_07_01.models.AzureReachabilityReportItem] """ super().__init__(**kwargs) self.aggregation_level = aggregation_level self.provider_location = provider_location self.reachability_report = reachability_report class AzureReachabilityReportItem(_serialization.Model): """Azure reachability report details for a given provider location. :ivar provider: The Internet service provider. :vartype provider: str :ivar azure_location: The Azure region. :vartype azure_location: str :ivar latencies: List of latency details for each of the time series. :vartype latencies: list[~azure.mgmt.network.v2018_07_01.models.AzureReachabilityReportLatencyInfo] """ _attribute_map = { "provider": {"key": "provider", "type": "str"}, "azure_location": {"key": "azureLocation", "type": "str"}, "latencies": {"key": "latencies", "type": "[AzureReachabilityReportLatencyInfo]"}, } def __init__( self, *, provider: Optional[str] = None, azure_location: Optional[str] = None, latencies: Optional[List["_models.AzureReachabilityReportLatencyInfo"]] = None, **kwargs ): """ :keyword provider: The Internet service provider. :paramtype provider: str :keyword azure_location: The Azure region. :paramtype azure_location: str :keyword latencies: List of latency details for each of the time series. :paramtype latencies: list[~azure.mgmt.network.v2018_07_01.models.AzureReachabilityReportLatencyInfo] """ super().__init__(**kwargs) self.provider = provider self.azure_location = azure_location self.latencies = latencies class AzureReachabilityReportLatencyInfo(_serialization.Model): """Details on latency for a time series. :ivar time_stamp: The time stamp. :vartype time_stamp: ~datetime.datetime :ivar score: The relative latency score between 1 and 100, higher values indicating a faster connection. :vartype score: int """ _validation = { "score": {"maximum": 100, "minimum": 1}, } _attribute_map = { "time_stamp": {"key": "timeStamp", "type": "iso-8601"}, "score": {"key": "score", "type": "int"}, } def __init__(self, *, time_stamp: Optional[datetime.datetime] = None, score: Optional[int] = None, **kwargs): """ :keyword time_stamp: The time stamp. :paramtype time_stamp: ~datetime.datetime :keyword score: The relative latency score between 1 and 100, higher values indicating a faster connection. 
:paramtype score: int """ super().__init__(**kwargs) self.time_stamp = time_stamp self.score = score class AzureReachabilityReportLocation(_serialization.Model): """Parameters that define a geographic location. All required parameters must be populated in order to send to Azure. :ivar country: The name of the country. Required. :vartype country: str :ivar state: The name of the state. :vartype state: str :ivar city: The name of the city or town. :vartype city: str """ _validation = { "country": {"required": True}, } _attribute_map = { "country": {"key": "country", "type": "str"}, "state": {"key": "state", "type": "str"}, "city": {"key": "city", "type": "str"}, } def __init__(self, *, country: str, state: Optional[str] = None, city: Optional[str] = None, **kwargs): """ :keyword country: The name of the country. Required. :paramtype country: str :keyword state: The name of the state. :paramtype state: str :keyword city: The name of the city or town. :paramtype city: str """ super().__init__(**kwargs) self.country = country self.state = state self.city = city class AzureReachabilityReportParameters(_serialization.Model): """Geographic and time constraints for Azure reachability report. All required parameters must be populated in order to send to Azure. :ivar provider_location: Parameters that define a geographic location. Required. :vartype provider_location: ~azure.mgmt.network.v2018_07_01.models.AzureReachabilityReportLocation :ivar providers: List of Internet service providers. :vartype providers: list[str] :ivar azure_locations: Optional Azure regions to scope the query to. :vartype azure_locations: list[str] :ivar start_time: The start time for the Azure reachability report. Required. :vartype start_time: ~datetime.datetime :ivar end_time: The end time for the Azure reachability report. Required. :vartype end_time: ~datetime.datetime """ _validation = { "provider_location": {"required": True}, "start_time": {"required": True}, "end_time": {"required": True}, } _attribute_map = { "provider_location": {"key": "providerLocation", "type": "AzureReachabilityReportLocation"}, "providers": {"key": "providers", "type": "[str]"}, "azure_locations": {"key": "azureLocations", "type": "[str]"}, "start_time": {"key": "startTime", "type": "iso-8601"}, "end_time": {"key": "endTime", "type": "iso-8601"}, } def __init__( self, *, provider_location: "_models.AzureReachabilityReportLocation", start_time: datetime.datetime, end_time: datetime.datetime, providers: Optional[List[str]] = None, azure_locations: Optional[List[str]] = None, **kwargs ): """ :keyword provider_location: Parameters that define a geographic location. Required. :paramtype provider_location: ~azure.mgmt.network.v2018_07_01.models.AzureReachabilityReportLocation :keyword providers: List of Internet service providers. :paramtype providers: list[str] :keyword azure_locations: Optional Azure regions to scope the query to. :paramtype azure_locations: list[str] :keyword start_time: The start time for the Azure reachability report. Required. :paramtype start_time: ~datetime.datetime :keyword end_time: The end time for the Azure reachability report. Required. :paramtype end_time: ~datetime.datetime """ super().__init__(**kwargs) self.provider_location = provider_location self.providers = providers self.azure_locations = azure_locations self.start_time = start_time self.end_time = end_time class BackendAddressPool(SubResource): """Pool of backend IP addresses. 
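
    A minimal construction sketch (illustrative only; the pool name below is
    hypothetical)::

        pool = BackendAddressPool(name="web-backend-pool")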
    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Gets name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar backend_ip_configurations: Gets collection of references to IP addresses defined in
     network interfaces.
    :vartype backend_ip_configurations:
     list[~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceIPConfiguration]
    :ivar load_balancing_rules: Gets load balancing rules that use this backend address pool.
    :vartype load_balancing_rules: list[~azure.mgmt.network.v2018_07_01.models.SubResource]
    :ivar outbound_rule: Gets outbound rules that use this backend address pool.
    :vartype outbound_rule: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar outbound_rules: Gets outbound rules that use this backend address pool.
    :vartype outbound_rules: list[~azure.mgmt.network.v2018_07_01.models.SubResource]
    :ivar provisioning_state: Provisioning state of the backend address pool resource. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "backend_ip_configurations": {"readonly": True},
        "load_balancing_rules": {"readonly": True},
        "outbound_rule": {"readonly": True},
        "outbound_rules": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "backend_ip_configurations": {
            "key": "properties.backendIPConfigurations",
            "type": "[NetworkInterfaceIPConfiguration]",
        },
        "load_balancing_rules": {"key": "properties.loadBalancingRules", "type": "[SubResource]"},
        "outbound_rule": {"key": "properties.outboundRule", "type": "SubResource"},
        "outbound_rules": {"key": "properties.outboundRules", "type": "[SubResource]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Gets name of the resource that is unique within a resource group. This name
         can be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword provisioning_state: Provisioning state of the backend address pool resource.
         Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.backend_ip_configurations = None
        self.load_balancing_rules = None
        self.outbound_rule = None
        self.outbound_rules = None
        self.provisioning_state = provisioning_state


class BGPCommunity(_serialization.Model):
    """Contains bgp community information offered in Service Community resources.

    :ivar service_supported_region: The region which the service supports. e.g. For O365, region
     is Global.
    :vartype service_supported_region: str
    :ivar community_name: The name of the bgp community. e.g. Skype.
    :vartype community_name: str
    :ivar community_value: The value of the bgp community. For more information:
     https://docs.microsoft.com/en-us/azure/expressroute/expressroute-routing.
    :vartype community_value: str
    :ivar community_prefixes: The prefixes that the bgp community contains.
    :vartype community_prefixes: list[str]
    :ivar is_authorized_to_use: Whether the customer is authorized to use the bgp community.
    :vartype is_authorized_to_use: bool
    :ivar service_group: The service group of the bgp community.
    :vartype service_group: str
    """

    _attribute_map = {
        "service_supported_region": {"key": "serviceSupportedRegion", "type": "str"},
        "community_name": {"key": "communityName", "type": "str"},
        "community_value": {"key": "communityValue", "type": "str"},
        "community_prefixes": {"key": "communityPrefixes", "type": "[str]"},
        "is_authorized_to_use": {"key": "isAuthorizedToUse", "type": "bool"},
        "service_group": {"key": "serviceGroup", "type": "str"},
    }

    def __init__(
        self,
        *,
        service_supported_region: Optional[str] = None,
        community_name: Optional[str] = None,
        community_value: Optional[str] = None,
        community_prefixes: Optional[List[str]] = None,
        is_authorized_to_use: Optional[bool] = None,
        service_group: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword service_supported_region: The region which the service supports. e.g. For O365,
         region is Global.
        :paramtype service_supported_region: str
        :keyword community_name: The name of the bgp community. e.g. Skype.
        :paramtype community_name: str
        :keyword community_value: The value of the bgp community. For more information:
         https://docs.microsoft.com/en-us/azure/expressroute/expressroute-routing.
        :paramtype community_value: str
        :keyword community_prefixes: The prefixes that the bgp community contains.
        :paramtype community_prefixes: list[str]
        :keyword is_authorized_to_use: Whether the customer is authorized to use the bgp
         community.
        :paramtype is_authorized_to_use: bool
        :keyword service_group: The service group of the bgp community.
        :paramtype service_group: str
        """
        super().__init__(**kwargs)
        self.service_supported_region = service_supported_region
        self.community_name = community_name
        self.community_value = community_value
        self.community_prefixes = community_prefixes
        self.is_authorized_to_use = is_authorized_to_use
        self.service_group = service_group


class BgpPeerStatus(_serialization.Model):
    """BGP peer status details.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar local_address: The virtual network gateway's local address.
    :vartype local_address: str
    :ivar neighbor: The remote BGP peer.
    :vartype neighbor: str
    :ivar asn: The autonomous system number of the remote BGP peer.
    :vartype asn: int
    :ivar state: The BGP peer state. Known values are: "Unknown", "Stopped", "Idle", "Connecting",
     and "Connected".
    :vartype state: str or ~azure.mgmt.network.v2018_07_01.models.BgpPeerState
    :ivar connected_duration: For how long the peering has been up.
    :vartype connected_duration: str
    :ivar routes_received: The number of routes learned from this peer.
    :vartype routes_received: int
    :ivar messages_sent: The number of BGP messages sent.
    :vartype messages_sent: int
    :ivar messages_received: The number of BGP messages received.
:vartype messages_received: int """ _validation = { "local_address": {"readonly": True}, "neighbor": {"readonly": True}, "asn": {"readonly": True}, "state": {"readonly": True}, "connected_duration": {"readonly": True}, "routes_received": {"readonly": True}, "messages_sent": {"readonly": True}, "messages_received": {"readonly": True}, } _attribute_map = { "local_address": {"key": "localAddress", "type": "str"}, "neighbor": {"key": "neighbor", "type": "str"}, "asn": {"key": "asn", "type": "int"}, "state": {"key": "state", "type": "str"}, "connected_duration": {"key": "connectedDuration", "type": "str"}, "routes_received": {"key": "routesReceived", "type": "int"}, "messages_sent": {"key": "messagesSent", "type": "int"}, "messages_received": {"key": "messagesReceived", "type": "int"}, } def __init__(self, **kwargs): """ """ super().__init__(**kwargs) self.local_address = None self.neighbor = None self.asn = None self.state = None self.connected_duration = None self.routes_received = None self.messages_sent = None self.messages_received = None class BgpPeerStatusListResult(_serialization.Model): """Response for list BGP peer status API service call. :ivar value: List of BGP peers. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.BgpPeerStatus] """ _attribute_map = { "value": {"key": "value", "type": "[BgpPeerStatus]"}, } def __init__(self, *, value: Optional[List["_models.BgpPeerStatus"]] = None, **kwargs): """ :keyword value: List of BGP peers. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.BgpPeerStatus] """ super().__init__(**kwargs) self.value = value class BgpServiceCommunity(Resource): """Service Community Properties. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar service_name: The name of the bgp community. e.g. Skype. :vartype service_name: str :ivar bgp_communities: Get a list of bgp communities. :vartype bgp_communities: list[~azure.mgmt.network.v2018_07_01.models.BGPCommunity] """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "service_name": {"key": "properties.serviceName", "type": "str"}, "bgp_communities": {"key": "properties.bgpCommunities", "type": "[BGPCommunity]"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, service_name: Optional[str] = None, bgp_communities: Optional[List["_models.BGPCommunity"]] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword service_name: The name of the bgp community. e.g. Skype. :paramtype service_name: str :keyword bgp_communities: Get a list of bgp communities. 
:paramtype bgp_communities: list[~azure.mgmt.network.v2018_07_01.models.BGPCommunity] """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.service_name = service_name self.bgp_communities = bgp_communities class BgpServiceCommunityListResult(_serialization.Model): """Response for the ListServiceCommunity API service call. :ivar value: A list of service community resources. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.BgpServiceCommunity] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[BgpServiceCommunity]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.BgpServiceCommunity"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: A list of service community resources. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.BgpServiceCommunity] :keyword next_link: The URL to get the next set of results. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class BgpSettings(_serialization.Model): """BGP settings details. :ivar asn: The BGP speaker's ASN. :vartype asn: int :ivar bgp_peering_address: The BGP peering address and BGP identifier of this BGP speaker. :vartype bgp_peering_address: str :ivar peer_weight: The weight added to routes learned from this BGP speaker. :vartype peer_weight: int """ _attribute_map = { "asn": {"key": "asn", "type": "int"}, "bgp_peering_address": {"key": "bgpPeeringAddress", "type": "str"}, "peer_weight": {"key": "peerWeight", "type": "int"}, } def __init__( self, *, asn: Optional[int] = None, bgp_peering_address: Optional[str] = None, peer_weight: Optional[int] = None, **kwargs ): """ :keyword asn: The BGP speaker's ASN. :paramtype asn: int :keyword bgp_peering_address: The BGP peering address and BGP identifier of this BGP speaker. :paramtype bgp_peering_address: str :keyword peer_weight: The weight added to routes learned from this BGP speaker. :paramtype peer_weight: int """ super().__init__(**kwargs) self.asn = asn self.bgp_peering_address = bgp_peering_address self.peer_weight = peer_weight class ConnectionMonitor(_serialization.Model): """Parameters that define the operation to create a connection monitor. All required parameters must be populated in order to send to Azure. :ivar location: Connection monitor location. :vartype location: str :ivar tags: Connection monitor tags. :vartype tags: dict[str, str] :ivar source: Describes the source of connection monitor. Required. :vartype source: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorSource :ivar destination: Describes the destination of connection monitor. Required. :vartype destination: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorDestination :ivar auto_start: Determines if the connection monitor will start automatically once created. :vartype auto_start: bool :ivar monitoring_interval_in_seconds: Monitoring interval in seconds. 
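    :vartype monitoring_interval_in_seconds: int

    Example (an illustrative sketch; the resource ID and address below are
    placeholders, not values defined by this module)::

        monitor = ConnectionMonitor(
            location="westus",
            source=ConnectionMonitorSource(resource_id="<source-vm-resource-id>"),
            destination=ConnectionMonitorDestination(address="www.example.com", port=80),
            monitoring_interval_in_seconds=30,
        )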
    """

    _validation = {
        "source": {"required": True},
        "destination": {"required": True},
    }

    _attribute_map = {
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "source": {"key": "properties.source", "type": "ConnectionMonitorSource"},
        "destination": {"key": "properties.destination", "type": "ConnectionMonitorDestination"},
        "auto_start": {"key": "properties.autoStart", "type": "bool"},
        "monitoring_interval_in_seconds": {"key": "properties.monitoringIntervalInSeconds", "type": "int"},
    }

    def __init__(
        self,
        *,
        source: "_models.ConnectionMonitorSource",
        destination: "_models.ConnectionMonitorDestination",
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        auto_start: bool = True,
        monitoring_interval_in_seconds: int = 60,
        **kwargs
    ):
        """
        :keyword location: Connection monitor location.
        :paramtype location: str
        :keyword tags: Connection monitor tags.
        :paramtype tags: dict[str, str]
        :keyword source: Describes the source of connection monitor. Required.
        :paramtype source: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorSource
        :keyword destination: Describes the destination of connection monitor. Required.
        :paramtype destination: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorDestination
        :keyword auto_start: Determines if the connection monitor will start automatically once
         created.
        :paramtype auto_start: bool
        :keyword monitoring_interval_in_seconds: Monitoring interval in seconds.
        :paramtype monitoring_interval_in_seconds: int
        """
        super().__init__(**kwargs)
        self.location = location
        self.tags = tags
        self.source = source
        self.destination = destination
        self.auto_start = auto_start
        self.monitoring_interval_in_seconds = monitoring_interval_in_seconds


class ConnectionMonitorDestination(_serialization.Model):
    """Describes the destination of connection monitor.

    :ivar resource_id: The ID of the resource used as the destination by connection monitor.
    :vartype resource_id: str
    :ivar address: Address of the connection monitor destination (IP or domain name).
    :vartype address: str
    :ivar port: The destination port used by connection monitor.
    :vartype port: int
    """

    _attribute_map = {
        "resource_id": {"key": "resourceId", "type": "str"},
        "address": {"key": "address", "type": "str"},
        "port": {"key": "port", "type": "int"},
    }

    def __init__(
        self, *, resource_id: Optional[str] = None, address: Optional[str] = None, port: Optional[int] = None, **kwargs
    ):
        """
        :keyword resource_id: The ID of the resource used as the destination by connection monitor.
        :paramtype resource_id: str
        :keyword address: Address of the connection monitor destination (IP or domain name).
        :paramtype address: str
        :keyword port: The destination port used by connection monitor.
        :paramtype port: int
        """
        super().__init__(**kwargs)
        self.resource_id = resource_id
        self.address = address
        self.port = port


class ConnectionMonitorListResult(_serialization.Model):
    """List of connection monitors.

    :ivar value: Information about connection monitors.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorResult]
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[ConnectionMonitorResult]"},
    }

    def __init__(self, *, value: Optional[List["_models.ConnectionMonitorResult"]] = None, **kwargs):
        """
        :keyword value: Information about connection monitors.
:paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorResult] """ super().__init__(**kwargs) self.value = value class ConnectionMonitorParameters(_serialization.Model): """Parameters that define the operation to create a connection monitor. All required parameters must be populated in order to send to Azure. :ivar source: Describes the source of connection monitor. Required. :vartype source: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorSource :ivar destination: Describes the destination of connection monitor. Required. :vartype destination: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorDestination :ivar auto_start: Determines if the connection monitor will start automatically once created. :vartype auto_start: bool :ivar monitoring_interval_in_seconds: Monitoring interval in seconds. :vartype monitoring_interval_in_seconds: int """ _validation = { "source": {"required": True}, "destination": {"required": True}, } _attribute_map = { "source": {"key": "source", "type": "ConnectionMonitorSource"}, "destination": {"key": "destination", "type": "ConnectionMonitorDestination"}, "auto_start": {"key": "autoStart", "type": "bool"}, "monitoring_interval_in_seconds": {"key": "monitoringIntervalInSeconds", "type": "int"}, } def __init__( self, *, source: "_models.ConnectionMonitorSource", destination: "_models.ConnectionMonitorDestination", auto_start: bool = True, monitoring_interval_in_seconds: int = 60, **kwargs ): """ :keyword source: Describes the source of connection monitor. Required. :paramtype source: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorSource :keyword destination: Describes the destination of connection monitor. Required. :paramtype destination: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorDestination :keyword auto_start: Determines if the connection monitor will start automatically once created. :paramtype auto_start: bool :keyword monitoring_interval_in_seconds: Monitoring interval in seconds. :paramtype monitoring_interval_in_seconds: int """ super().__init__(**kwargs) self.source = source self.destination = destination self.auto_start = auto_start self.monitoring_interval_in_seconds = monitoring_interval_in_seconds class ConnectionMonitorQueryResult(_serialization.Model): """List of connection states snapshots. :ivar source_status: Status of connection monitor source. Known values are: "Uknown", "Active", and "Inactive". :vartype source_status: str or ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorSourceStatus :ivar states: Information about connection states. :vartype states: list[~azure.mgmt.network.v2018_07_01.models.ConnectionStateSnapshot] """ _attribute_map = { "source_status": {"key": "sourceStatus", "type": "str"}, "states": {"key": "states", "type": "[ConnectionStateSnapshot]"}, } def __init__( self, *, source_status: Optional[Union[str, "_models.ConnectionMonitorSourceStatus"]] = None, states: Optional[List["_models.ConnectionStateSnapshot"]] = None, **kwargs ): """ :keyword source_status: Status of connection monitor source. Known values are: "Uknown", "Active", and "Inactive". :paramtype source_status: str or ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorSourceStatus :keyword states: Information about connection states. 
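        :paramtype states: list[~azure.mgmt.network.v2018_07_01.models.ConnectionStateSnapshot]

        Example (an illustrative sketch of reading a query result returned by
        the service; ``result`` is a placeholder for such an instance)::

            for snapshot in result.states or []:
                print(snapshot.connection_state, snapshot.avg_latency_in_ms)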
        """
        super().__init__(**kwargs)
        self.source_status = source_status
        self.states = states


class ConnectionMonitorResult(_serialization.Model):  # pylint: disable=too-many-instance-attributes
    """Information about the connection monitor.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar name: Name of the connection monitor.
    :vartype name: str
    :ivar id: ID of the connection monitor.
    :vartype id: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar type: Connection monitor type.
    :vartype type: str
    :ivar location: Connection monitor location.
    :vartype location: str
    :ivar tags: Connection monitor tags.
    :vartype tags: dict[str, str]
    :ivar source: Describes the source of connection monitor.
    :vartype source: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorSource
    :ivar destination: Describes the destination of connection monitor.
    :vartype destination: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorDestination
    :ivar auto_start: Determines if the connection monitor will start automatically once created.
    :vartype auto_start: bool
    :ivar monitoring_interval_in_seconds: Monitoring interval in seconds.
    :vartype monitoring_interval_in_seconds: int
    :ivar provisioning_state: The provisioning state of the connection monitor. Known values are:
     "Succeeded", "Updating", "Deleting", and "Failed".
    :vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState
    :ivar start_time: The date and time when the connection monitor was started.
    :vartype start_time: ~datetime.datetime
    :ivar monitoring_status: The monitoring status of the connection monitor.
    :vartype monitoring_status: str
    """

    _validation = {
        "name": {"readonly": True},
        "id": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "source": {"key": "properties.source", "type": "ConnectionMonitorSource"},
        "destination": {"key": "properties.destination", "type": "ConnectionMonitorDestination"},
        "auto_start": {"key": "properties.autoStart", "type": "bool"},
        "monitoring_interval_in_seconds": {"key": "properties.monitoringIntervalInSeconds", "type": "int"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
        "start_time": {"key": "properties.startTime", "type": "iso-8601"},
        "monitoring_status": {"key": "properties.monitoringStatus", "type": "str"},
    }

    def __init__(
        self,
        *,
        etag: Optional[str] = None,
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        source: Optional["_models.ConnectionMonitorSource"] = None,
        destination: Optional["_models.ConnectionMonitorDestination"] = None,
        auto_start: bool = True,
        monitoring_interval_in_seconds: int = 60,
        provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None,
        start_time: Optional[datetime.datetime] = None,
        monitoring_status: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword location: Connection monitor location.
        :paramtype location: str
        :keyword tags: Connection monitor tags.
        :paramtype tags: dict[str, str]
        :keyword source: Describes the source of connection monitor.
:paramtype source: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorSource :keyword destination: Describes the destination of connection monitor. :paramtype destination: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorDestination :keyword auto_start: Determines if the connection monitor will start automatically once created. :paramtype auto_start: bool :keyword monitoring_interval_in_seconds: Monitoring interval in seconds. :paramtype monitoring_interval_in_seconds: int :keyword provisioning_state: The provisioning state of the connection monitor. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". :paramtype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState :keyword start_time: The date and time when the connection monitor was started. :paramtype start_time: ~datetime.datetime :keyword monitoring_status: The monitoring status of the connection monitor. :paramtype monitoring_status: str """ super().__init__(**kwargs) self.name = None self.id = None self.etag = etag self.type = None self.location = location self.tags = tags self.source = source self.destination = destination self.auto_start = auto_start self.monitoring_interval_in_seconds = monitoring_interval_in_seconds self.provisioning_state = provisioning_state self.start_time = start_time self.monitoring_status = monitoring_status class ConnectionMonitorResultProperties(ConnectionMonitorParameters): """Describes the properties of a connection monitor. All required parameters must be populated in order to send to Azure. :ivar source: Describes the source of connection monitor. Required. :vartype source: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorSource :ivar destination: Describes the destination of connection monitor. Required. :vartype destination: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorDestination :ivar auto_start: Determines if the connection monitor will start automatically once created. :vartype auto_start: bool :ivar monitoring_interval_in_seconds: Monitoring interval in seconds. :vartype monitoring_interval_in_seconds: int :ivar provisioning_state: The provisioning state of the connection monitor. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". :vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState :ivar start_time: The date and time when the connection monitor was started. :vartype start_time: ~datetime.datetime :ivar monitoring_status: The monitoring status of the connection monitor. 
    :vartype monitoring_status: str
    """

    _validation = {
        "source": {"required": True},
        "destination": {"required": True},
    }

    _attribute_map = {
        "source": {"key": "source", "type": "ConnectionMonitorSource"},
        "destination": {"key": "destination", "type": "ConnectionMonitorDestination"},
        "auto_start": {"key": "autoStart", "type": "bool"},
        "monitoring_interval_in_seconds": {"key": "monitoringIntervalInSeconds", "type": "int"},
        "provisioning_state": {"key": "provisioningState", "type": "str"},
        "start_time": {"key": "startTime", "type": "iso-8601"},
        "monitoring_status": {"key": "monitoringStatus", "type": "str"},
    }

    def __init__(
        self,
        *,
        source: "_models.ConnectionMonitorSource",
        destination: "_models.ConnectionMonitorDestination",
        auto_start: bool = True,
        monitoring_interval_in_seconds: int = 60,
        provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None,
        start_time: Optional[datetime.datetime] = None,
        monitoring_status: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword source: Describes the source of connection monitor. Required.
        :paramtype source: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorSource
        :keyword destination: Describes the destination of connection monitor. Required.
        :paramtype destination: ~azure.mgmt.network.v2018_07_01.models.ConnectionMonitorDestination
        :keyword auto_start: Determines if the connection monitor will start automatically once
         created.
        :paramtype auto_start: bool
        :keyword monitoring_interval_in_seconds: Monitoring interval in seconds.
        :paramtype monitoring_interval_in_seconds: int
        :keyword provisioning_state: The provisioning state of the connection monitor. Known values
         are: "Succeeded", "Updating", "Deleting", and "Failed".
        :paramtype provisioning_state: str or
         ~azure.mgmt.network.v2018_07_01.models.ProvisioningState
        :keyword start_time: The date and time when the connection monitor was started.
        :paramtype start_time: ~datetime.datetime
        :keyword monitoring_status: The monitoring status of the connection monitor.
        :paramtype monitoring_status: str
        """
        super().__init__(
            source=source,
            destination=destination,
            auto_start=auto_start,
            monitoring_interval_in_seconds=monitoring_interval_in_seconds,
            **kwargs
        )
        self.provisioning_state = provisioning_state
        self.start_time = start_time
        self.monitoring_status = monitoring_status


class ConnectionMonitorSource(_serialization.Model):
    """Describes the source of connection monitor.

    All required parameters must be populated in order to send to Azure.

    :ivar resource_id: The ID of the resource used as the source by connection monitor. Required.
    :vartype resource_id: str
    :ivar port: The source port used by connection monitor.
    :vartype port: int
    """

    _validation = {
        "resource_id": {"required": True},
    }

    _attribute_map = {
        "resource_id": {"key": "resourceId", "type": "str"},
        "port": {"key": "port", "type": "int"},
    }

    def __init__(self, *, resource_id: str, port: Optional[int] = None, **kwargs):
        """
        :keyword resource_id: The ID of the resource used as the source by connection monitor.
         Required.
        :paramtype resource_id: str
        :keyword port: The source port used by connection monitor.
        :paramtype port: int
        """
        super().__init__(**kwargs)
        self.resource_id = resource_id
        self.port = port


class ConnectionResetSharedKey(_serialization.Model):
    """The virtual network connection reset shared key.

    All required parameters must be populated in order to send to Azure.

    :ivar key_length: The virtual network connection reset shared key length, should be between 1
     and 128. Required.
    :vartype key_length: int
    """

    _validation = {
        "key_length": {"required": True, "maximum": 128, "minimum": 1},
    }

    _attribute_map = {
        "key_length": {"key": "keyLength", "type": "int"},
    }

    def __init__(self, *, key_length: int, **kwargs):
        """
        :keyword key_length: The virtual network connection reset shared key length, should be
         between 1 and 128. Required.
        :paramtype key_length: int
        """
        super().__init__(**kwargs)
        self.key_length = key_length


class ConnectionSharedKey(SubResource):
    """Response for GetConnectionSharedKey API service call.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar value: The virtual network connection shared key value. Required.
    :vartype value: str
    """

    _validation = {
        "value": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "value": {"key": "value", "type": "str"},
    }

    def __init__(self, *, value: str, id: Optional[str] = None, **kwargs):  # pylint: disable=redefined-builtin
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword value: The virtual network connection shared key value. Required.
        :paramtype value: str
        """
        super().__init__(id=id, **kwargs)
        self.value = value


class ConnectionStateSnapshot(_serialization.Model):
    """Connection state snapshot.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar connection_state: The connection state. Known values are: "Reachable", "Unreachable",
     and "Unknown".
    :vartype connection_state: str or ~azure.mgmt.network.v2018_07_01.models.ConnectionState
    :ivar start_time: The start time of the connection snapshot.
    :vartype start_time: ~datetime.datetime
    :ivar end_time: The end time of the connection snapshot.
    :vartype end_time: ~datetime.datetime
    :ivar evaluation_state: Connectivity analysis evaluation state. Known values are:
     "NotStarted", "InProgress", and "Completed".
    :vartype evaluation_state: str or ~azure.mgmt.network.v2018_07_01.models.EvaluationState
    :ivar avg_latency_in_ms: Average latency in ms.
    :vartype avg_latency_in_ms: int
    :ivar min_latency_in_ms: Minimum latency in ms.
    :vartype min_latency_in_ms: int
    :ivar max_latency_in_ms: Maximum latency in ms.
    :vartype max_latency_in_ms: int
    :ivar probes_sent: The number of sent probes.
    :vartype probes_sent: int
    :ivar probes_failed: The number of failed probes.
    :vartype probes_failed: int
    :ivar hops: List of hops between the source and the destination.
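    :vartype hops: list[~azure.mgmt.network.v2018_07_01.models.ConnectivityHop]

    Example (an illustrative sketch; ``snapshot`` is a placeholder for an
    instance returned by the service)::

        if snapshot.connection_state == "Reachable":
            print(snapshot.avg_latency_in_ms, snapshot.probes_failed)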
    """

    _validation = {
        "hops": {"readonly": True},
    }

    _attribute_map = {
        "connection_state": {"key": "connectionState", "type": "str"},
        "start_time": {"key": "startTime", "type": "iso-8601"},
        "end_time": {"key": "endTime", "type": "iso-8601"},
        "evaluation_state": {"key": "evaluationState", "type": "str"},
        "avg_latency_in_ms": {"key": "avgLatencyInMs", "type": "int"},
        "min_latency_in_ms": {"key": "minLatencyInMs", "type": "int"},
        "max_latency_in_ms": {"key": "maxLatencyInMs", "type": "int"},
        "probes_sent": {"key": "probesSent", "type": "int"},
        "probes_failed": {"key": "probesFailed", "type": "int"},
        "hops": {"key": "hops", "type": "[ConnectivityHop]"},
    }

    def __init__(
        self,
        *,
        connection_state: Optional[Union[str, "_models.ConnectionState"]] = None,
        start_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        evaluation_state: Optional[Union[str, "_models.EvaluationState"]] = None,
        avg_latency_in_ms: Optional[int] = None,
        min_latency_in_ms: Optional[int] = None,
        max_latency_in_ms: Optional[int] = None,
        probes_sent: Optional[int] = None,
        probes_failed: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword connection_state: The connection state. Known values are: "Reachable",
         "Unreachable", and "Unknown".
        :paramtype connection_state: str or ~azure.mgmt.network.v2018_07_01.models.ConnectionState
        :keyword start_time: The start time of the connection snapshot.
        :paramtype start_time: ~datetime.datetime
        :keyword end_time: The end time of the connection snapshot.
        :paramtype end_time: ~datetime.datetime
        :keyword evaluation_state: Connectivity analysis evaluation state. Known values are:
         "NotStarted", "InProgress", and "Completed".
        :paramtype evaluation_state: str or ~azure.mgmt.network.v2018_07_01.models.EvaluationState
        :keyword avg_latency_in_ms: Average latency in ms.
        :paramtype avg_latency_in_ms: int
        :keyword min_latency_in_ms: Minimum latency in ms.
        :paramtype min_latency_in_ms: int
        :keyword max_latency_in_ms: Maximum latency in ms.
        :paramtype max_latency_in_ms: int
        :keyword probes_sent: The number of sent probes.
        :paramtype probes_sent: int
        :keyword probes_failed: The number of failed probes.
        :paramtype probes_failed: int
        """
        super().__init__(**kwargs)
        self.connection_state = connection_state
        self.start_time = start_time
        self.end_time = end_time
        self.evaluation_state = evaluation_state
        self.avg_latency_in_ms = avg_latency_in_ms
        self.min_latency_in_ms = min_latency_in_ms
        self.max_latency_in_ms = max_latency_in_ms
        self.probes_sent = probes_sent
        self.probes_failed = probes_failed
        self.hops = None


class ConnectivityDestination(_serialization.Model):
    """Parameters that define destination of connection.

    :ivar resource_id: The ID of the resource to which a connection attempt will be made.
    :vartype resource_id: str
    :ivar address: The IP address or URI of the resource to which a connection attempt will be
     made.
    :vartype address: str
    :ivar port: Port on which the connectivity check will be performed.
    :vartype port: int
    """

    _attribute_map = {
        "resource_id": {"key": "resourceId", "type": "str"},
        "address": {"key": "address", "type": "str"},
        "port": {"key": "port", "type": "int"},
    }

    def __init__(
        self, *, resource_id: Optional[str] = None, address: Optional[str] = None, port: Optional[int] = None, **kwargs
    ):
        """
        :keyword resource_id: The ID of the resource to which a connection attempt will be made.
        :paramtype resource_id: str
        :keyword address: The IP address or URI of the resource to which a connection attempt will
         be made.
        :paramtype address: str
        :keyword port: Port on which the connectivity check will be performed.
        :paramtype port: int
        """
        super().__init__(**kwargs)
        self.resource_id = resource_id
        self.address = address
        self.port = port


class ConnectivityHop(_serialization.Model):
    """Information about a hop between the source and the destination.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar type: The type of the hop.
    :vartype type: str
    :ivar id: The ID of the hop.
    :vartype id: str
    :ivar address: The IP address of the hop.
    :vartype address: str
    :ivar resource_id: The ID of the resource corresponding to this hop.
    :vartype resource_id: str
    :ivar next_hop_ids: List of next hop identifiers.
    :vartype next_hop_ids: list[str]
    :ivar issues: List of issues.
    :vartype issues: list[~azure.mgmt.network.v2018_07_01.models.ConnectivityIssue]
    """

    _validation = {
        "type": {"readonly": True},
        "id": {"readonly": True},
        "address": {"readonly": True},
        "resource_id": {"readonly": True},
        "next_hop_ids": {"readonly": True},
        "issues": {"readonly": True},
    }

    _attribute_map = {
        "type": {"key": "type", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "address": {"key": "address", "type": "str"},
        "resource_id": {"key": "resourceId", "type": "str"},
        "next_hop_ids": {"key": "nextHopIds", "type": "[str]"},
        "issues": {"key": "issues", "type": "[ConnectivityIssue]"},
    }

    def __init__(self, **kwargs):
        """ """
        super().__init__(**kwargs)
        self.type = None
        self.id = None
        self.address = None
        self.resource_id = None
        self.next_hop_ids = None
        self.issues = None


class ConnectivityInformation(_serialization.Model):
    """Information on the connectivity status.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar hops: List of hops between the source and the destination.
    :vartype hops: list[~azure.mgmt.network.v2018_07_01.models.ConnectivityHop]
    :ivar connection_status: The connection status. Known values are: "Unknown", "Connected",
     "Disconnected", and "Degraded".
    :vartype connection_status: str or ~azure.mgmt.network.v2018_07_01.models.ConnectionStatus
    :ivar avg_latency_in_ms: Average latency in milliseconds.
    :vartype avg_latency_in_ms: int
    :ivar min_latency_in_ms: Minimum latency in milliseconds.
    :vartype min_latency_in_ms: int
    :ivar max_latency_in_ms: Maximum latency in milliseconds.
    :vartype max_latency_in_ms: int
    :ivar probes_sent: Total number of probes sent.
    :vartype probes_sent: int
    :ivar probes_failed: Number of failed probes.
:vartype probes_failed: int """ _validation = { "hops": {"readonly": True}, "connection_status": {"readonly": True}, "avg_latency_in_ms": {"readonly": True}, "min_latency_in_ms": {"readonly": True}, "max_latency_in_ms": {"readonly": True}, "probes_sent": {"readonly": True}, "probes_failed": {"readonly": True}, } _attribute_map = { "hops": {"key": "hops", "type": "[ConnectivityHop]"}, "connection_status": {"key": "connectionStatus", "type": "str"}, "avg_latency_in_ms": {"key": "avgLatencyInMs", "type": "int"}, "min_latency_in_ms": {"key": "minLatencyInMs", "type": "int"}, "max_latency_in_ms": {"key": "maxLatencyInMs", "type": "int"}, "probes_sent": {"key": "probesSent", "type": "int"}, "probes_failed": {"key": "probesFailed", "type": "int"}, } def __init__(self, **kwargs): """ """ super().__init__(**kwargs) self.hops = None self.connection_status = None self.avg_latency_in_ms = None self.min_latency_in_ms = None self.max_latency_in_ms = None self.probes_sent = None self.probes_failed = None class ConnectivityIssue(_serialization.Model): """Information about an issue encountered in the process of checking for connectivity. Variables are only populated by the server, and will be ignored when sending a request. :ivar origin: The origin of the issue. Known values are: "Local", "Inbound", and "Outbound". :vartype origin: str or ~azure.mgmt.network.v2018_07_01.models.Origin :ivar severity: The severity of the issue. Known values are: "Error" and "Warning". :vartype severity: str or ~azure.mgmt.network.v2018_07_01.models.Severity :ivar type: The type of issue. Known values are: "Unknown", "AgentStopped", "GuestFirewall", "DnsResolution", "SocketBind", "NetworkSecurityRule", "UserDefinedRoute", "PortThrottled", and "Platform". :vartype type: str or ~azure.mgmt.network.v2018_07_01.models.IssueType :ivar context: Provides additional context on the issue. :vartype context: list[dict[str, str]] """ _validation = { "origin": {"readonly": True}, "severity": {"readonly": True}, "type": {"readonly": True}, "context": {"readonly": True}, } _attribute_map = { "origin": {"key": "origin", "type": "str"}, "severity": {"key": "severity", "type": "str"}, "type": {"key": "type", "type": "str"}, "context": {"key": "context", "type": "[{str}]"}, } def __init__(self, **kwargs): """ """ super().__init__(**kwargs) self.origin = None self.severity = None self.type = None self.context = None class ConnectivityParameters(_serialization.Model): """Parameters that determine how the connectivity check will be performed. All required parameters must be populated in order to send to Azure. :ivar source: Parameters that define the source of the connection. Required. :vartype source: ~azure.mgmt.network.v2018_07_01.models.ConnectivitySource :ivar destination: Parameters that define destination of connection. Required. :vartype destination: ~azure.mgmt.network.v2018_07_01.models.ConnectivityDestination :ivar protocol: Network protocol. Known values are: "Tcp", "Http", "Https", and "Icmp". :vartype protocol: str or ~azure.mgmt.network.v2018_07_01.models.Protocol :ivar protocol_configuration: Configuration of the protocol. 
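    :vartype protocol_configuration: ~azure.mgmt.network.v2018_07_01.models.ProtocolConfiguration

    Example (an illustrative sketch; the resource ID and address below are
    placeholders)::

        params = ConnectivityParameters(
            source=ConnectivitySource(resource_id="<source-vm-resource-id>"),
            destination=ConnectivityDestination(address="www.example.com", port=443),
            protocol="Tcp",
        )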
    """

    _validation = {
        "source": {"required": True},
        "destination": {"required": True},
    }

    _attribute_map = {
        "source": {"key": "source", "type": "ConnectivitySource"},
        "destination": {"key": "destination", "type": "ConnectivityDestination"},
        "protocol": {"key": "protocol", "type": "str"},
        "protocol_configuration": {"key": "protocolConfiguration", "type": "ProtocolConfiguration"},
    }

    def __init__(
        self,
        *,
        source: "_models.ConnectivitySource",
        destination: "_models.ConnectivityDestination",
        protocol: Optional[Union[str, "_models.Protocol"]] = None,
        protocol_configuration: Optional["_models.ProtocolConfiguration"] = None,
        **kwargs
    ):
        """
        :keyword source: Parameters that define the source of the connection. Required.
        :paramtype source: ~azure.mgmt.network.v2018_07_01.models.ConnectivitySource
        :keyword destination: Parameters that define destination of connection. Required.
        :paramtype destination: ~azure.mgmt.network.v2018_07_01.models.ConnectivityDestination
        :keyword protocol: Network protocol. Known values are: "Tcp", "Http", "Https", and "Icmp".
        :paramtype protocol: str or ~azure.mgmt.network.v2018_07_01.models.Protocol
        :keyword protocol_configuration: Configuration of the protocol.
        :paramtype protocol_configuration:
         ~azure.mgmt.network.v2018_07_01.models.ProtocolConfiguration
        """
        super().__init__(**kwargs)
        self.source = source
        self.destination = destination
        self.protocol = protocol
        self.protocol_configuration = protocol_configuration


class ConnectivitySource(_serialization.Model):
    """Parameters that define the source of the connection.

    All required parameters must be populated in order to send to Azure.

    :ivar resource_id: The ID of the resource from which a connectivity check will be initiated.
     Required.
    :vartype resource_id: str
    :ivar port: The source port from which a connectivity check will be performed.
    :vartype port: int
    """

    _validation = {
        "resource_id": {"required": True},
    }

    _attribute_map = {
        "resource_id": {"key": "resourceId", "type": "str"},
        "port": {"key": "port", "type": "int"},
    }

    def __init__(self, *, resource_id: str, port: Optional[int] = None, **kwargs):
        """
        :keyword resource_id: The ID of the resource from which a connectivity check will be
         initiated. Required.
        :paramtype resource_id: str
        :keyword port: The source port from which a connectivity check will be performed.
        :paramtype port: int
        """
        super().__init__(**kwargs)
        self.resource_id = resource_id
        self.port = port


class DdosProtectionPlan(_serialization.Model):
    """A DDoS protection plan in a resource group.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar resource_guid: The resource GUID property of the DDoS protection plan resource. It
     uniquely identifies the resource, even if the user changes its name or migrate the resource
     across subscriptions or resource groups.
    :vartype resource_guid: str
    :ivar provisioning_state: The provisioning state of the DDoS protection plan resource.
     Possible values are: 'Succeeded', 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str :ivar virtual_networks: The list of virtual networks associated with the DDoS protection plan resource. This list is read-only. :vartype virtual_networks: list[~azure.mgmt.network.v2018_07_01.models.SubResource] """ _validation = { "id": {"readonly": True}, "name": {"readonly": True}, "type": {"readonly": True}, "etag": {"readonly": True}, "resource_guid": {"readonly": True}, "provisioning_state": {"readonly": True}, "virtual_networks": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "resource_guid": {"key": "properties.resourceGuid", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "virtual_networks": {"key": "properties.virtualNetworks", "type": "[SubResource]"}, } def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, **kwargs): """ :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] """ super().__init__(**kwargs) self.id = None self.name = None self.type = None self.location = location self.tags = tags self.etag = None self.resource_guid = None self.provisioning_state = None self.virtual_networks = None class DdosProtectionPlanListResult(_serialization.Model): """A list of DDoS protection plans. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A list of DDoS protection plans. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.DdosProtectionPlan] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[DdosProtectionPlan]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.DdosProtectionPlan"]] = None, **kwargs): """ :keyword value: A list of DDoS protection plans. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.DdosProtectionPlan] """ super().__init__(**kwargs) self.value = value self.next_link = None class DeviceProperties(_serialization.Model): """List of properties of the device. :ivar device_vendor: Name of the device Vendor. :vartype device_vendor: str :ivar device_model: Model of the device. :vartype device_model: str :ivar link_speed_in_mbps: Link speed. :vartype link_speed_in_mbps: int """ _attribute_map = { "device_vendor": {"key": "deviceVendor", "type": "str"}, "device_model": {"key": "deviceModel", "type": "str"}, "link_speed_in_mbps": {"key": "linkSpeedInMbps", "type": "int"}, } def __init__( self, *, device_vendor: Optional[str] = None, device_model: Optional[str] = None, link_speed_in_mbps: Optional[int] = None, **kwargs ): """ :keyword device_vendor: Name of the device Vendor. :paramtype device_vendor: str :keyword device_model: Model of the device. :paramtype device_model: str :keyword link_speed_in_mbps: Link speed. :paramtype link_speed_in_mbps: int """ super().__init__(**kwargs) self.device_vendor = device_vendor self.device_model = device_model self.link_speed_in_mbps = link_speed_in_mbps class DhcpOptions(_serialization.Model): """DhcpOptions contains an array of DNS servers available to VMs deployed in the virtual network. Standard DHCP option for a subnet overrides VNET DHCP options. 
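    :ivar dns_servers: The list of DNS server IP addresses.
    :vartype dns_servers: list[str]

    Example (an illustrative sketch; the addresses below are placeholders)::

        dhcp_options = DhcpOptions(dns_servers=["10.0.0.4", "10.0.0.5"])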
    """

    _attribute_map = {
        "dns_servers": {"key": "dnsServers", "type": "[str]"},
    }

    def __init__(self, *, dns_servers: Optional[List[str]] = None, **kwargs):
        """
        :keyword dns_servers: The list of DNS server IP addresses.
        :paramtype dns_servers: list[str]
        """
        super().__init__(**kwargs)
        self.dns_servers = dns_servers


class Dimension(_serialization.Model):
    """Dimension of the metric.

    :ivar name: The name of the dimension.
    :vartype name: str
    :ivar display_name: The display name of the dimension.
    :vartype display_name: str
    :ivar internal_name: The internal name of the dimension.
    :vartype internal_name: str
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "display_name": {"key": "displayName", "type": "str"},
        "internal_name": {"key": "internalName", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        display_name: Optional[str] = None,
        internal_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name: The name of the dimension.
        :paramtype name: str
        :keyword display_name: The display name of the dimension.
        :paramtype display_name: str
        :keyword internal_name: The internal name of the dimension.
        :paramtype internal_name: str
        """
        super().__init__(**kwargs)
        self.name = name
        self.display_name = display_name
        self.internal_name = internal_name


class DnsNameAvailabilityResult(_serialization.Model):
    """Response for the CheckDnsNameAvailability API service call.

    :ivar available: Domain availability (True/False).
    :vartype available: bool
    """

    _attribute_map = {
        "available": {"key": "available", "type": "bool"},
    }

    def __init__(self, *, available: Optional[bool] = None, **kwargs):
        """
        :keyword available: Domain availability (True/False).
        :paramtype available: bool
        """
        super().__init__(**kwargs)
        self.available = available


class EffectiveNetworkSecurityGroup(_serialization.Model):
    """Effective network security group.

    :ivar network_security_group: The ID of network security group that is applied.
    :vartype network_security_group: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar association: Associated resources.
    :vartype association:
     ~azure.mgmt.network.v2018_07_01.models.EffectiveNetworkSecurityGroupAssociation
    :ivar effective_security_rules: A collection of effective security rules.
    :vartype effective_security_rules:
     list[~azure.mgmt.network.v2018_07_01.models.EffectiveNetworkSecurityRule]
    :ivar tag_map: Mapping of tags to list of IP Addresses included within the tag.
    :vartype tag_map: str
    """

    _attribute_map = {
        "network_security_group": {"key": "networkSecurityGroup", "type": "SubResource"},
        "association": {"key": "association", "type": "EffectiveNetworkSecurityGroupAssociation"},
        "effective_security_rules": {"key": "effectiveSecurityRules", "type": "[EffectiveNetworkSecurityRule]"},
        "tag_map": {"key": "tagMap", "type": "str"},
    }

    def __init__(
        self,
        *,
        network_security_group: Optional["_models.SubResource"] = None,
        association: Optional["_models.EffectiveNetworkSecurityGroupAssociation"] = None,
        effective_security_rules: Optional[List["_models.EffectiveNetworkSecurityRule"]] = None,
        tag_map: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword network_security_group: The ID of network security group that is applied.
        :paramtype network_security_group: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword association: Associated resources.
:paramtype association: ~azure.mgmt.network.v2018_07_01.models.EffectiveNetworkSecurityGroupAssociation :keyword effective_security_rules: A collection of effective security rules. :paramtype effective_security_rules: list[~azure.mgmt.network.v2018_07_01.models.EffectiveNetworkSecurityRule] :keyword tag_map: Mapping of tags to list of IP Addresses included within the tag. :paramtype tag_map: str """ super().__init__(**kwargs) self.network_security_group = network_security_group self.association = association self.effective_security_rules = effective_security_rules self.tag_map = tag_map class EffectiveNetworkSecurityGroupAssociation(_serialization.Model): """The effective network security group association. :ivar subnet: The ID of the subnet if assigned. :vartype subnet: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar network_interface: The ID of the network interface if assigned. :vartype network_interface: ~azure.mgmt.network.v2018_07_01.models.SubResource """ _attribute_map = { "subnet": {"key": "subnet", "type": "SubResource"}, "network_interface": {"key": "networkInterface", "type": "SubResource"}, } def __init__( self, *, subnet: Optional["_models.SubResource"] = None, network_interface: Optional["_models.SubResource"] = None, **kwargs ): """ :keyword subnet: The ID of the subnet if assigned. :paramtype subnet: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword network_interface: The ID of the network interface if assigned. :paramtype network_interface: ~azure.mgmt.network.v2018_07_01.models.SubResource """ super().__init__(**kwargs) self.subnet = subnet self.network_interface = network_interface class EffectiveNetworkSecurityGroupListResult(_serialization.Model): """Response for list effective network security groups API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A list of effective network security groups. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.EffectiveNetworkSecurityGroup] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[EffectiveNetworkSecurityGroup]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.EffectiveNetworkSecurityGroup"]] = None, **kwargs): """ :keyword value: A list of effective network security groups. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.EffectiveNetworkSecurityGroup] """ super().__init__(**kwargs) self.value = value self.next_link = None class EffectiveNetworkSecurityRule(_serialization.Model): # pylint: disable=too-many-instance-attributes """Effective network security rules. :ivar name: The name of the security rule specified by the user (if created by the user). :vartype name: str :ivar protocol: The network protocol this rule applies to. Possible values are: 'Tcp', 'Udp', and 'All'. Known values are: "Tcp", "Udp", and "All". :vartype protocol: str or ~azure.mgmt.network.v2018_07_01.models.EffectiveSecurityRuleProtocol :ivar source_port_range: The source port or range. :vartype source_port_range: str :ivar destination_port_range: The destination port or range. :vartype destination_port_range: str :ivar source_port_ranges: The source port ranges. Expected values include a single integer between 0 and 65535, a range using '-' as separator (e.g. 100-400), or an asterisk (*). 
    :vartype source_port_ranges: list[str]
    :ivar destination_port_ranges: The destination port ranges. Expected values include a single
     integer between 0 and 65535, a range using '-' as separator (e.g. 100-400), or an asterisk
     (*).
    :vartype destination_port_ranges: list[str]
    :ivar source_address_prefix: The source address prefix.
    :vartype source_address_prefix: str
    :ivar destination_address_prefix: The destination address prefix.
    :vartype destination_address_prefix: str
    :ivar source_address_prefixes: The source address prefixes. Expected values include CIDR IP
     ranges, Default Tags (VirtualNetwork, AzureLoadBalancer, Internet), System Tags, and the
     asterisk (*).
    :vartype source_address_prefixes: list[str]
    :ivar destination_address_prefixes: The destination address prefixes. Expected values include
     CIDR IP ranges, Default Tags (VirtualNetwork, AzureLoadBalancer, Internet), System Tags, and
     the asterisk (*).
    :vartype destination_address_prefixes: list[str]
    :ivar expanded_source_address_prefix: The expanded source address prefix.
    :vartype expanded_source_address_prefix: list[str]
    :ivar expanded_destination_address_prefix: Expanded destination address prefix.
    :vartype expanded_destination_address_prefix: list[str]
    :ivar access: Whether network traffic is allowed or denied. Possible values are: 'Allow' and
     'Deny'. Known values are: "Allow" and "Deny".
    :vartype access: str or ~azure.mgmt.network.v2018_07_01.models.SecurityRuleAccess
    :ivar priority: The priority of the rule.
    :vartype priority: int
    :ivar direction: The direction of the rule. Possible values are: 'Inbound' and 'Outbound'.
     Known values are: "Inbound" and "Outbound".
    :vartype direction: str or ~azure.mgmt.network.v2018_07_01.models.SecurityRuleDirection
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "protocol": {"key": "protocol", "type": "str"},
        "source_port_range": {"key": "sourcePortRange", "type": "str"},
        "destination_port_range": {"key": "destinationPortRange", "type": "str"},
        "source_port_ranges": {"key": "sourcePortRanges", "type": "[str]"},
        "destination_port_ranges": {"key": "destinationPortRanges", "type": "[str]"},
        "source_address_prefix": {"key": "sourceAddressPrefix", "type": "str"},
        "destination_address_prefix": {"key": "destinationAddressPrefix", "type": "str"},
        "source_address_prefixes": {"key": "sourceAddressPrefixes", "type": "[str]"},
        "destination_address_prefixes": {"key": "destinationAddressPrefixes", "type": "[str]"},
        "expanded_source_address_prefix": {"key": "expandedSourceAddressPrefix", "type": "[str]"},
        "expanded_destination_address_prefix": {"key": "expandedDestinationAddressPrefix", "type": "[str]"},
        "access": {"key": "access", "type": "str"},
        "priority": {"key": "priority", "type": "int"},
        "direction": {"key": "direction", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        protocol: Optional[Union[str, "_models.EffectiveSecurityRuleProtocol"]] = None,
        source_port_range: Optional[str] = None,
        destination_port_range: Optional[str] = None,
        source_port_ranges: Optional[List[str]] = None,
        destination_port_ranges: Optional[List[str]] = None,
        source_address_prefix: Optional[str] = None,
        destination_address_prefix: Optional[str] = None,
        source_address_prefixes: Optional[List[str]] = None,
        destination_address_prefixes: Optional[List[str]] = None,
        expanded_source_address_prefix: Optional[List[str]] = None,
        expanded_destination_address_prefix: Optional[List[str]] = None,
        access: Optional[Union[str, "_models.SecurityRuleAccess"]] = None,
        priority: Optional[int] = None,
        direction: Optional[Union[str, "_models.SecurityRuleDirection"]] = None,
        **kwargs
    ):
        """
        :keyword name: The name of the security rule specified by the user (if created by the
         user).
        :paramtype name: str
        :keyword protocol: The network protocol this rule applies to. Possible values are: 'Tcp',
         'Udp', and 'All'. Known values are: "Tcp", "Udp", and "All".
        :paramtype protocol: str or
         ~azure.mgmt.network.v2018_07_01.models.EffectiveSecurityRuleProtocol
        :keyword source_port_range: The source port or range.
        :paramtype source_port_range: str
        :keyword destination_port_range: The destination port or range.
        :paramtype destination_port_range: str
        :keyword source_port_ranges: The source port ranges. Expected values include a single
         integer between 0 and 65535, a range using '-' as separator (e.g. 100-400), or an asterisk
         (*).
        :paramtype source_port_ranges: list[str]
        :keyword destination_port_ranges: The destination port ranges. Expected values include a
         single integer between 0 and 65535, a range using '-' as separator (e.g. 100-400), or an
         asterisk (*).
        :paramtype destination_port_ranges: list[str]
        :keyword source_address_prefix: The source address prefix.
        :paramtype source_address_prefix: str
        :keyword destination_address_prefix: The destination address prefix.
        :paramtype destination_address_prefix: str
        :keyword source_address_prefixes: The source address prefixes. Expected values include CIDR
         IP ranges, Default Tags (VirtualNetwork, AzureLoadBalancer, Internet), System Tags, and
         the asterisk (*).
        :paramtype source_address_prefixes: list[str]
        :keyword destination_address_prefixes: The destination address prefixes. Expected values
         include CIDR IP ranges, Default Tags (VirtualNetwork, AzureLoadBalancer, Internet), System
         Tags, and the asterisk (*).
        :paramtype destination_address_prefixes: list[str]
        :keyword expanded_source_address_prefix: The expanded source address prefix.
        :paramtype expanded_source_address_prefix: list[str]
        :keyword expanded_destination_address_prefix: Expanded destination address prefix.
        :paramtype expanded_destination_address_prefix: list[str]
        :keyword access: Whether network traffic is allowed or denied. Possible values are: 'Allow'
         and 'Deny'. Known values are: "Allow" and "Deny".
        :paramtype access: str or ~azure.mgmt.network.v2018_07_01.models.SecurityRuleAccess
        :keyword priority: The priority of the rule.
        :paramtype priority: int
        :keyword direction: The direction of the rule. Possible values are: 'Inbound' and
         'Outbound'. Known values are: "Inbound" and "Outbound".
        :paramtype direction: str or ~azure.mgmt.network.v2018_07_01.models.SecurityRuleDirection
        """
        super().__init__(**kwargs)
        self.name = name
        self.protocol = protocol
        self.source_port_range = source_port_range
        self.destination_port_range = destination_port_range
        self.source_port_ranges = source_port_ranges
        self.destination_port_ranges = destination_port_ranges
        self.source_address_prefix = source_address_prefix
        self.destination_address_prefix = destination_address_prefix
        self.source_address_prefixes = source_address_prefixes
        self.destination_address_prefixes = destination_address_prefixes
        self.expanded_source_address_prefix = expanded_source_address_prefix
        self.expanded_destination_address_prefix = expanded_destination_address_prefix
        self.access = access
        self.priority = priority
        self.direction = direction


class EffectiveRoute(_serialization.Model):
    """Effective Route.

    :ivar name: The name of the user defined route. This is optional.
    :vartype name: str
    :ivar source: Who created the route.
Possible values are: 'Unknown', 'User', 'VirtualNetworkGateway', and 'Default'. Known values are: "Unknown", "User", "VirtualNetworkGateway", and "Default". :vartype source: str or ~azure.mgmt.network.v2018_07_01.models.EffectiveRouteSource :ivar state: The value of effective route. Possible values are: 'Active' and 'Invalid'. Known values are: "Active" and "Invalid". :vartype state: str or ~azure.mgmt.network.v2018_07_01.models.EffectiveRouteState :ivar address_prefix: The address prefixes of the effective routes in CIDR notation. :vartype address_prefix: list[str] :ivar next_hop_ip_address: The IP address of the next hop of the effective route. :vartype next_hop_ip_address: list[str] :ivar next_hop_type: The type of Azure hop the packet should be sent to. Possible values are: 'VirtualNetworkGateway', 'VnetLocal', 'Internet', 'VirtualAppliance', and 'None'. Known values are: "VirtualNetworkGateway", "VnetLocal", "Internet", "VirtualAppliance", and "None". :vartype next_hop_type: str or ~azure.mgmt.network.v2018_07_01.models.RouteNextHopType """ _attribute_map = { "name": {"key": "name", "type": "str"}, "source": {"key": "source", "type": "str"}, "state": {"key": "state", "type": "str"}, "address_prefix": {"key": "addressPrefix", "type": "[str]"}, "next_hop_ip_address": {"key": "nextHopIpAddress", "type": "[str]"}, "next_hop_type": {"key": "nextHopType", "type": "str"}, } def __init__( self, *, name: Optional[str] = None, source: Optional[Union[str, "_models.EffectiveRouteSource"]] = None, state: Optional[Union[str, "_models.EffectiveRouteState"]] = None, address_prefix: Optional[List[str]] = None, next_hop_ip_address: Optional[List[str]] = None, next_hop_type: Optional[Union[str, "_models.RouteNextHopType"]] = None, **kwargs ): """ :keyword name: The name of the user defined route. This is optional. :paramtype name: str :keyword source: Who created the route. Possible values are: 'Unknown', 'User', 'VirtualNetworkGateway', and 'Default'. Known values are: "Unknown", "User", "VirtualNetworkGateway", and "Default". :paramtype source: str or ~azure.mgmt.network.v2018_07_01.models.EffectiveRouteSource :keyword state: The value of effective route. Possible values are: 'Active' and 'Invalid'. Known values are: "Active" and "Invalid". :paramtype state: str or ~azure.mgmt.network.v2018_07_01.models.EffectiveRouteState :keyword address_prefix: The address prefixes of the effective routes in CIDR notation. :paramtype address_prefix: list[str] :keyword next_hop_ip_address: The IP address of the next hop of the effective route. :paramtype next_hop_ip_address: list[str] :keyword next_hop_type: The type of Azure hop the packet should be sent to. Possible values are: 'VirtualNetworkGateway', 'VnetLocal', 'Internet', 'VirtualAppliance', and 'None'. Known values are: "VirtualNetworkGateway", "VnetLocal", "Internet", "VirtualAppliance", and "None". :paramtype next_hop_type: str or ~azure.mgmt.network.v2018_07_01.models.RouteNextHopType """ super().__init__(**kwargs) self.name = name self.source = source self.state = state self.address_prefix = address_prefix self.next_hop_ip_address = next_hop_ip_address self.next_hop_type = next_hop_type class EffectiveRouteListResult(_serialization.Model): """Response for list effective route API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A list of effective routes. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.EffectiveRoute] :ivar next_link: The URL to get the next set of results. 
:vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[EffectiveRoute]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.EffectiveRoute"]] = None, **kwargs): """ :keyword value: A list of effective routes. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.EffectiveRoute] """ super().__init__(**kwargs) self.value = value self.next_link = None class EndpointServiceResult(SubResource): """Endpoint service. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Name of the endpoint service. :vartype name: str :ivar type: Type of the endpoint service. :vartype type: str """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, } def __init__(self, *, id: Optional[str] = None, **kwargs): # pylint: disable=redefined-builtin """ :keyword id: Resource ID. :paramtype id: str """ super().__init__(id=id, **kwargs) self.name = None self.type = None class EndpointServicesListResult(_serialization.Model): """Response for the ListAvailableEndpointServices API service call. :ivar value: List of available endpoint services in a region. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.EndpointServiceResult] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[EndpointServiceResult]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.EndpointServiceResult"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: List of available endpoint services in a region. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.EndpointServiceResult] :keyword next_link: The URL to get the next set of results. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class Error(_serialization.Model): """Error. :ivar code: :vartype code: str :ivar message: :vartype message: str :ivar target: :vartype target: str :ivar details: :vartype details: list[~azure.mgmt.network.v2018_07_01.models.ErrorDetails] :ivar inner_error: :vartype inner_error: str """ _attribute_map = { "code": {"key": "code", "type": "str"}, "message": {"key": "message", "type": "str"}, "target": {"key": "target", "type": "str"}, "details": {"key": "details", "type": "[ErrorDetails]"}, "inner_error": {"key": "innerError", "type": "str"}, } def __init__( self, *, code: Optional[str] = None, message: Optional[str] = None, target: Optional[str] = None, details: Optional[List["_models.ErrorDetails"]] = None, inner_error: Optional[str] = None, **kwargs ): """ :keyword code: :paramtype code: str :keyword message: :paramtype message: str :keyword target: :paramtype target: str :keyword details: :paramtype details: list[~azure.mgmt.network.v2018_07_01.models.ErrorDetails] :keyword inner_error: :paramtype inner_error: str """ super().__init__(**kwargs) self.code = code self.message = message self.target = target self.details = details self.inner_error = inner_error class ErrorDetails(_serialization.Model): """ErrorDetails. 
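A minimal construction sketch (illustrative only; the field values shown are hypothetical, not API output)::

    from azure.mgmt.network.v2018_07_01.models import ErrorDetails

    # hypothetical values for illustration
    detail = ErrorDetails(code="ResourceNotFound", target="subnet1", message="The referenced resource could not be found.")
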
:ivar code: :vartype code: str :ivar target: :vartype target: str :ivar message: :vartype message: str """ _attribute_map = { "code": {"key": "code", "type": "str"}, "target": {"key": "target", "type": "str"}, "message": {"key": "message", "type": "str"}, } def __init__( self, *, code: Optional[str] = None, target: Optional[str] = None, message: Optional[str] = None, **kwargs ): """ :keyword code: :paramtype code: str :keyword target: :paramtype target: str :keyword message: :paramtype message: str """ super().__init__(**kwargs) self.code = code self.target = target self.message = message class ErrorResponse(_serialization.Model): """The error object. :ivar error: :vartype error: ~azure.mgmt.network.v2018_07_01.models.ErrorDetails """ _attribute_map = { "error": {"key": "error", "type": "ErrorDetails"}, } def __init__(self, *, error: Optional["_models.ErrorDetails"] = None, **kwargs): """ :keyword error: :paramtype error: ~azure.mgmt.network.v2018_07_01.models.ErrorDetails """ super().__init__(**kwargs) self.error = error class EvaluatedNetworkSecurityGroup(_serialization.Model): """Results of network security group evaluation. Variables are only populated by the server, and will be ignored when sending a request. :ivar network_security_group_id: Network security group ID. :vartype network_security_group_id: str :ivar matched_rule: Matched rule. :vartype matched_rule: ~azure.mgmt.network.v2018_07_01.models.MatchedRule :ivar rules_evaluation_result: List of network security rules evaluation results. :vartype rules_evaluation_result: list[~azure.mgmt.network.v2018_07_01.models.NetworkSecurityRulesEvaluationResult] """ _validation = { "rules_evaluation_result": {"readonly": True}, } _attribute_map = { "network_security_group_id": {"key": "networkSecurityGroupId", "type": "str"}, "matched_rule": {"key": "matchedRule", "type": "MatchedRule"}, "rules_evaluation_result": {"key": "rulesEvaluationResult", "type": "[NetworkSecurityRulesEvaluationResult]"}, } def __init__( self, *, network_security_group_id: Optional[str] = None, matched_rule: Optional["_models.MatchedRule"] = None, **kwargs ): """ :keyword network_security_group_id: Network security group ID. :paramtype network_security_group_id: str :keyword matched_rule: Matched rule. :paramtype matched_rule: ~azure.mgmt.network.v2018_07_01.models.MatchedRule """ super().__init__(**kwargs) self.network_security_group_id = network_security_group_id self.matched_rule = matched_rule self.rules_evaluation_result = None class ExpressRouteCircuit(Resource): # pylint: disable=too-many-instance-attributes """ExpressRouteCircuit resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar sku: The SKU. :vartype sku: ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitSku :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar allow_classic_operations: Allow classic operations. :vartype allow_classic_operations: bool :ivar circuit_provisioning_state: The CircuitProvisioningState state of the resource. :vartype circuit_provisioning_state: str :ivar service_provider_provisioning_state: The ServiceProviderProvisioningState state of the resource. 
Possible values are 'NotProvisioned', 'Provisioning', 'Provisioned', and 'Deprovisioning'. Known values are: "NotProvisioned", "Provisioning", "Provisioned", and "Deprovisioning". :vartype service_provider_provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ServiceProviderProvisioningState :ivar authorizations: The list of authorizations. :vartype authorizations: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitAuthorization] :ivar peerings: The list of peerings. :vartype peerings: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeering] :ivar service_key: The ServiceKey. :vartype service_key: str :ivar service_provider_notes: The ServiceProviderNotes. :vartype service_provider_notes: str :ivar service_provider_properties: The ServiceProviderProperties. :vartype service_provider_properties: ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitServiceProviderProperties :ivar provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str :ivar gateway_manager_etag: The GatewayManager Etag. :vartype gateway_manager_etag: str :ivar allow_global_reach: Flag to enable Global Reach on the circuit. :vartype allow_global_reach: bool """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "etag": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "sku": {"key": "sku", "type": "ExpressRouteCircuitSku"}, "etag": {"key": "etag", "type": "str"}, "allow_classic_operations": {"key": "properties.allowClassicOperations", "type": "bool"}, "circuit_provisioning_state": {"key": "properties.circuitProvisioningState", "type": "str"}, "service_provider_provisioning_state": {"key": "properties.serviceProviderProvisioningState", "type": "str"}, "authorizations": {"key": "properties.authorizations", "type": "[ExpressRouteCircuitAuthorization]"}, "peerings": {"key": "properties.peerings", "type": "[ExpressRouteCircuitPeering]"}, "service_key": {"key": "properties.serviceKey", "type": "str"}, "service_provider_notes": {"key": "properties.serviceProviderNotes", "type": "str"}, "service_provider_properties": { "key": "properties.serviceProviderProperties", "type": "ExpressRouteCircuitServiceProviderProperties", }, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "gateway_manager_etag": {"key": "properties.gatewayManagerEtag", "type": "str"}, "allow_global_reach": {"key": "properties.allowGlobalReach", "type": "bool"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, sku: Optional["_models.ExpressRouteCircuitSku"] = None, allow_classic_operations: Optional[bool] = None, circuit_provisioning_state: Optional[str] = None, service_provider_provisioning_state: Optional[Union[str, "_models.ServiceProviderProvisioningState"]] = None, authorizations: Optional[List["_models.ExpressRouteCircuitAuthorization"]] = None, peerings: Optional[List["_models.ExpressRouteCircuitPeering"]] = None, service_key: Optional[str] = None, service_provider_notes: Optional[str] = None, service_provider_properties: Optional["_models.ExpressRouteCircuitServiceProviderProperties"] = None, provisioning_state: Optional[str] = None, gateway_manager_etag: 
Optional[str] = None, allow_global_reach: Optional[bool] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword sku: The SKU. :paramtype sku: ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitSku :keyword allow_classic_operations: Allow classic operations. :paramtype allow_classic_operations: bool :keyword circuit_provisioning_state: The CircuitProvisioningState state of the resource. :paramtype circuit_provisioning_state: str :keyword service_provider_provisioning_state: The ServiceProviderProvisioningState state of the resource. Possible values are 'NotProvisioned', 'Provisioning', 'Provisioned', and 'Deprovisioning'. Known values are: "NotProvisioned", "Provisioning", "Provisioned", and "Deprovisioning". :paramtype service_provider_provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ServiceProviderProvisioningState :keyword authorizations: The list of authorizations. :paramtype authorizations: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitAuthorization] :keyword peerings: The list of peerings. :paramtype peerings: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeering] :keyword service_key: The ServiceKey. :paramtype service_key: str :keyword service_provider_notes: The ServiceProviderNotes. :paramtype service_provider_notes: str :keyword service_provider_properties: The ServiceProviderProperties. :paramtype service_provider_properties: ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitServiceProviderProperties :keyword provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str :keyword gateway_manager_etag: The GatewayManager Etag. :paramtype gateway_manager_etag: str :keyword allow_global_reach: Flag to enable Global Reach on the circuit. :paramtype allow_global_reach: bool """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.sku = sku self.etag = None self.allow_classic_operations = allow_classic_operations self.circuit_provisioning_state = circuit_provisioning_state self.service_provider_provisioning_state = service_provider_provisioning_state self.authorizations = authorizations self.peerings = peerings self.service_key = service_key self.service_provider_notes = service_provider_notes self.service_provider_properties = service_provider_properties self.provisioning_state = provisioning_state self.gateway_manager_etag = gateway_manager_etag self.allow_global_reach = allow_global_reach class ExpressRouteCircuitArpTable(_serialization.Model): """The ARP table associated with the ExpressRouteCircuit. :ivar age: Entry age in minutes. :vartype age: int :ivar interface: Interface address. :vartype interface: str :ivar ip_address: The IP address. :vartype ip_address: str :ivar mac_address: The MAC address. :vartype mac_address: str """ _attribute_map = { "age": {"key": "age", "type": "int"}, "interface": {"key": "interface", "type": "str"}, "ip_address": {"key": "ipAddress", "type": "str"}, "mac_address": {"key": "macAddress", "type": "str"}, } def __init__( self, *, age: Optional[int] = None, interface: Optional[str] = None, ip_address: Optional[str] = None, mac_address: Optional[str] = None, **kwargs ): """ :keyword age: Entry age in minutes. :paramtype age: int :keyword interface: Interface address. 
:paramtype interface: str :keyword ip_address: The IP address. :paramtype ip_address: str :keyword mac_address: The MAC address. :paramtype mac_address: str """ super().__init__(**kwargs) self.age = age self.interface = interface self.ip_address = ip_address self.mac_address = mac_address class ExpressRouteCircuitAuthorization(SubResource): """Authorization in an ExpressRouteCircuit resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar authorization_key: The authorization key. :vartype authorization_key: str :ivar authorization_use_status: AuthorizationUseStatus. Possible values are: 'Available' and 'InUse'. Known values are: "Available" and "InUse". :vartype authorization_use_status: str or ~azure.mgmt.network.v2018_07_01.models.AuthorizationUseStatus :ivar provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _validation = { "etag": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "authorization_key": {"key": "properties.authorizationKey", "type": "str"}, "authorization_use_status": {"key": "properties.authorizationUseStatus", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, authorization_key: Optional[str] = None, authorization_use_status: Optional[Union[str, "_models.AuthorizationUseStatus"]] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword authorization_key: The authorization key. :paramtype authorization_key: str :keyword authorization_use_status: AuthorizationUseStatus. Possible values are: 'Available' and 'InUse'. Known values are: "Available" and "InUse". :paramtype authorization_use_status: str or ~azure.mgmt.network.v2018_07_01.models.AuthorizationUseStatus :keyword provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = None self.authorization_key = authorization_key self.authorization_use_status = authorization_use_status self.provisioning_state = provisioning_state class ExpressRouteCircuitConnection(SubResource): """Express Route Circuit Connection in an ExpressRouteCircuitPeering resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. 
:vartype etag: str :ivar express_route_circuit_peering: Reference to Express Route Circuit Private Peering Resource of the circuit initiating connection. :vartype express_route_circuit_peering: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar peer_express_route_circuit_peering: Reference to Express Route Circuit Private Peering Resource of the peered circuit. :vartype peer_express_route_circuit_peering: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar address_prefix: /29 IP address space to carve out Customer addresses for tunnels. :vartype address_prefix: str :ivar authorization_key: The authorization key. :vartype authorization_key: str :ivar circuit_connection_status: Express Route Circuit Connection State. Possible values are: 'Connected', 'Connecting', and 'Disconnected'. Known values are: "Connected", "Connecting", and "Disconnected". :vartype circuit_connection_status: str or ~azure.mgmt.network.v2018_07_01.models.CircuitConnectionStatus :ivar provisioning_state: Provisioning state of the circuit connection resource. Possible values are: 'Succeeded', 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _validation = { "etag": {"readonly": True}, "circuit_connection_status": {"readonly": True}, "provisioning_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "express_route_circuit_peering": {"key": "properties.expressRouteCircuitPeering", "type": "SubResource"}, "peer_express_route_circuit_peering": { "key": "properties.peerExpressRouteCircuitPeering", "type": "SubResource", }, "address_prefix": {"key": "properties.addressPrefix", "type": "str"}, "authorization_key": {"key": "properties.authorizationKey", "type": "str"}, "circuit_connection_status": {"key": "properties.circuitConnectionStatus", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, express_route_circuit_peering: Optional["_models.SubResource"] = None, peer_express_route_circuit_peering: Optional["_models.SubResource"] = None, address_prefix: Optional[str] = None, authorization_key: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword express_route_circuit_peering: Reference to Express Route Circuit Private Peering Resource of the circuit initiating connection. :paramtype express_route_circuit_peering: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword peer_express_route_circuit_peering: Reference to Express Route Circuit Private Peering Resource of the peered circuit. :paramtype peer_express_route_circuit_peering: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword address_prefix: /29 IP address space to carve out Customer addresses for tunnels. :paramtype address_prefix: str :keyword authorization_key: The authorization key. 
:paramtype authorization_key: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = None self.express_route_circuit_peering = express_route_circuit_peering self.peer_express_route_circuit_peering = peer_express_route_circuit_peering self.address_prefix = address_prefix self.authorization_key = authorization_key self.circuit_connection_status = None self.provisioning_state = None class ExpressRouteCircuitListResult(_serialization.Model): """Response for ListExpressRouteCircuit API service call. :ivar value: A list of ExpressRouteCircuits in a resource group. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuit] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[ExpressRouteCircuit]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.ExpressRouteCircuit"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: A list of ExpressRouteCircuits in a resource group. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuit] :keyword next_link: The URL to get the next set of results. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class ExpressRouteCircuitPeering(SubResource): # pylint: disable=too-many-instance-attributes """Peering in an ExpressRouteCircuit resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar peering_type: The peering type. Known values are: "AzurePublicPeering", "AzurePrivatePeering", and "MicrosoftPeering". :vartype peering_type: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRoutePeeringType :ivar state: The peering state. Known values are: "Disabled" and "Enabled". :vartype state: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRoutePeeringState :ivar azure_asn: The Azure ASN. :vartype azure_asn: int :ivar peer_asn: The peer ASN. :vartype peer_asn: int :ivar primary_peer_address_prefix: The primary address prefix. :vartype primary_peer_address_prefix: str :ivar secondary_peer_address_prefix: The secondary address prefix. :vartype secondary_peer_address_prefix: str :ivar primary_azure_port: The primary port. :vartype primary_azure_port: str :ivar secondary_azure_port: The secondary port. :vartype secondary_azure_port: str :ivar shared_key: The shared key. :vartype shared_key: str :ivar vlan_id: The VLAN ID. :vartype vlan_id: int :ivar microsoft_peering_config: The Microsoft peering configuration. :vartype microsoft_peering_config: ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeeringConfig :ivar stats: Gets peering stats. :vartype stats: ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitStats :ivar provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str :ivar gateway_manager_etag: The GatewayManager Etag. :vartype gateway_manager_etag: str :ivar last_modified_by: Gets whether the provider or the customer last modified the peering. :vartype last_modified_by: str :ivar route_filter: The reference of the RouteFilter resource. 
:vartype route_filter: ~azure.mgmt.network.v2018_07_01.models.RouteFilter :ivar ipv6_peering_config: The IPv6 peering configuration. :vartype ipv6_peering_config: ~azure.mgmt.network.v2018_07_01.models.Ipv6ExpressRouteCircuitPeeringConfig :ivar connections: The list of circuit connections associated with Azure Private Peering for this circuit. :vartype connections: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitConnection] """ _validation = { "etag": {"readonly": True}, "peer_asn": {"maximum": 4294967295, "minimum": 1}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "peering_type": {"key": "properties.peeringType", "type": "str"}, "state": {"key": "properties.state", "type": "str"}, "azure_asn": {"key": "properties.azureASN", "type": "int"}, "peer_asn": {"key": "properties.peerASN", "type": "int"}, "primary_peer_address_prefix": {"key": "properties.primaryPeerAddressPrefix", "type": "str"}, "secondary_peer_address_prefix": {"key": "properties.secondaryPeerAddressPrefix", "type": "str"}, "primary_azure_port": {"key": "properties.primaryAzurePort", "type": "str"}, "secondary_azure_port": {"key": "properties.secondaryAzurePort", "type": "str"}, "shared_key": {"key": "properties.sharedKey", "type": "str"}, "vlan_id": {"key": "properties.vlanId", "type": "int"}, "microsoft_peering_config": { "key": "properties.microsoftPeeringConfig", "type": "ExpressRouteCircuitPeeringConfig", }, "stats": {"key": "properties.stats", "type": "ExpressRouteCircuitStats"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "gateway_manager_etag": {"key": "properties.gatewayManagerEtag", "type": "str"}, "last_modified_by": {"key": "properties.lastModifiedBy", "type": "str"}, "route_filter": {"key": "properties.routeFilter", "type": "RouteFilter"}, "ipv6_peering_config": {"key": "properties.ipv6PeeringConfig", "type": "Ipv6ExpressRouteCircuitPeeringConfig"}, "connections": {"key": "properties.connections", "type": "[ExpressRouteCircuitConnection]"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, peering_type: Optional[Union[str, "_models.ExpressRoutePeeringType"]] = None, state: Optional[Union[str, "_models.ExpressRoutePeeringState"]] = None, azure_asn: Optional[int] = None, peer_asn: Optional[int] = None, primary_peer_address_prefix: Optional[str] = None, secondary_peer_address_prefix: Optional[str] = None, primary_azure_port: Optional[str] = None, secondary_azure_port: Optional[str] = None, shared_key: Optional[str] = None, vlan_id: Optional[int] = None, microsoft_peering_config: Optional["_models.ExpressRouteCircuitPeeringConfig"] = None, stats: Optional["_models.ExpressRouteCircuitStats"] = None, provisioning_state: Optional[str] = None, gateway_manager_etag: Optional[str] = None, last_modified_by: Optional[str] = None, route_filter: Optional["_models.RouteFilter"] = None, ipv6_peering_config: Optional["_models.Ipv6ExpressRouteCircuitPeeringConfig"] = None, connections: Optional[List["_models.ExpressRouteCircuitConnection"]] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword peering_type: The peering type. Known values are: "AzurePublicPeering", "AzurePrivatePeering", and "MicrosoftPeering". 
:paramtype peering_type: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRoutePeeringType :keyword state: The peering state. Known values are: "Disabled" and "Enabled". :paramtype state: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRoutePeeringState :keyword azure_asn: The Azure ASN. :paramtype azure_asn: int :keyword peer_asn: The peer ASN. :paramtype peer_asn: int :keyword primary_peer_address_prefix: The primary address prefix. :paramtype primary_peer_address_prefix: str :keyword secondary_peer_address_prefix: The secondary address prefix. :paramtype secondary_peer_address_prefix: str :keyword primary_azure_port: The primary port. :paramtype primary_azure_port: str :keyword secondary_azure_port: The secondary port. :paramtype secondary_azure_port: str :keyword shared_key: The shared key. :paramtype shared_key: str :keyword vlan_id: The VLAN ID. :paramtype vlan_id: int :keyword microsoft_peering_config: The Microsoft peering configuration. :paramtype microsoft_peering_config: ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeeringConfig :keyword stats: Gets peering stats. :paramtype stats: ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitStats :keyword provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str :keyword gateway_manager_etag: The GatewayManager Etag. :paramtype gateway_manager_etag: str :keyword last_modified_by: Gets whether the provider or the customer last modified the peering. :paramtype last_modified_by: str :keyword route_filter: The reference of the RouteFilter resource. :paramtype route_filter: ~azure.mgmt.network.v2018_07_01.models.RouteFilter :keyword ipv6_peering_config: The IPv6 peering configuration. :paramtype ipv6_peering_config: ~azure.mgmt.network.v2018_07_01.models.Ipv6ExpressRouteCircuitPeeringConfig :keyword connections: The list of circuit connections associated with Azure Private Peering for this circuit. :paramtype connections: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitConnection] """ super().__init__(id=id, **kwargs) self.name = name self.etag = None self.peering_type = peering_type self.state = state self.azure_asn = azure_asn self.peer_asn = peer_asn self.primary_peer_address_prefix = primary_peer_address_prefix self.secondary_peer_address_prefix = secondary_peer_address_prefix self.primary_azure_port = primary_azure_port self.secondary_azure_port = secondary_azure_port self.shared_key = shared_key self.vlan_id = vlan_id self.microsoft_peering_config = microsoft_peering_config self.stats = stats self.provisioning_state = provisioning_state self.gateway_manager_etag = gateway_manager_etag self.last_modified_by = last_modified_by self.route_filter = route_filter self.ipv6_peering_config = ipv6_peering_config self.connections = connections class ExpressRouteCircuitPeeringConfig(_serialization.Model): """Specifies the peering configuration. :ivar advertised_public_prefixes: The reference of AdvertisedPublicPrefixes. :vartype advertised_public_prefixes: list[str] :ivar advertised_communities: The communities of bgp peering. Specified for microsoft peering. :vartype advertised_communities: list[str] :ivar advertised_public_prefixes_state: AdvertisedPublicPrefixState of the Peering resource. Possible values are 'NotConfigured', 'Configuring', 'Configured', and 'ValidationNeeded'. Known values are: "NotConfigured", "Configuring", "Configured", and "ValidationNeeded". 
:vartype advertised_public_prefixes_state: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeeringAdvertisedPublicPrefixState :ivar legacy_mode: The legacy mode of the peering. :vartype legacy_mode: int :ivar customer_asn: The CustomerASN of the peering. :vartype customer_asn: int :ivar routing_registry_name: The RoutingRegistryName of the configuration. :vartype routing_registry_name: str """ _attribute_map = { "advertised_public_prefixes": {"key": "advertisedPublicPrefixes", "type": "[str]"}, "advertised_communities": {"key": "advertisedCommunities", "type": "[str]"}, "advertised_public_prefixes_state": {"key": "advertisedPublicPrefixesState", "type": "str"}, "legacy_mode": {"key": "legacyMode", "type": "int"}, "customer_asn": {"key": "customerASN", "type": "int"}, "routing_registry_name": {"key": "routingRegistryName", "type": "str"}, } def __init__( self, *, advertised_public_prefixes: Optional[List[str]] = None, advertised_communities: Optional[List[str]] = None, advertised_public_prefixes_state: Optional[ Union[str, "_models.ExpressRouteCircuitPeeringAdvertisedPublicPrefixState"] ] = None, legacy_mode: Optional[int] = None, customer_asn: Optional[int] = None, routing_registry_name: Optional[str] = None, **kwargs ): """ :keyword advertised_public_prefixes: The reference of AdvertisedPublicPrefixes. :paramtype advertised_public_prefixes: list[str] :keyword advertised_communities: The communities of bgp peering. Specified for microsoft peering. :paramtype advertised_communities: list[str] :keyword advertised_public_prefixes_state: AdvertisedPublicPrefixState of the Peering resource. Possible values are 'NotConfigured', 'Configuring', 'Configured', and 'ValidationNeeded'. Known values are: "NotConfigured", "Configuring", "Configured", and "ValidationNeeded". :paramtype advertised_public_prefixes_state: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeeringAdvertisedPublicPrefixState :keyword legacy_mode: The legacy mode of the peering. :paramtype legacy_mode: int :keyword customer_asn: The CustomerASN of the peering. :paramtype customer_asn: int :keyword routing_registry_name: The RoutingRegistryName of the configuration. :paramtype routing_registry_name: str """ super().__init__(**kwargs) self.advertised_public_prefixes = advertised_public_prefixes self.advertised_communities = advertised_communities self.advertised_public_prefixes_state = advertised_public_prefixes_state self.legacy_mode = legacy_mode self.customer_asn = customer_asn self.routing_registry_name = routing_registry_name class ExpressRouteCircuitPeeringListResult(_serialization.Model): """Response for ListPeering API service call retrieves all peerings that belong to an ExpressRouteCircuit. :ivar value: The peerings in an express route circuit. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeering] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[ExpressRouteCircuitPeering]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.ExpressRouteCircuitPeering"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: The peerings in an express route circuit. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeering] :keyword next_link: The URL to get the next set of results. 
:paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class ExpressRouteCircuitReference(_serialization.Model): """ExpressRouteCircuitReference. :ivar id: Corresponding Express Route Circuit Id. :vartype id: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, } def __init__(self, *, id: Optional[str] = None, **kwargs): # pylint: disable=redefined-builtin """ :keyword id: Corresponding Express Route Circuit Id. :paramtype id: str """ super().__init__(**kwargs) self.id = id class ExpressRouteCircuitRoutesTable(_serialization.Model): """The routes table associated with the ExpressRouteCircuit. :ivar network: IP address of a network entity. :vartype network: str :ivar next_hop: NextHop address. :vartype next_hop: str :ivar loc_prf: Local preference value as set with the set local-preference route-map configuration command. :vartype loc_prf: str :ivar weight: Route Weight. :vartype weight: int :ivar path: Autonomous system paths to the destination network. :vartype path: str """ _attribute_map = { "network": {"key": "network", "type": "str"}, "next_hop": {"key": "nextHop", "type": "str"}, "loc_prf": {"key": "locPrf", "type": "str"}, "weight": {"key": "weight", "type": "int"}, "path": {"key": "path", "type": "str"}, } def __init__( self, *, network: Optional[str] = None, next_hop: Optional[str] = None, loc_prf: Optional[str] = None, weight: Optional[int] = None, path: Optional[str] = None, **kwargs ): """ :keyword network: IP address of a network entity. :paramtype network: str :keyword next_hop: NextHop address. :paramtype next_hop: str :keyword loc_prf: Local preference value as set with the set local-preference route-map configuration command. :paramtype loc_prf: str :keyword weight: Route Weight. :paramtype weight: int :keyword path: Autonomous system paths to the destination network. :paramtype path: str """ super().__init__(**kwargs) self.network = network self.next_hop = next_hop self.loc_prf = loc_prf self.weight = weight self.path = path class ExpressRouteCircuitRoutesTableSummary(_serialization.Model): """The routes table associated with the ExpressRouteCircuit. :ivar neighbor: IP address of the neighbor. :vartype neighbor: str :ivar v: BGP version number spoken to the neighbor. :vartype v: int :ivar as_property: Autonomous system number. :vartype as_property: int :ivar up_down: The length of time that the BGP session has been in the Established state, or the current status if not in the Established state. :vartype up_down: str :ivar state_pfx_rcd: Current state of the BGP session, and the number of prefixes that have been received from a neighbor or peer group. :vartype state_pfx_rcd: str """ _attribute_map = { "neighbor": {"key": "neighbor", "type": "str"}, "v": {"key": "v", "type": "int"}, "as_property": {"key": "as", "type": "int"}, "up_down": {"key": "upDown", "type": "str"}, "state_pfx_rcd": {"key": "statePfxRcd", "type": "str"}, } def __init__( self, *, neighbor: Optional[str] = None, v: Optional[int] = None, as_property: Optional[int] = None, up_down: Optional[str] = None, state_pfx_rcd: Optional[str] = None, **kwargs ): """ :keyword neighbor: IP address of the neighbor. :paramtype neighbor: str :keyword v: BGP version number spoken to the neighbor. :paramtype v: int :keyword as_property: Autonomous system number. :paramtype as_property: int :keyword up_down: The length of time that the BGP session has been in the Established state, or the current status if not in the Established state. 
:paramtype up_down: str :keyword state_pfx_rcd: Current state of the BGP session, and the number of prefixes that have been received from a neighbor or peer group. :paramtype state_pfx_rcd: str """ super().__init__(**kwargs) self.neighbor = neighbor self.v = v self.as_property = as_property self.up_down = up_down self.state_pfx_rcd = state_pfx_rcd class ExpressRouteCircuitsArpTableListResult(_serialization.Model): """Response for ListArpTable associated with the Express Route Circuits API. :ivar value: Gets list of the ARP table. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitArpTable] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[ExpressRouteCircuitArpTable]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.ExpressRouteCircuitArpTable"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: Gets list of the ARP table. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitArpTable] :keyword next_link: The URL to get the next set of results. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class ExpressRouteCircuitServiceProviderProperties(_serialization.Model): """Contains ServiceProviderProperties in an ExpressRouteCircuit. :ivar service_provider_name: The serviceProviderName. :vartype service_provider_name: str :ivar peering_location: The peering location. :vartype peering_location: str :ivar bandwidth_in_mbps: The BandwidthInMbps. :vartype bandwidth_in_mbps: int """ _attribute_map = { "service_provider_name": {"key": "serviceProviderName", "type": "str"}, "peering_location": {"key": "peeringLocation", "type": "str"}, "bandwidth_in_mbps": {"key": "bandwidthInMbps", "type": "int"}, } def __init__( self, *, service_provider_name: Optional[str] = None, peering_location: Optional[str] = None, bandwidth_in_mbps: Optional[int] = None, **kwargs ): """ :keyword service_provider_name: The serviceProviderName. :paramtype service_provider_name: str :keyword peering_location: The peering location. :paramtype peering_location: str :keyword bandwidth_in_mbps: The BandwidthInMbps. :paramtype bandwidth_in_mbps: int """ super().__init__(**kwargs) self.service_provider_name = service_provider_name self.peering_location = peering_location self.bandwidth_in_mbps = bandwidth_in_mbps class ExpressRouteCircuitSku(_serialization.Model): """Contains SKU in an ExpressRouteCircuit. :ivar name: The name of the SKU. :vartype name: str :ivar tier: The tier of the SKU. Possible values are 'Standard' and 'Premium'. Known values are: "Standard" and "Premium". :vartype tier: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitSkuTier :ivar family: The family of the SKU. Possible values are: 'UnlimitedData' and 'MeteredData'. Known values are: "UnlimitedData" and "MeteredData". :vartype family: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitSkuFamily """ _attribute_map = { "name": {"key": "name", "type": "str"}, "tier": {"key": "tier", "type": "str"}, "family": {"key": "family", "type": "str"}, } def __init__( self, *, name: Optional[str] = None, tier: Optional[Union[str, "_models.ExpressRouteCircuitSkuTier"]] = None, family: Optional[Union[str, "_models.ExpressRouteCircuitSkuFamily"]] = None, **kwargs ): """ :keyword name: The name of the SKU. :paramtype name: str :keyword tier: The tier of the SKU. 
Possible values are 'Standard' and 'Premium'. Known values are: "Standard" and "Premium". :paramtype tier: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitSkuTier :keyword family: The family of the SKU. Possible values are: 'UnlimitedData' and 'MeteredData'. Known values are: "UnlimitedData" and "MeteredData". :paramtype family: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitSkuFamily """ super().__init__(**kwargs) self.name = name self.tier = tier self.family = family class ExpressRouteCircuitsRoutesTableListResult(_serialization.Model): """Response for ListRoutesTable associated with the Express Route Circuits API. :ivar value: The list of routes table. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitRoutesTable] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[ExpressRouteCircuitRoutesTable]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.ExpressRouteCircuitRoutesTable"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: The list of routes table. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitRoutesTable] :keyword next_link: The URL to get the next set of results. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class ExpressRouteCircuitsRoutesTableSummaryListResult(_serialization.Model): """Response for ListRoutesTable associated with the Express Route Circuits API. :ivar value: A list of the routes table. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitRoutesTableSummary] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[ExpressRouteCircuitRoutesTableSummary]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.ExpressRouteCircuitRoutesTableSummary"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: A list of the routes table. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitRoutesTableSummary] :keyword next_link: The URL to get the next set of results. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class ExpressRouteCircuitStats(_serialization.Model): """Contains stats associated with the peering. :ivar primarybytes_in: Gets BytesIn of the peering. :vartype primarybytes_in: int :ivar primarybytes_out: Gets BytesOut of the peering. :vartype primarybytes_out: int :ivar secondarybytes_in: Gets BytesIn of the peering. :vartype secondarybytes_in: int :ivar secondarybytes_out: Gets BytesOut of the peering. :vartype secondarybytes_out: int """ _attribute_map = { "primarybytes_in": {"key": "primarybytesIn", "type": "int"}, "primarybytes_out": {"key": "primarybytesOut", "type": "int"}, "secondarybytes_in": {"key": "secondarybytesIn", "type": "int"}, "secondarybytes_out": {"key": "secondarybytesOut", "type": "int"}, } def __init__( self, *, primarybytes_in: Optional[int] = None, primarybytes_out: Optional[int] = None, secondarybytes_in: Optional[int] = None, secondarybytes_out: Optional[int] = None, **kwargs ): """ :keyword primarybytes_in: Gets BytesIn of the peering. :paramtype primarybytes_in: int :keyword primarybytes_out: Gets BytesOut of the peering. 
:paramtype primarybytes_out: int :keyword secondarybytes_in: Gets BytesIn of the peering. :paramtype secondarybytes_in: int :keyword secondarybytes_out: Gets BytesOut of the peering. :paramtype secondarybytes_out: int """ super().__init__(**kwargs) self.primarybytes_in = primarybytes_in self.primarybytes_out = primarybytes_out self.secondarybytes_in = secondarybytes_in self.secondarybytes_out = secondarybytes_out class ExpressRouteCrossConnection(Resource): # pylint: disable=too-many-instance-attributes """ExpressRouteCrossConnection resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar primary_azure_port: The name of the primary port. :vartype primary_azure_port: str :ivar secondary_azure_port: The name of the secondary port. :vartype secondary_azure_port: str :ivar s_tag: The identifier of the circuit traffic. :vartype s_tag: int :ivar peering_location: The peering location of the ExpressRoute circuit. :vartype peering_location: str :ivar bandwidth_in_mbps: The circuit bandwidth in Mbps. :vartype bandwidth_in_mbps: int :ivar express_route_circuit: The ExpressRouteCircuit. :vartype express_route_circuit: ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitReference :ivar service_provider_provisioning_state: The provisioning state of the circuit in the connectivity provider system. Possible values are 'NotProvisioned', 'Provisioning', 'Provisioned', and 'Deprovisioning'. Known values are: "NotProvisioned", "Provisioning", "Provisioned", and "Deprovisioning". :vartype service_provider_provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ServiceProviderProvisioningState :ivar service_provider_notes: Additional read only notes set by the connectivity provider. :vartype service_provider_notes: str :ivar provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str :ivar peerings: The list of peerings. 
:vartype peerings: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCrossConnectionPeering] """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "etag": {"readonly": True}, "primary_azure_port": {"readonly": True}, "secondary_azure_port": {"readonly": True}, "s_tag": {"readonly": True}, "provisioning_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "primary_azure_port": {"key": "properties.primaryAzurePort", "type": "str"}, "secondary_azure_port": {"key": "properties.secondaryAzurePort", "type": "str"}, "s_tag": {"key": "properties.sTag", "type": "int"}, "peering_location": {"key": "properties.peeringLocation", "type": "str"}, "bandwidth_in_mbps": {"key": "properties.bandwidthInMbps", "type": "int"}, "express_route_circuit": {"key": "properties.expressRouteCircuit", "type": "ExpressRouteCircuitReference"}, "service_provider_provisioning_state": {"key": "properties.serviceProviderProvisioningState", "type": "str"}, "service_provider_notes": {"key": "properties.serviceProviderNotes", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "peerings": {"key": "properties.peerings", "type": "[ExpressRouteCrossConnectionPeering]"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, peering_location: Optional[str] = None, bandwidth_in_mbps: Optional[int] = None, express_route_circuit: Optional["_models.ExpressRouteCircuitReference"] = None, service_provider_provisioning_state: Optional[Union[str, "_models.ServiceProviderProvisioningState"]] = None, service_provider_notes: Optional[str] = None, peerings: Optional[List["_models.ExpressRouteCrossConnectionPeering"]] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword peering_location: The peering location of the ExpressRoute circuit. :paramtype peering_location: str :keyword bandwidth_in_mbps: The circuit bandwidth in Mbps. :paramtype bandwidth_in_mbps: int :keyword express_route_circuit: The ExpressRouteCircuit. :paramtype express_route_circuit: ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitReference :keyword service_provider_provisioning_state: The provisioning state of the circuit in the connectivity provider system. Possible values are 'NotProvisioned', 'Provisioning', 'Provisioned', and 'Deprovisioning'. Known values are: "NotProvisioned", "Provisioning", "Provisioned", and "Deprovisioning". :paramtype service_provider_provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ServiceProviderProvisioningState :keyword service_provider_notes: Additional read only notes set by the connectivity provider. :paramtype service_provider_notes: str :keyword peerings: The list of peerings. 
:paramtype peerings: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCrossConnectionPeering] """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.etag = None self.primary_azure_port = None self.secondary_azure_port = None self.s_tag = None self.peering_location = peering_location self.bandwidth_in_mbps = bandwidth_in_mbps self.express_route_circuit = express_route_circuit self.service_provider_provisioning_state = service_provider_provisioning_state self.service_provider_notes = service_provider_notes self.provisioning_state = None self.peerings = peerings class ExpressRouteCrossConnectionListResult(_serialization.Model): """Response for ListExpressRouteCrossConnection API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A list of ExpressRouteCrossConnection resources. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCrossConnection] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[ExpressRouteCrossConnection]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.ExpressRouteCrossConnection"]] = None, **kwargs): """ :keyword value: A list of ExpressRouteCrossConnection resources. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCrossConnection] """ super().__init__(**kwargs) self.value = value self.next_link = None class ExpressRouteCrossConnectionPeering(SubResource): # pylint: disable=too-many-instance-attributes """Peering in an ExpressRoute Cross Connection resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar peering_type: The peering type. Known values are: "AzurePublicPeering", "AzurePrivatePeering", and "MicrosoftPeering". :vartype peering_type: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRoutePeeringType :ivar state: The peering state. Known values are: "Disabled" and "Enabled". :vartype state: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRoutePeeringState :ivar azure_asn: The Azure ASN. :vartype azure_asn: int :ivar peer_asn: The peer ASN. :vartype peer_asn: int :ivar primary_peer_address_prefix: The primary address prefix. :vartype primary_peer_address_prefix: str :ivar secondary_peer_address_prefix: The secondary address prefix. :vartype secondary_peer_address_prefix: str :ivar primary_azure_port: The primary port. :vartype primary_azure_port: str :ivar secondary_azure_port: The secondary port. :vartype secondary_azure_port: str :ivar shared_key: The shared key. :vartype shared_key: str :ivar vlan_id: The VLAN ID. :vartype vlan_id: int :ivar microsoft_peering_config: The Microsoft peering configuration. :vartype microsoft_peering_config: ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeeringConfig :ivar provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str :ivar gateway_manager_etag: The GatewayManager Etag. 
:vartype gateway_manager_etag: str :ivar last_modified_by: Gets whether the provider or the customer last modified the peering. :vartype last_modified_by: str :ivar ipv6_peering_config: The IPv6 peering configuration. :vartype ipv6_peering_config: ~azure.mgmt.network.v2018_07_01.models.Ipv6ExpressRouteCircuitPeeringConfig """ _validation = { "etag": {"readonly": True}, "azure_asn": {"readonly": True}, "peer_asn": {"maximum": 4294967295, "minimum": 1}, "primary_azure_port": {"readonly": True}, "secondary_azure_port": {"readonly": True}, "provisioning_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "peering_type": {"key": "properties.peeringType", "type": "str"}, "state": {"key": "properties.state", "type": "str"}, "azure_asn": {"key": "properties.azureASN", "type": "int"}, "peer_asn": {"key": "properties.peerASN", "type": "int"}, "primary_peer_address_prefix": {"key": "properties.primaryPeerAddressPrefix", "type": "str"}, "secondary_peer_address_prefix": {"key": "properties.secondaryPeerAddressPrefix", "type": "str"}, "primary_azure_port": {"key": "properties.primaryAzurePort", "type": "str"}, "secondary_azure_port": {"key": "properties.secondaryAzurePort", "type": "str"}, "shared_key": {"key": "properties.sharedKey", "type": "str"}, "vlan_id": {"key": "properties.vlanId", "type": "int"}, "microsoft_peering_config": { "key": "properties.microsoftPeeringConfig", "type": "ExpressRouteCircuitPeeringConfig", }, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "gateway_manager_etag": {"key": "properties.gatewayManagerEtag", "type": "str"}, "last_modified_by": {"key": "properties.lastModifiedBy", "type": "str"}, "ipv6_peering_config": {"key": "properties.ipv6PeeringConfig", "type": "Ipv6ExpressRouteCircuitPeeringConfig"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, peering_type: Optional[Union[str, "_models.ExpressRoutePeeringType"]] = None, state: Optional[Union[str, "_models.ExpressRoutePeeringState"]] = None, peer_asn: Optional[int] = None, primary_peer_address_prefix: Optional[str] = None, secondary_peer_address_prefix: Optional[str] = None, shared_key: Optional[str] = None, vlan_id: Optional[int] = None, microsoft_peering_config: Optional["_models.ExpressRouteCircuitPeeringConfig"] = None, gateway_manager_etag: Optional[str] = None, last_modified_by: Optional[str] = None, ipv6_peering_config: Optional["_models.Ipv6ExpressRouteCircuitPeeringConfig"] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword peering_type: The peering type. Known values are: "AzurePublicPeering", "AzurePrivatePeering", and "MicrosoftPeering". :paramtype peering_type: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRoutePeeringType :keyword state: The peering state. Known values are: "Disabled" and "Enabled". :paramtype state: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRoutePeeringState :keyword peer_asn: The peer ASN. :paramtype peer_asn: int :keyword primary_peer_address_prefix: The primary address prefix. :paramtype primary_peer_address_prefix: str :keyword secondary_peer_address_prefix: The secondary address prefix. :paramtype secondary_peer_address_prefix: str :keyword shared_key: The shared key. 
        :paramtype shared_key: str
        :keyword vlan_id: The VLAN ID.
        :paramtype vlan_id: int
        :keyword microsoft_peering_config: The Microsoft peering configuration.
        :paramtype microsoft_peering_config:
         ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeeringConfig
        :keyword gateway_manager_etag: The GatewayManager Etag.
        :paramtype gateway_manager_etag: str
        :keyword last_modified_by: Gets whether the provider or the customer last modified the
         peering.
        :paramtype last_modified_by: str
        :keyword ipv6_peering_config: The IPv6 peering configuration.
        :paramtype ipv6_peering_config:
         ~azure.mgmt.network.v2018_07_01.models.Ipv6ExpressRouteCircuitPeeringConfig
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = None
        self.peering_type = peering_type
        self.state = state
        self.azure_asn = None
        self.peer_asn = peer_asn
        self.primary_peer_address_prefix = primary_peer_address_prefix
        self.secondary_peer_address_prefix = secondary_peer_address_prefix
        self.primary_azure_port = None
        self.secondary_azure_port = None
        self.shared_key = shared_key
        self.vlan_id = vlan_id
        self.microsoft_peering_config = microsoft_peering_config
        self.provisioning_state = None
        self.gateway_manager_etag = gateway_manager_etag
        self.last_modified_by = last_modified_by
        self.ipv6_peering_config = ipv6_peering_config


class ExpressRouteCrossConnectionPeeringList(_serialization.Model):
    """Response for the ListPeering API service call. Retrieves all peerings that belong to an
    ExpressRouteCrossConnection.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The peerings in an express route cross connection.
    :vartype value:
     list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCrossConnectionPeering]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _validation = {
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[ExpressRouteCrossConnectionPeering]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.ExpressRouteCrossConnectionPeering"]] = None, **kwargs):
        """
        :keyword value: The peerings in an express route cross connection.
        :paramtype value:
         list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCrossConnectionPeering]
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = None
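
# Editorial note: the commented sketch below is not part of the generated client. It shows
# one plausible way to construct the peering model defined above and wrap it in the list
# model; the peering name, ASN, VLAN ID, and address prefixes are invented example values.
#
#     from azure.mgmt.network.v2018_07_01 import models as _models
#
#     peering = _models.ExpressRouteCrossConnectionPeering(
#         name="AzurePrivatePeering",
#         peering_type="AzurePrivatePeering",  # ExpressRoutePeeringType known value
#         peer_asn=65010,                      # must lie between 1 and 4294967295
#         vlan_id=200,
#         primary_peer_address_prefix="192.168.16.252/30",
#         secondary_peer_address_prefix="192.168.18.252/30",
#     )
#     peering_list = _models.ExpressRouteCrossConnectionPeeringList(value=[peering])


class ExpressRouteCrossConnectionRoutesTableSummary(_serialization.Model):
    """The routes table associated with the ExpressRouteCircuit.

    :ivar neighbor: IP address of Neighbor router.
    :vartype neighbor: str
    :ivar asn: Autonomous system number.
    :vartype asn: int
    :ivar up_down: The length of time that the BGP session has been in the Established state, or
     the current status if not in the Established state.
    :vartype up_down: str
    :ivar state_or_prefixes_received: Current state of the BGP session, and the number of prefixes
     that have been received from a neighbor or peer group.
    :vartype state_or_prefixes_received: str
    """

    _attribute_map = {
        "neighbor": {"key": "neighbor", "type": "str"},
        "asn": {"key": "asn", "type": "int"},
        "up_down": {"key": "upDown", "type": "str"},
        "state_or_prefixes_received": {"key": "stateOrPrefixesReceived", "type": "str"},
    }

    def __init__(
        self,
        *,
        neighbor: Optional[str] = None,
        asn: Optional[int] = None,
        up_down: Optional[str] = None,
        state_or_prefixes_received: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword neighbor: IP address of Neighbor router.
        :paramtype neighbor: str
        :keyword asn: Autonomous system number.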
        :paramtype asn: int
        :keyword up_down: The length of time that the BGP session has been in the Established
         state, or the current status if not in the Established state.
        :paramtype up_down: str
        :keyword state_or_prefixes_received: Current state of the BGP session, and the number of
         prefixes that have been received from a neighbor or peer group.
        :paramtype state_or_prefixes_received: str
        """
        super().__init__(**kwargs)
        self.neighbor = neighbor
        self.asn = asn
        self.up_down = up_down
        self.state_or_prefixes_received = state_or_prefixes_received


class ExpressRouteCrossConnectionsRoutesTableSummaryListResult(_serialization.Model):
    """Response for ListRoutesTable associated with the Express Route Cross Connections.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: A list of the routes table.
    :vartype value:
     list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCrossConnectionRoutesTableSummary]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _validation = {
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[ExpressRouteCrossConnectionRoutesTableSummary]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.ExpressRouteCrossConnectionRoutesTableSummary"]] = None, **kwargs
    ):
        """
        :keyword value: A list of the routes table.
        :paramtype value:
         list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCrossConnectionRoutesTableSummary]
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = None


class ExpressRouteServiceProvider(Resource):
    """An ExpressRouteResourceProvider object.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar peering_locations: Gets a list of peering locations.
    :vartype peering_locations: list[str]
    :ivar bandwidths_offered: Gets bandwidths offered.
    :vartype bandwidths_offered:
     list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteServiceProviderBandwidthsOffered]
    :ivar provisioning_state: Gets the provisioning state of the resource.
    :vartype provisioning_state: str
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "peering_locations": {"key": "properties.peeringLocations", "type": "[str]"},
        "bandwidths_offered": {
            "key": "properties.bandwidthsOffered",
            "type": "[ExpressRouteServiceProviderBandwidthsOffered]",
        },
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        peering_locations: Optional[List[str]] = None,
        bandwidths_offered: Optional[List["_models.ExpressRouteServiceProviderBandwidthsOffered"]] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword peering_locations: Gets a list of peering locations.
        :paramtype peering_locations: list[str]
        :keyword bandwidths_offered: Gets bandwidths offered.
        :paramtype bandwidths_offered:
         list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteServiceProviderBandwidthsOffered]
        :keyword provisioning_state: Gets the provisioning state of the resource.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.peering_locations = peering_locations
        self.bandwidths_offered = bandwidths_offered
        self.provisioning_state = provisioning_state


class ExpressRouteServiceProviderBandwidthsOffered(_serialization.Model):
    """Contains bandwidths offered in ExpressRouteServiceProvider resources.

    :ivar offer_name: The OfferName.
    :vartype offer_name: str
    :ivar value_in_mbps: The ValueInMbps.
    :vartype value_in_mbps: int
    """

    _attribute_map = {
        "offer_name": {"key": "offerName", "type": "str"},
        "value_in_mbps": {"key": "valueInMbps", "type": "int"},
    }

    def __init__(self, *, offer_name: Optional[str] = None, value_in_mbps: Optional[int] = None, **kwargs):
        """
        :keyword offer_name: The OfferName.
        :paramtype offer_name: str
        :keyword value_in_mbps: The ValueInMbps.
        :paramtype value_in_mbps: int
        """
        super().__init__(**kwargs)
        self.offer_name = offer_name
        self.value_in_mbps = value_in_mbps


class ExpressRouteServiceProviderListResult(_serialization.Model):
    """Response for the ListExpressRouteServiceProvider API service call.

    :ivar value: A list of ExpressRouteResourceProvider resources.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteServiceProvider]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[ExpressRouteServiceProvider]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.ExpressRouteServiceProvider"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: A list of ExpressRouteResourceProvider resources.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteServiceProvider]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = next_link


class FlowLogInformation(_serialization.Model):
    """Information on the configuration of flow log and traffic analytics (optional).

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: The ID of the resource to configure for flow log and traffic
     analytics (optional). Required.
    :vartype target_resource_id: str
    :ivar flow_analytics_configuration: Parameters that define the configuration of traffic
     analytics.
    :vartype flow_analytics_configuration:
     ~azure.mgmt.network.v2018_07_01.models.TrafficAnalyticsProperties
    :ivar storage_id: ID of the storage account which is used to store the flow log. Required.
    :vartype storage_id: str
    :ivar enabled: Flag to enable/disable flow logging. Required.
    :vartype enabled: bool
    :ivar retention_policy: Parameters that define the retention policy for flow log.
    :vartype retention_policy: ~azure.mgmt.network.v2018_07_01.models.RetentionPolicyParameters
    """

    _validation = {
        "target_resource_id": {"required": True},
        "storage_id": {"required": True},
        "enabled": {"required": True},
    }

    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
        "flow_analytics_configuration": {"key": "flowAnalyticsConfiguration", "type": "TrafficAnalyticsProperties"},
        "storage_id": {"key": "properties.storageId", "type": "str"},
        "enabled": {"key": "properties.enabled", "type": "bool"},
        "retention_policy": {"key": "properties.retentionPolicy", "type": "RetentionPolicyParameters"},
    }

    def __init__(
        self,
        *,
        target_resource_id: str,
        storage_id: str,
        enabled: bool,
        flow_analytics_configuration: Optional["_models.TrafficAnalyticsProperties"] = None,
        retention_policy: Optional["_models.RetentionPolicyParameters"] = None,
        **kwargs
    ):
        """
        :keyword target_resource_id: The ID of the resource to configure for flow log and traffic
         analytics (optional). Required.
        :paramtype target_resource_id: str
        :keyword flow_analytics_configuration: Parameters that define the configuration of traffic
         analytics.
        :paramtype flow_analytics_configuration:
         ~azure.mgmt.network.v2018_07_01.models.TrafficAnalyticsProperties
        :keyword storage_id: ID of the storage account which is used to store the flow log.
         Required.
        :paramtype storage_id: str
        :keyword enabled: Flag to enable/disable flow logging. Required.
        :paramtype enabled: bool
        :keyword retention_policy: Parameters that define the retention policy for flow log.
        :paramtype retention_policy:
         ~azure.mgmt.network.v2018_07_01.models.RetentionPolicyParameters
        """
        super().__init__(**kwargs)
        self.target_resource_id = target_resource_id
        self.flow_analytics_configuration = flow_analytics_configuration
        self.storage_id = storage_id
        self.enabled = enabled
        self.retention_policy = retention_policy


class FlowLogStatusParameters(_serialization.Model):
    """Parameters that define a resource to query flow log and traffic analytics (optional)
    status.

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: The target resource from which to get the flow log and traffic
     analytics (optional) status. Required.
    :vartype target_resource_id: str
    """

    _validation = {
        "target_resource_id": {"required": True},
    }

    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
    }

    def __init__(self, *, target_resource_id: str, **kwargs):
        """
        :keyword target_resource_id: The target resource from which to get the flow log and
         traffic analytics (optional) status. Required.
        :paramtype target_resource_id: str
        """
        super().__init__(**kwargs)
        self.target_resource_id = target_resource_id
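
# Editorial note: the commented sketch below is not part of the generated client. It shows
# one plausible construction of the FlowLogInformation model defined above; the resource
# and storage account IDs are invented, and RetentionPolicyParameters is assumed to accept
# the ``days``/``enabled`` keywords it exposes elsewhere in this models module.
#
#     from azure.mgmt.network.v2018_07_01 import models as _models
#
#     flow_log = _models.FlowLogInformation(
#         target_resource_id=(
#             "/subscriptions/<sub>/resourceGroups/rg1/providers/"
#             "Microsoft.Network/networkSecurityGroups/nsg1"
#         ),
#         storage_id=(
#             "/subscriptions/<sub>/resourceGroups/rg1/providers/"
#             "Microsoft.Storage/storageAccounts/flowlogstore"
#         ),
#         enabled=True,
#         retention_policy=_models.RetentionPolicyParameters(days=7, enabled=True),
#     )


class FrontendIPConfiguration(SubResource):  # pylint: disable=too-many-instance-attributes
    """Frontend IP address of the load balancer.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar zones: A list of availability zones denoting the IP allocated for the resource needs to
     come from.
    :vartype zones: list[str]
    :ivar inbound_nat_rules: Read only. Inbound rules URIs that use this frontend IP.
    :vartype inbound_nat_rules: list[~azure.mgmt.network.v2018_07_01.models.SubResource]
    :ivar inbound_nat_pools: Read only.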
Inbound pools URIs that use this frontend IP. :vartype inbound_nat_pools: list[~azure.mgmt.network.v2018_07_01.models.SubResource] :ivar outbound_rules: Read only. Outbound rules URIs that use this frontend IP. :vartype outbound_rules: list[~azure.mgmt.network.v2018_07_01.models.SubResource] :ivar load_balancing_rules: Gets load balancing rules URIs that use this frontend IP. :vartype load_balancing_rules: list[~azure.mgmt.network.v2018_07_01.models.SubResource] :ivar private_ip_address: The private IP address of the IP configuration. :vartype private_ip_address: str :ivar private_ip_allocation_method: The Private IP allocation method. Possible values are: 'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic". :vartype private_ip_allocation_method: str or ~azure.mgmt.network.v2018_07_01.models.IPAllocationMethod :ivar subnet: The reference of the subnet resource. :vartype subnet: ~azure.mgmt.network.v2018_07_01.models.Subnet :ivar public_ip_address: The reference of the Public IP resource. :vartype public_ip_address: ~azure.mgmt.network.v2018_07_01.models.PublicIPAddress :ivar public_ip_prefix: The reference of the Public IP Prefix resource. :vartype public_ip_prefix: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _validation = { "inbound_nat_rules": {"readonly": True}, "inbound_nat_pools": {"readonly": True}, "outbound_rules": {"readonly": True}, "load_balancing_rules": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "zones": {"key": "zones", "type": "[str]"}, "inbound_nat_rules": {"key": "properties.inboundNatRules", "type": "[SubResource]"}, "inbound_nat_pools": {"key": "properties.inboundNatPools", "type": "[SubResource]"}, "outbound_rules": {"key": "properties.outboundRules", "type": "[SubResource]"}, "load_balancing_rules": {"key": "properties.loadBalancingRules", "type": "[SubResource]"}, "private_ip_address": {"key": "properties.privateIPAddress", "type": "str"}, "private_ip_allocation_method": {"key": "properties.privateIPAllocationMethod", "type": "str"}, "subnet": {"key": "properties.subnet", "type": "Subnet"}, "public_ip_address": {"key": "properties.publicIPAddress", "type": "PublicIPAddress"}, "public_ip_prefix": {"key": "properties.publicIPPrefix", "type": "SubResource"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, zones: Optional[List[str]] = None, private_ip_address: Optional[str] = None, private_ip_allocation_method: Optional[Union[str, "_models.IPAllocationMethod"]] = None, subnet: Optional["_models.Subnet"] = None, public_ip_address: Optional["_models.PublicIPAddress"] = None, public_ip_prefix: Optional["_models.SubResource"] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword zones: A list of availability zones denoting the IP allocated for the resource needs to come from. 
:paramtype zones: list[str] :keyword private_ip_address: The private IP address of the IP configuration. :paramtype private_ip_address: str :keyword private_ip_allocation_method: The Private IP allocation method. Possible values are: 'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic". :paramtype private_ip_allocation_method: str or ~azure.mgmt.network.v2018_07_01.models.IPAllocationMethod :keyword subnet: The reference of the subnet resource. :paramtype subnet: ~azure.mgmt.network.v2018_07_01.models.Subnet :keyword public_ip_address: The reference of the Public IP resource. :paramtype public_ip_address: ~azure.mgmt.network.v2018_07_01.models.PublicIPAddress :keyword public_ip_prefix: The reference of the Public IP Prefix resource. :paramtype public_ip_prefix: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.zones = zones self.inbound_nat_rules = None self.inbound_nat_pools = None self.outbound_rules = None self.load_balancing_rules = None self.private_ip_address = private_ip_address self.private_ip_allocation_method = private_ip_allocation_method self.subnet = subnet self.public_ip_address = public_ip_address self.public_ip_prefix = public_ip_prefix self.provisioning_state = provisioning_state class GatewayRoute(_serialization.Model): """Gateway routing details. Variables are only populated by the server, and will be ignored when sending a request. :ivar local_address: The gateway's local address. :vartype local_address: str :ivar network: The route's network prefix. :vartype network: str :ivar next_hop: The route's next hop. :vartype next_hop: str :ivar source_peer: The peer this route was learned from. :vartype source_peer: str :ivar origin: The source this route was learned from. :vartype origin: str :ivar as_path: The route's AS path sequence. :vartype as_path: str :ivar weight: The route's weight. :vartype weight: int """ _validation = { "local_address": {"readonly": True}, "network": {"readonly": True}, "next_hop": {"readonly": True}, "source_peer": {"readonly": True}, "origin": {"readonly": True}, "as_path": {"readonly": True}, "weight": {"readonly": True}, } _attribute_map = { "local_address": {"key": "localAddress", "type": "str"}, "network": {"key": "network", "type": "str"}, "next_hop": {"key": "nextHop", "type": "str"}, "source_peer": {"key": "sourcePeer", "type": "str"}, "origin": {"key": "origin", "type": "str"}, "as_path": {"key": "asPath", "type": "str"}, "weight": {"key": "weight", "type": "int"}, } def __init__(self, **kwargs): """ """ super().__init__(**kwargs) self.local_address = None self.network = None self.next_hop = None self.source_peer = None self.origin = None self.as_path = None self.weight = None class GatewayRouteListResult(_serialization.Model): """List of virtual network gateway routes. :ivar value: List of gateway routes. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.GatewayRoute] """ _attribute_map = { "value": {"key": "value", "type": "[GatewayRoute]"}, } def __init__(self, *, value: Optional[List["_models.GatewayRoute"]] = None, **kwargs): """ :keyword value: List of gateway routes. 
:paramtype value: list[~azure.mgmt.network.v2018_07_01.models.GatewayRoute] """ super().__init__(**kwargs) self.value = value class GetVpnSitesConfigurationRequest(_serialization.Model): """List of Vpn-Sites. :ivar vpn_sites: List of resource-ids of the vpn-sites for which config is to be downloaded. :vartype vpn_sites: list[~azure.mgmt.network.v2018_07_01.models.SubResource] :ivar output_blob_sas_url: The sas-url to download the configurations for vpn-sites. :vartype output_blob_sas_url: str """ _attribute_map = { "vpn_sites": {"key": "vpnSites", "type": "[SubResource]"}, "output_blob_sas_url": {"key": "outputBlobSasUrl", "type": "str"}, } def __init__( self, *, vpn_sites: Optional[List["_models.SubResource"]] = None, output_blob_sas_url: Optional[str] = None, **kwargs ): """ :keyword vpn_sites: List of resource-ids of the vpn-sites for which config is to be downloaded. :paramtype vpn_sites: list[~azure.mgmt.network.v2018_07_01.models.SubResource] :keyword output_blob_sas_url: The sas-url to download the configurations for vpn-sites. :paramtype output_blob_sas_url: str """ super().__init__(**kwargs) self.vpn_sites = vpn_sites self.output_blob_sas_url = output_blob_sas_url class HTTPConfiguration(_serialization.Model): """HTTP configuration of the connectivity check. :ivar method: HTTP method. "Get" :vartype method: str or ~azure.mgmt.network.v2018_07_01.models.HTTPMethod :ivar headers: List of HTTP headers. :vartype headers: list[~azure.mgmt.network.v2018_07_01.models.HTTPHeader] :ivar valid_status_codes: Valid status codes. :vartype valid_status_codes: list[int] """ _attribute_map = { "method": {"key": "method", "type": "str"}, "headers": {"key": "headers", "type": "[HTTPHeader]"}, "valid_status_codes": {"key": "validStatusCodes", "type": "[int]"}, } def __init__( self, *, method: Optional[Union[str, "_models.HTTPMethod"]] = None, headers: Optional[List["_models.HTTPHeader"]] = None, valid_status_codes: Optional[List[int]] = None, **kwargs ): """ :keyword method: HTTP method. "Get" :paramtype method: str or ~azure.mgmt.network.v2018_07_01.models.HTTPMethod :keyword headers: List of HTTP headers. :paramtype headers: list[~azure.mgmt.network.v2018_07_01.models.HTTPHeader] :keyword valid_status_codes: Valid status codes. :paramtype valid_status_codes: list[int] """ super().__init__(**kwargs) self.method = method self.headers = headers self.valid_status_codes = valid_status_codes class HTTPHeader(_serialization.Model): """Describes the HTTP header. :ivar name: The name in HTTP header. :vartype name: str :ivar value: The value in HTTP header. :vartype value: str """ _attribute_map = { "name": {"key": "name", "type": "str"}, "value": {"key": "value", "type": "str"}, } def __init__(self, *, name: Optional[str] = None, value: Optional[str] = None, **kwargs): """ :keyword name: The name in HTTP header. :paramtype name: str :keyword value: The value in HTTP header. :paramtype value: str """ super().__init__(**kwargs) self.name = name self.value = value class HubVirtualNetworkConnection(Resource): """HubVirtualNetworkConnection Resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. 
    :vartype etag: str
    :ivar remote_virtual_network: Reference to the remote virtual network.
    :vartype remote_virtual_network: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar allow_hub_to_remote_vnet_transit: Indicates whether VirtualHub to RemoteVnet transit is
     enabled.
    :vartype allow_hub_to_remote_vnet_transit: bool
    :ivar allow_remote_vnet_to_use_hub_vnet_gateways: Allow RemoteVnet to use Virtual Hub's
     gateways.
    :vartype allow_remote_vnet_to_use_hub_vnet_gateways: bool
    :ivar provisioning_state: The provisioning state of the resource. Known values are:
     "Succeeded", "Updating", "Deleting", and "Failed".
    :vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "etag": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "remote_virtual_network": {"key": "properties.remoteVirtualNetwork", "type": "SubResource"},
        "allow_hub_to_remote_vnet_transit": {"key": "properties.allowHubToRemoteVnetTransit", "type": "bool"},
        "allow_remote_vnet_to_use_hub_vnet_gateways": {
            "key": "properties.allowRemoteVnetToUseHubVnetGateways",
            "type": "bool",
        },
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        remote_virtual_network: Optional["_models.SubResource"] = None,
        allow_hub_to_remote_vnet_transit: Optional[bool] = None,
        allow_remote_vnet_to_use_hub_vnet_gateways: Optional[bool] = None,
        provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword remote_virtual_network: Reference to the remote virtual network.
        :paramtype remote_virtual_network: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword allow_hub_to_remote_vnet_transit: Indicates whether VirtualHub to RemoteVnet
         transit is enabled.
        :paramtype allow_hub_to_remote_vnet_transit: bool
        :keyword allow_remote_vnet_to_use_hub_vnet_gateways: Allow RemoteVnet to use Virtual Hub's
         gateways.
        :paramtype allow_remote_vnet_to_use_hub_vnet_gateways: bool
        :keyword provisioning_state: The provisioning state of the resource. Known values are:
         "Succeeded", "Updating", "Deleting", and "Failed".
        :paramtype provisioning_state: str or
         ~azure.mgmt.network.v2018_07_01.models.ProvisioningState
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = None
        self.remote_virtual_network = remote_virtual_network
        self.allow_hub_to_remote_vnet_transit = allow_hub_to_remote_vnet_transit
        self.allow_remote_vnet_to_use_hub_vnet_gateways = allow_remote_vnet_to_use_hub_vnet_gateways
        self.provisioning_state = provisioning_state
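
# Editorial note: the commented sketch below is not part of the generated client. It shows
# one plausible construction of the HubVirtualNetworkConnection model defined above; the
# location and the spoke virtual network resource ID are invented example values.
#
#     from azure.mgmt.network.v2018_07_01 import models as _models
#
#     hub_connection = _models.HubVirtualNetworkConnection(
#         location="westus",
#         remote_virtual_network=_models.SubResource(
#             id="/subscriptions/<sub>/resourceGroups/rg1/providers/"
#             "Microsoft.Network/virtualNetworks/spoke-vnet"
#         ),
#         allow_hub_to_remote_vnet_transit=True,
#         allow_remote_vnet_to_use_hub_vnet_gateways=False,
#     )


class InboundNatPool(SubResource):  # pylint: disable=too-many-instance-attributes
    """Inbound NAT pool of the load balancer.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.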
:vartype etag: str :ivar frontend_ip_configuration: A reference to frontend IP addresses. :vartype frontend_ip_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar protocol: The transport protocol for the endpoint. Possible values are 'Udp' or 'Tcp' or 'All'. Known values are: "Udp", "Tcp", and "All". :vartype protocol: str or ~azure.mgmt.network.v2018_07_01.models.TransportProtocol :ivar frontend_port_range_start: The first port number in the range of external ports that will be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range between 1 and 65534. :vartype frontend_port_range_start: int :ivar frontend_port_range_end: The last port number in the range of external ports that will be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range between 1 and 65535. :vartype frontend_port_range_end: int :ivar backend_port: The port used for internal connections on the endpoint. Acceptable values are between 1 and 65535. :vartype backend_port: int :ivar idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to TCP. :vartype idle_timeout_in_minutes: int :ivar enable_floating_ip: Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed after you create the endpoint. :vartype enable_floating_ip: bool :ivar enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected connection termination. This element is only used when the protocol is set to TCP. :vartype enable_tcp_reset: bool :ivar provisioning_state: Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "frontend_ip_configuration": {"key": "properties.frontendIPConfiguration", "type": "SubResource"}, "protocol": {"key": "properties.protocol", "type": "str"}, "frontend_port_range_start": {"key": "properties.frontendPortRangeStart", "type": "int"}, "frontend_port_range_end": {"key": "properties.frontendPortRangeEnd", "type": "int"}, "backend_port": {"key": "properties.backendPort", "type": "int"}, "idle_timeout_in_minutes": {"key": "properties.idleTimeoutInMinutes", "type": "int"}, "enable_floating_ip": {"key": "properties.enableFloatingIP", "type": "bool"}, "enable_tcp_reset": {"key": "properties.enableTcpReset", "type": "bool"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, frontend_ip_configuration: Optional["_models.SubResource"] = None, protocol: Optional[Union[str, "_models.TransportProtocol"]] = None, frontend_port_range_start: Optional[int] = None, frontend_port_range_end: Optional[int] = None, backend_port: Optional[int] = None, idle_timeout_in_minutes: Optional[int] = None, enable_floating_ip: Optional[bool] = None, enable_tcp_reset: Optional[bool] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. 
:paramtype id: str :keyword name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword frontend_ip_configuration: A reference to frontend IP addresses. :paramtype frontend_ip_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword protocol: The transport protocol for the endpoint. Possible values are 'Udp' or 'Tcp' or 'All'. Known values are: "Udp", "Tcp", and "All". :paramtype protocol: str or ~azure.mgmt.network.v2018_07_01.models.TransportProtocol :keyword frontend_port_range_start: The first port number in the range of external ports that will be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range between 1 and 65534. :paramtype frontend_port_range_start: int :keyword frontend_port_range_end: The last port number in the range of external ports that will be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range between 1 and 65535. :paramtype frontend_port_range_end: int :keyword backend_port: The port used for internal connections on the endpoint. Acceptable values are between 1 and 65535. :paramtype backend_port: int :keyword idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to TCP. :paramtype idle_timeout_in_minutes: int :keyword enable_floating_ip: Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed after you create the endpoint. :paramtype enable_floating_ip: bool :keyword enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected connection termination. This element is only used when the protocol is set to TCP. :paramtype enable_tcp_reset: bool :keyword provisioning_state: Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.frontend_ip_configuration = frontend_ip_configuration self.protocol = protocol self.frontend_port_range_start = frontend_port_range_start self.frontend_port_range_end = frontend_port_range_end self.backend_port = backend_port self.idle_timeout_in_minutes = idle_timeout_in_minutes self.enable_floating_ip = enable_floating_ip self.enable_tcp_reset = enable_tcp_reset self.provisioning_state = provisioning_state class InboundNatRule(SubResource): # pylint: disable=too-many-instance-attributes """Inbound NAT rule of the load balancer. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar frontend_ip_configuration: A reference to frontend IP addresses. 
:vartype frontend_ip_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar backend_ip_configuration: A reference to a private IP address defined on a network interface of a VM. Traffic sent to the frontend port of each of the frontend IP configurations is forwarded to the backend IP. :vartype backend_ip_configuration: ~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceIPConfiguration :ivar protocol: The transport protocol for the endpoint. Possible values are 'Udp' or 'Tcp' or 'All'. Known values are: "Udp", "Tcp", and "All". :vartype protocol: str or ~azure.mgmt.network.v2018_07_01.models.TransportProtocol :ivar frontend_port: The port for the external endpoint. Port numbers for each rule must be unique within the Load Balancer. Acceptable values range from 1 to 65534. :vartype frontend_port: int :ivar backend_port: The port used for the internal endpoint. Acceptable values range from 1 to 65535. :vartype backend_port: int :ivar idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to TCP. :vartype idle_timeout_in_minutes: int :ivar enable_floating_ip: Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed after you create the endpoint. :vartype enable_floating_ip: bool :ivar enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected connection termination. This element is only used when the protocol is set to TCP. :vartype enable_tcp_reset: bool :ivar provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _validation = { "backend_ip_configuration": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "frontend_ip_configuration": {"key": "properties.frontendIPConfiguration", "type": "SubResource"}, "backend_ip_configuration": { "key": "properties.backendIPConfiguration", "type": "NetworkInterfaceIPConfiguration", }, "protocol": {"key": "properties.protocol", "type": "str"}, "frontend_port": {"key": "properties.frontendPort", "type": "int"}, "backend_port": {"key": "properties.backendPort", "type": "int"}, "idle_timeout_in_minutes": {"key": "properties.idleTimeoutInMinutes", "type": "int"}, "enable_floating_ip": {"key": "properties.enableFloatingIP", "type": "bool"}, "enable_tcp_reset": {"key": "properties.enableTcpReset", "type": "bool"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, frontend_ip_configuration: Optional["_models.SubResource"] = None, protocol: Optional[Union[str, "_models.TransportProtocol"]] = None, frontend_port: Optional[int] = None, backend_port: Optional[int] = None, idle_timeout_in_minutes: Optional[int] = None, enable_floating_ip: Optional[bool] = None, enable_tcp_reset: Optional[bool] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: Gets name of the resource that is unique within a resource group. 
         This name can be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword frontend_ip_configuration: A reference to frontend IP addresses.
        :paramtype frontend_ip_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword protocol: The transport protocol for the endpoint. Possible values are 'Udp' or
         'Tcp' or 'All'. Known values are: "Udp", "Tcp", and "All".
        :paramtype protocol: str or ~azure.mgmt.network.v2018_07_01.models.TransportProtocol
        :keyword frontend_port: The port for the external endpoint. Port numbers for each rule
         must be unique within the Load Balancer. Acceptable values range from 1 to 65534.
        :paramtype frontend_port: int
        :keyword backend_port: The port used for the internal endpoint. Acceptable values range
         from 1 to 65535.
        :paramtype backend_port: int
        :keyword idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can
         be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used
         when the protocol is set to TCP.
        :paramtype idle_timeout_in_minutes: int
        :keyword enable_floating_ip: Configures a virtual machine's endpoint for the floating IP
         capability required to configure a SQL AlwaysOn Availability Group. This setting is
         required when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't
         be changed after you create the endpoint.
        :paramtype enable_floating_ip: bool
        :keyword enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or
         unexpected connection termination. This element is only used when the protocol is set to
         TCP.
        :paramtype enable_tcp_reset: bool
        :keyword provisioning_state: Gets the provisioning state of the public IP resource.
         Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.frontend_ip_configuration = frontend_ip_configuration
        self.backend_ip_configuration = None
        self.protocol = protocol
        self.frontend_port = frontend_port
        self.backend_port = backend_port
        self.idle_timeout_in_minutes = idle_timeout_in_minutes
        self.enable_floating_ip = enable_floating_ip
        self.enable_tcp_reset = enable_tcp_reset
        self.provisioning_state = provisioning_state


class InboundNatRuleListResult(_serialization.Model):
    """Response for ListInboundNatRule API service call.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: A list of inbound NAT rules in a load balancer.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.InboundNatRule]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _validation = {
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[InboundNatRule]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.InboundNatRule"]] = None, **kwargs):
        """
        :keyword value: A list of inbound NAT rules in a load balancer.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.InboundNatRule]
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = None
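
# Editorial note: the commented sketch below is not part of the generated client. It shows
# one plausible construction of the InboundNatRule model defined above; the rule name,
# port numbers, and the frontend IP configuration resource ID are invented example values.
#
#     from azure.mgmt.network.v2018_07_01 import models as _models
#
#     nat_rule = _models.InboundNatRule(
#         name="ssh-nat-rule",
#         protocol="Tcp",        # TransportProtocol known value
#         frontend_port=50001,   # acceptable values range from 1 to 65534
#         backend_port=22,       # acceptable values range from 1 to 65535
#         idle_timeout_in_minutes=4,
#         enable_floating_ip=False,
#         frontend_ip_configuration=_models.SubResource(
#             id="/subscriptions/<sub>/resourceGroups/rg1/providers/"
#             "Microsoft.Network/loadBalancers/lb1/frontendIPConfigurations/fe1"
#         ),
#     )


class IPAddressAvailabilityResult(_serialization.Model):
    """Response for CheckIPAddressAvailability API service call.

    :ivar available: Private IP address availability.
    :vartype available: bool
    :ivar available_ip_addresses: Contains other available private IP addresses if the asked for
     address is taken.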
:vartype available_ip_addresses: list[str] """ _attribute_map = { "available": {"key": "available", "type": "bool"}, "available_ip_addresses": {"key": "availableIPAddresses", "type": "[str]"}, } def __init__( self, *, available: Optional[bool] = None, available_ip_addresses: Optional[List[str]] = None, **kwargs ): """ :keyword available: Private IP address availability. :paramtype available: bool :keyword available_ip_addresses: Contains other available private IP addresses if the asked for address is taken. :paramtype available_ip_addresses: list[str] """ super().__init__(**kwargs) self.available = available self.available_ip_addresses = available_ip_addresses class IPConfiguration(SubResource): """IP configuration. :ivar id: Resource ID. :vartype id: str :ivar name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar private_ip_address: The private IP address of the IP configuration. :vartype private_ip_address: str :ivar private_ip_allocation_method: The private IP allocation method. Possible values are 'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic". :vartype private_ip_allocation_method: str or ~azure.mgmt.network.v2018_07_01.models.IPAllocationMethod :ivar subnet: The reference of the subnet resource. :vartype subnet: ~azure.mgmt.network.v2018_07_01.models.Subnet :ivar public_ip_address: The reference of the public IP resource. :vartype public_ip_address: ~azure.mgmt.network.v2018_07_01.models.PublicIPAddress :ivar provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "private_ip_address": {"key": "properties.privateIPAddress", "type": "str"}, "private_ip_allocation_method": {"key": "properties.privateIPAllocationMethod", "type": "str"}, "subnet": {"key": "properties.subnet", "type": "Subnet"}, "public_ip_address": {"key": "properties.publicIPAddress", "type": "PublicIPAddress"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, private_ip_address: Optional[str] = None, private_ip_allocation_method: Optional[Union[str, "_models.IPAllocationMethod"]] = None, subnet: Optional["_models.Subnet"] = None, public_ip_address: Optional["_models.PublicIPAddress"] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword private_ip_address: The private IP address of the IP configuration. :paramtype private_ip_address: str :keyword private_ip_allocation_method: The private IP allocation method. Possible values are 'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic". :paramtype private_ip_allocation_method: str or ~azure.mgmt.network.v2018_07_01.models.IPAllocationMethod :keyword subnet: The reference of the subnet resource. 
        :paramtype subnet: ~azure.mgmt.network.v2018_07_01.models.Subnet
        :keyword public_ip_address: The reference of the public IP resource.
        :paramtype public_ip_address: ~azure.mgmt.network.v2018_07_01.models.PublicIPAddress
        :keyword provisioning_state: Gets the provisioning state of the public IP resource.
         Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.private_ip_address = private_ip_address
        self.private_ip_allocation_method = private_ip_allocation_method
        self.subnet = subnet
        self.public_ip_address = public_ip_address
        self.provisioning_state = provisioning_state


class IpsecPolicy(_serialization.Model):
    """An IPSec Policy configuration for a virtual network gateway connection.

    All required parameters must be populated in order to send to Azure.

    :ivar sa_life_time_seconds: The IPSec Security Association (also called Quick Mode or Phase 2
     SA) lifetime in seconds for a site to site VPN tunnel. Required.
    :vartype sa_life_time_seconds: int
    :ivar sa_data_size_kilobytes: The IPSec Security Association (also called Quick Mode or Phase
     2 SA) payload size in KB for a site to site VPN tunnel. Required.
    :vartype sa_data_size_kilobytes: int
    :ivar ipsec_encryption: The IPSec encryption algorithm (IKE phase 2). Required. Known values
     are: "None", "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES128", "GCMAES192", and
     "GCMAES256".
    :vartype ipsec_encryption: str or ~azure.mgmt.network.v2018_07_01.models.IpsecEncryption
    :ivar ipsec_integrity: The IPSec integrity algorithm (IKE phase 2). Required. Known values
     are: "MD5", "SHA1", "SHA256", "GCMAES128", "GCMAES192", and "GCMAES256".
    :vartype ipsec_integrity: str or ~azure.mgmt.network.v2018_07_01.models.IpsecIntegrity
    :ivar ike_encryption: The IKE encryption algorithm (IKE phase 1). Required. Known values are:
     "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES256", and "GCMAES128".
    :vartype ike_encryption: str or ~azure.mgmt.network.v2018_07_01.models.IkeEncryption
    :ivar ike_integrity: The IKE integrity algorithm (IKE phase 1). Required. Known values are:
     "MD5", "SHA1", "SHA256", "SHA384", "GCMAES256", and "GCMAES128".
    :vartype ike_integrity: str or ~azure.mgmt.network.v2018_07_01.models.IkeIntegrity
    :ivar dh_group: The DH Groups used in IKE Phase 1 for initial SA. Required. Known values are:
     "None", "DHGroup1", "DHGroup2", "DHGroup14", "DHGroup2048", "ECP256", "ECP384", and
     "DHGroup24".
    :vartype dh_group: str or ~azure.mgmt.network.v2018_07_01.models.DhGroup
    :ivar pfs_group: The Pfs Groups used in IKE Phase 2 for new child SA. Required. Known values
     are: "None", "PFS1", "PFS2", "PFS2048", "ECP256", "ECP384", "PFS24", "PFS14", and "PFSMM".
    :vartype pfs_group: str or ~azure.mgmt.network.v2018_07_01.models.PfsGroup
    """

    _validation = {
        "sa_life_time_seconds": {"required": True},
        "sa_data_size_kilobytes": {"required": True},
        "ipsec_encryption": {"required": True},
        "ipsec_integrity": {"required": True},
        "ike_encryption": {"required": True},
        "ike_integrity": {"required": True},
        "dh_group": {"required": True},
        "pfs_group": {"required": True},
    }

    _attribute_map = {
        "sa_life_time_seconds": {"key": "saLifeTimeSeconds", "type": "int"},
        "sa_data_size_kilobytes": {"key": "saDataSizeKilobytes", "type": "int"},
        "ipsec_encryption": {"key": "ipsecEncryption", "type": "str"},
        "ipsec_integrity": {"key": "ipsecIntegrity", "type": "str"},
        "ike_encryption": {"key": "ikeEncryption", "type": "str"},
        "ike_integrity": {"key": "ikeIntegrity", "type": "str"},
        "dh_group": {"key": "dhGroup", "type": "str"},
        "pfs_group": {"key": "pfsGroup", "type": "str"},
    }

    def __init__(
        self,
        *,
        sa_life_time_seconds: int,
        sa_data_size_kilobytes: int,
        ipsec_encryption: Union[str, "_models.IpsecEncryption"],
        ipsec_integrity: Union[str, "_models.IpsecIntegrity"],
        ike_encryption: Union[str, "_models.IkeEncryption"],
        ike_integrity: Union[str, "_models.IkeIntegrity"],
        dh_group: Union[str, "_models.DhGroup"],
        pfs_group: Union[str, "_models.PfsGroup"],
        **kwargs
    ):
        """
        :keyword sa_life_time_seconds: The IPSec Security Association (also called Quick Mode or
         Phase 2 SA) lifetime in seconds for a site to site VPN tunnel. Required.
        :paramtype sa_life_time_seconds: int
        :keyword sa_data_size_kilobytes: The IPSec Security Association (also called Quick Mode or
         Phase 2 SA) payload size in KB for a site to site VPN tunnel. Required.
        :paramtype sa_data_size_kilobytes: int
        :keyword ipsec_encryption: The IPSec encryption algorithm (IKE phase 2). Required. Known
         values are: "None", "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES128", "GCMAES192",
         and "GCMAES256".
        :paramtype ipsec_encryption: str or ~azure.mgmt.network.v2018_07_01.models.IpsecEncryption
        :keyword ipsec_integrity: The IPSec integrity algorithm (IKE phase 2). Required. Known
         values are: "MD5", "SHA1", "SHA256", "GCMAES128", "GCMAES192", and "GCMAES256".
        :paramtype ipsec_integrity: str or ~azure.mgmt.network.v2018_07_01.models.IpsecIntegrity
        :keyword ike_encryption: The IKE encryption algorithm (IKE phase 1). Required. Known
         values are: "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES256", and "GCMAES128".
        :paramtype ike_encryption: str or ~azure.mgmt.network.v2018_07_01.models.IkeEncryption
        :keyword ike_integrity: The IKE integrity algorithm (IKE phase 1). Required. Known values
         are: "MD5", "SHA1", "SHA256", "SHA384", "GCMAES256", and "GCMAES128".
        :paramtype ike_integrity: str or ~azure.mgmt.network.v2018_07_01.models.IkeIntegrity
        :keyword dh_group: The DH Groups used in IKE Phase 1 for initial SA. Required. Known
         values are: "None", "DHGroup1", "DHGroup2", "DHGroup14", "DHGroup2048", "ECP256",
         "ECP384", and "DHGroup24".
        :paramtype dh_group: str or ~azure.mgmt.network.v2018_07_01.models.DhGroup
        :keyword pfs_group: The Pfs Groups used in IKE Phase 2 for new child SA. Required. Known
         values are: "None", "PFS1", "PFS2", "PFS2048", "ECP256", "ECP384", "PFS24", "PFS14", and
         "PFSMM".
        :paramtype pfs_group: str or ~azure.mgmt.network.v2018_07_01.models.PfsGroup
        """
        super().__init__(**kwargs)
        self.sa_life_time_seconds = sa_life_time_seconds
        self.sa_data_size_kilobytes = sa_data_size_kilobytes
        self.ipsec_encryption = ipsec_encryption
        self.ipsec_integrity = ipsec_integrity
        self.ike_encryption = ike_encryption
        self.ike_integrity = ike_integrity
        self.dh_group = dh_group
        self.pfs_group = pfs_group


class IpTag(_serialization.Model):
    """Contains the IpTag associated with the object.

    :ivar ip_tag_type: Gets or sets the ipTag type. Example: FirstPartyUsage.
    :vartype ip_tag_type: str
    :ivar tag: Gets or sets the value of the IpTag associated with the public IP. Example: SQL,
     Storage etc.
    :vartype tag: str
    """

    _attribute_map = {
        "ip_tag_type": {"key": "ipTagType", "type": "str"},
        "tag": {"key": "tag", "type": "str"},
    }

    def __init__(self, *, ip_tag_type: Optional[str] = None, tag: Optional[str] = None, **kwargs):
        """
        :keyword ip_tag_type: Gets or sets the ipTag type. Example: FirstPartyUsage.
        :paramtype ip_tag_type: str
        :keyword tag: Gets or sets the value of the IpTag associated with the public IP. Example:
         SQL, Storage etc.
        :paramtype tag: str
        """
        super().__init__(**kwargs)
        self.ip_tag_type = ip_tag_type
        self.tag = tag
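
# Editorial note: the commented sketch below is not part of the generated client. It shows
# one plausible construction of the IpsecPolicy model defined above, using known enum
# values from its docstring; the specific algorithm and lifetime choices are illustrative.
#
#     from azure.mgmt.network.v2018_07_01 import models as _models
#
#     ipsec_policy = _models.IpsecPolicy(
#         sa_life_time_seconds=27000,
#         sa_data_size_kilobytes=102400000,
#         ipsec_encryption="AES256",
#         ipsec_integrity="SHA256",
#         ike_encryption="AES256",
#         ike_integrity="SHA384",
#         dh_group="DHGroup14",
#         pfs_group="PFS2048",
#     )


class Ipv6ExpressRouteCircuitPeeringConfig(_serialization.Model):
    """Contains IPv6 peering config.

    :ivar primary_peer_address_prefix: The primary address prefix.
    :vartype primary_peer_address_prefix: str
    :ivar secondary_peer_address_prefix: The secondary address prefix.
    :vartype secondary_peer_address_prefix: str
    :ivar microsoft_peering_config: The Microsoft peering configuration.
    :vartype microsoft_peering_config:
     ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeeringConfig
    :ivar route_filter: The reference of the RouteFilter resource.
    :vartype route_filter: ~azure.mgmt.network.v2018_07_01.models.RouteFilter
    :ivar state: The state of peering. Possible values are: 'Disabled' and 'Enabled'. Known values
     are: "Disabled" and "Enabled".
    :vartype state: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeeringState
    """

    _attribute_map = {
        "primary_peer_address_prefix": {"key": "primaryPeerAddressPrefix", "type": "str"},
        "secondary_peer_address_prefix": {"key": "secondaryPeerAddressPrefix", "type": "str"},
        "microsoft_peering_config": {"key": "microsoftPeeringConfig", "type": "ExpressRouteCircuitPeeringConfig"},
        "route_filter": {"key": "routeFilter", "type": "RouteFilter"},
        "state": {"key": "state", "type": "str"},
    }

    def __init__(
        self,
        *,
        primary_peer_address_prefix: Optional[str] = None,
        secondary_peer_address_prefix: Optional[str] = None,
        microsoft_peering_config: Optional["_models.ExpressRouteCircuitPeeringConfig"] = None,
        route_filter: Optional["_models.RouteFilter"] = None,
        state: Optional[Union[str, "_models.ExpressRouteCircuitPeeringState"]] = None,
        **kwargs
    ):
        """
        :keyword primary_peer_address_prefix: The primary address prefix.
        :paramtype primary_peer_address_prefix: str
        :keyword secondary_peer_address_prefix: The secondary address prefix.
        :paramtype secondary_peer_address_prefix: str
        :keyword microsoft_peering_config: The Microsoft peering configuration.
        :paramtype microsoft_peering_config:
         ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeeringConfig
        :keyword route_filter: The reference of the RouteFilter resource.
        :paramtype route_filter: ~azure.mgmt.network.v2018_07_01.models.RouteFilter
        :keyword state: The state of peering. Possible values are: 'Disabled' and 'Enabled'.
         Known values are: "Disabled" and "Enabled".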
:paramtype state: str or ~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeeringState """ super().__init__(**kwargs) self.primary_peer_address_prefix = primary_peer_address_prefix self.secondary_peer_address_prefix = secondary_peer_address_prefix self.microsoft_peering_config = microsoft_peering_config self.route_filter = route_filter self.state = state class ListHubVirtualNetworkConnectionsResult(_serialization.Model): """List of HubVirtualNetworkConnections and a URL nextLink to get the next set of results. :ivar value: List of HubVirtualNetworkConnections. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.HubVirtualNetworkConnection] :ivar next_link: URL to get the next set of operation list results if there are any. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[HubVirtualNetworkConnection]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.HubVirtualNetworkConnection"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: List of HubVirtualNetworkConnections. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.HubVirtualNetworkConnection] :keyword next_link: URL to get the next set of operation list results if there are any. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class ListVirtualHubsResult(_serialization.Model): """Result of the request to list VirtualHubs. It contains a list of VirtualHubs and a URL nextLink to get the next set of results. :ivar value: List of VirtualHubs. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualHub] :ivar next_link: URL to get the next set of operation list results if there are any. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[VirtualHub]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.VirtualHub"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: List of VirtualHubs. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualHub] :keyword next_link: URL to get the next set of operation list results if there are any. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class ListVirtualWANsResult(_serialization.Model): """Result of the request to list VirtualWANs. It contains a list of VirtualWANs and a URL nextLink to get the next set of results. :ivar value: List of VirtualWANs. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualWAN] :ivar next_link: URL to get the next set of operation list results if there are any. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[VirtualWAN]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.VirtualWAN"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: List of VirtualWANs. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualWAN] :keyword next_link: URL to get the next set of operation list results if there are any. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class ListVpnConnectionsResult(_serialization.Model): """Result of the request to list all vpn connections to a virtual wan vpn gateway. It contains a list of Vpn Connections and a URL nextLink to get the next set of results. :ivar value: List of Vpn Connections. 
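    A paging sketch (illustrative only; ``fetch_page`` is a hypothetical helper
    standing in for whatever performs the HTTP request and deserializes this
    model)::

        result = fetch_page(first_url)
        connections = list(result.value or [])
        while result.next_link:  # follow nextLink until the listing is exhausted
            result = fetch_page(result.next_link)
            connections.extend(result.value or [])

    The same pattern applies to the other ``List*Result`` models in this module.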
:vartype value: list[~azure.mgmt.network.v2018_07_01.models.VpnConnection] :ivar next_link: URL to get the next set of operation list results if there are any. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[VpnConnection]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.VpnConnection"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: List of Vpn Connections. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.VpnConnection] :keyword next_link: URL to get the next set of operation list results if there are any. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class ListVpnGatewaysResult(_serialization.Model): """Result of the request to list VpnGateways. It contains a list of VpnGateways and a URL nextLink to get the next set of results. :ivar value: List of VpnGateways. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.VpnGateway] :ivar next_link: URL to get the next set of operation list results if there are any. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[VpnGateway]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.VpnGateway"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: List of VpnGateways. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.VpnGateway] :keyword next_link: URL to get the next set of operation list results if there are any. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class ListVpnSitesResult(_serialization.Model): """Result of the request to list VpnSites. It contains a list of VpnSites and a URL nextLink to get the next set of results. :ivar value: List of VpnSites. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.VpnSite] :ivar next_link: URL to get the next set of operation list results if there are any. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[VpnSite]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.VpnSite"]] = None, next_link: Optional[str] = None, **kwargs): """ :keyword value: List of VpnSites. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.VpnSite] :keyword next_link: URL to get the next set of operation list results if there are any. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class LoadBalancer(Resource): # pylint: disable=too-many-instance-attributes """LoadBalancer resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar sku: The load balancer SKU. :vartype sku: ~azure.mgmt.network.v2018_07_01.models.LoadBalancerSku :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar frontend_ip_configurations: Object representing the frontend IPs to be used for the load balancer. 
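    A construction sketch (illustrative only; the location and tag values are
    hypothetical). Pools, probes, and rules would be supplied through the list
    parameters documented below::

        lb = LoadBalancer(
            location="westus",
            tags={"env": "dev"},
            sku=LoadBalancerSku(name="Standard"),
        )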
    :vartype frontend_ip_configurations:
     list[~azure.mgmt.network.v2018_07_01.models.FrontendIPConfiguration]
    :ivar backend_address_pools: Collection of backend address pools used by a load balancer.
    :vartype backend_address_pools:
     list[~azure.mgmt.network.v2018_07_01.models.BackendAddressPool]
    :ivar load_balancing_rules: Object collection representing the load balancing rules of the
     load balancer.
    :vartype load_balancing_rules: list[~azure.mgmt.network.v2018_07_01.models.LoadBalancingRule]
    :ivar probes: Collection of probe objects used in the load balancer.
    :vartype probes: list[~azure.mgmt.network.v2018_07_01.models.Probe]
    :ivar inbound_nat_rules: Collection of inbound NAT Rules used by a load balancer. Defining
     inbound NAT rules on your load balancer is mutually exclusive with defining an inbound NAT
     pool. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are
     associated with individual virtual machines cannot reference an Inbound NAT pool. They have
     to reference individual inbound NAT rules.
    :vartype inbound_nat_rules: list[~azure.mgmt.network.v2018_07_01.models.InboundNatRule]
    :ivar inbound_nat_pools: Defines an external port range for inbound NAT to a single backend
     port on NICs associated with a load balancer. Inbound NAT rules are created automatically for
     each NIC associated with the Load Balancer using an external port from this range. Defining
     an Inbound NAT pool on your Load Balancer is mutually exclusive with defining inbound Nat
     rules. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are
     associated with individual virtual machines cannot reference an inbound NAT pool. They have
     to reference individual inbound NAT rules.
    :vartype inbound_nat_pools: list[~azure.mgmt.network.v2018_07_01.models.InboundNatPool]
    :ivar outbound_rules: The outbound rules.
    :vartype outbound_rules: list[~azure.mgmt.network.v2018_07_01.models.OutboundRule]
    :ivar resource_guid: The resource GUID property of the load balancer resource.
    :vartype resource_guid: str
    :ivar provisioning_state: Gets the provisioning state of the PublicIP resource. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "sku": {"key": "sku", "type": "LoadBalancerSku"},
        "etag": {"key": "etag", "type": "str"},
        "frontend_ip_configurations": {
            "key": "properties.frontendIPConfigurations",
            "type": "[FrontendIPConfiguration]",
        },
        "backend_address_pools": {"key": "properties.backendAddressPools", "type": "[BackendAddressPool]"},
        "load_balancing_rules": {"key": "properties.loadBalancingRules", "type": "[LoadBalancingRule]"},
        "probes": {"key": "properties.probes", "type": "[Probe]"},
        "inbound_nat_rules": {"key": "properties.inboundNatRules", "type": "[InboundNatRule]"},
        "inbound_nat_pools": {"key": "properties.inboundNatPools", "type": "[InboundNatPool]"},
        "outbound_rules": {"key": "properties.outboundRules", "type": "[OutboundRule]"},
        "resource_guid": {"key": "properties.resourceGuid", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        sku: Optional["_models.LoadBalancerSku"] = None,
        etag: Optional[str] = None,
        frontend_ip_configurations: Optional[List["_models.FrontendIPConfiguration"]] = None,
        backend_address_pools: Optional[List["_models.BackendAddressPool"]] = None,
        load_balancing_rules: Optional[List["_models.LoadBalancingRule"]] = None,
        probes: Optional[List["_models.Probe"]] = None,
        inbound_nat_rules: Optional[List["_models.InboundNatRule"]] = None,
        inbound_nat_pools: Optional[List["_models.InboundNatPool"]] = None,
        outbound_rules: Optional[List["_models.OutboundRule"]] = None,
        resource_guid: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword sku: The load balancer SKU.
        :paramtype sku: ~azure.mgmt.network.v2018_07_01.models.LoadBalancerSku
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword frontend_ip_configurations: Object representing the frontend IPs to be used for the
         load balancer.
        :paramtype frontend_ip_configurations:
         list[~azure.mgmt.network.v2018_07_01.models.FrontendIPConfiguration]
        :keyword backend_address_pools: Collection of backend address pools used by a load balancer.
        :paramtype backend_address_pools:
         list[~azure.mgmt.network.v2018_07_01.models.BackendAddressPool]
        :keyword load_balancing_rules: Object collection representing the load balancing rules of the
         load balancer.
        :paramtype load_balancing_rules:
         list[~azure.mgmt.network.v2018_07_01.models.LoadBalancingRule]
        :keyword probes: Collection of probe objects used in the load balancer.
        :paramtype probes: list[~azure.mgmt.network.v2018_07_01.models.Probe]
        :keyword inbound_nat_rules: Collection of inbound NAT Rules used by a load balancer. Defining
         inbound NAT rules on your load balancer is mutually exclusive with defining an inbound NAT
         pool. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are
         associated with individual virtual machines cannot reference an Inbound NAT pool.
They have to reference individual inbound NAT rules. :paramtype inbound_nat_rules: list[~azure.mgmt.network.v2018_07_01.models.InboundNatRule] :keyword inbound_nat_pools: Defines an external port range for inbound NAT to a single backend port on NICs associated with a load balancer. Inbound NAT rules are created automatically for each NIC associated with the Load Balancer using an external port from this range. Defining an Inbound NAT pool on your Load Balancer is mutually exclusive with defining inbound Nat rules. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are associated with individual virtual machines cannot reference an inbound NAT pool. They have to reference individual inbound NAT rules. :paramtype inbound_nat_pools: list[~azure.mgmt.network.v2018_07_01.models.InboundNatPool] :keyword outbound_rules: The outbound rules. :paramtype outbound_rules: list[~azure.mgmt.network.v2018_07_01.models.OutboundRule] :keyword resource_guid: The resource GUID property of the load balancer resource. :paramtype resource_guid: str :keyword provisioning_state: Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.sku = sku self.etag = etag self.frontend_ip_configurations = frontend_ip_configurations self.backend_address_pools = backend_address_pools self.load_balancing_rules = load_balancing_rules self.probes = probes self.inbound_nat_rules = inbound_nat_rules self.inbound_nat_pools = inbound_nat_pools self.outbound_rules = outbound_rules self.resource_guid = resource_guid self.provisioning_state = provisioning_state class LoadBalancerBackendAddressPoolListResult(_serialization.Model): """Response for ListBackendAddressPool API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A list of backend address pools in a load balancer. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.BackendAddressPool] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[BackendAddressPool]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.BackendAddressPool"]] = None, **kwargs): """ :keyword value: A list of backend address pools in a load balancer. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.BackendAddressPool] """ super().__init__(**kwargs) self.value = value self.next_link = None class LoadBalancerFrontendIPConfigurationListResult(_serialization.Model): """Response for ListFrontendIPConfiguration API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A list of frontend IP configurations in a load balancer. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.FrontendIPConfiguration] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[FrontendIPConfiguration]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.FrontendIPConfiguration"]] = None, **kwargs): """ :keyword value: A list of frontend IP configurations in a load balancer. 
:paramtype value: list[~azure.mgmt.network.v2018_07_01.models.FrontendIPConfiguration] """ super().__init__(**kwargs) self.value = value self.next_link = None class LoadBalancerListResult(_serialization.Model): """Response for ListLoadBalancers API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A list of load balancers in a resource group. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.LoadBalancer] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[LoadBalancer]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.LoadBalancer"]] = None, **kwargs): """ :keyword value: A list of load balancers in a resource group. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.LoadBalancer] """ super().__init__(**kwargs) self.value = value self.next_link = None class LoadBalancerLoadBalancingRuleListResult(_serialization.Model): """Response for ListLoadBalancingRule API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A list of load balancing rules in a load balancer. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.LoadBalancingRule] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[LoadBalancingRule]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.LoadBalancingRule"]] = None, **kwargs): """ :keyword value: A list of load balancing rules in a load balancer. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.LoadBalancingRule] """ super().__init__(**kwargs) self.value = value self.next_link = None class LoadBalancerProbeListResult(_serialization.Model): """Response for ListProbe API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A list of probes in a load balancer. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.Probe] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[Probe]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.Probe"]] = None, **kwargs): """ :keyword value: A list of probes in a load balancer. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.Probe] """ super().__init__(**kwargs) self.value = value self.next_link = None class LoadBalancerSku(_serialization.Model): """SKU of a load balancer. :ivar name: Name of a load balancer SKU. Known values are: "Basic" and "Standard". :vartype name: str or ~azure.mgmt.network.v2018_07_01.models.LoadBalancerSkuName """ _attribute_map = { "name": {"key": "name", "type": "str"}, } def __init__(self, *, name: Optional[Union[str, "_models.LoadBalancerSkuName"]] = None, **kwargs): """ :keyword name: Name of a load balancer SKU. Known values are: "Basic" and "Standard". 
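        For example (illustrative only)::

            sku = LoadBalancerSku(name="Standard")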
:paramtype name: str or ~azure.mgmt.network.v2018_07_01.models.LoadBalancerSkuName """ super().__init__(**kwargs) self.name = name class LoadBalancingRule(SubResource): # pylint: disable=too-many-instance-attributes """A load balancing rule for a load balancer. :ivar id: Resource ID. :vartype id: str :ivar name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar frontend_ip_configuration: A reference to frontend IP addresses. :vartype frontend_ip_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar backend_address_pool: A reference to a pool of DIPs. Inbound traffic is randomly load balanced across IPs in the backend IPs. :vartype backend_address_pool: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar probe: The reference of the load balancer probe used by the load balancing rule. :vartype probe: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar protocol: The transport protocol for the endpoint. Possible values are 'Udp' or 'Tcp' or 'All'. Known values are: "Udp", "Tcp", and "All". :vartype protocol: str or ~azure.mgmt.network.v2018_07_01.models.TransportProtocol :ivar load_distribution: The load distribution policy for this rule. Possible values are 'Default', 'SourceIP', and 'SourceIPProtocol'. Known values are: "Default", "SourceIP", and "SourceIPProtocol". :vartype load_distribution: str or ~azure.mgmt.network.v2018_07_01.models.LoadDistribution :ivar frontend_port: The port for the external endpoint. Port numbers for each rule must be unique within the Load Balancer. Acceptable values are between 0 and 65534. Note that value 0 enables "Any Port". :vartype frontend_port: int :ivar backend_port: The port used for internal connections on the endpoint. Acceptable values are between 0 and 65535. Note that value 0 enables "Any Port". :vartype backend_port: int :ivar idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to TCP. :vartype idle_timeout_in_minutes: int :ivar enable_floating_ip: Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed after you create the endpoint. :vartype enable_floating_ip: bool :ivar enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected connection termination. This element is only used when the protocol is set to TCP. :vartype enable_tcp_reset: bool :ivar disable_outbound_snat: Configures SNAT for the VMs in the backend pool to use the publicIP address specified in the frontend of the load balancing rule. :vartype disable_outbound_snat: bool :ivar provisioning_state: Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. 
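    A construction sketch (illustrative only; the resource IDs are placeholders
    for real frontend IP configuration and backend pool IDs)::

        rule = LoadBalancingRule(
            name="http-rule",
            protocol="Tcp",
            frontend_port=80,
            backend_port=8080,
            idle_timeout_in_minutes=4,
            frontend_ip_configuration=SubResource(id="<frontend-ip-config-id>"),
            backend_address_pool=SubResource(id="<backend-pool-id>"),
        )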
:vartype provisioning_state: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "frontend_ip_configuration": {"key": "properties.frontendIPConfiguration", "type": "SubResource"}, "backend_address_pool": {"key": "properties.backendAddressPool", "type": "SubResource"}, "probe": {"key": "properties.probe", "type": "SubResource"}, "protocol": {"key": "properties.protocol", "type": "str"}, "load_distribution": {"key": "properties.loadDistribution", "type": "str"}, "frontend_port": {"key": "properties.frontendPort", "type": "int"}, "backend_port": {"key": "properties.backendPort", "type": "int"}, "idle_timeout_in_minutes": {"key": "properties.idleTimeoutInMinutes", "type": "int"}, "enable_floating_ip": {"key": "properties.enableFloatingIP", "type": "bool"}, "enable_tcp_reset": {"key": "properties.enableTcpReset", "type": "bool"}, "disable_outbound_snat": {"key": "properties.disableOutboundSnat", "type": "bool"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, frontend_ip_configuration: Optional["_models.SubResource"] = None, backend_address_pool: Optional["_models.SubResource"] = None, probe: Optional["_models.SubResource"] = None, protocol: Optional[Union[str, "_models.TransportProtocol"]] = None, load_distribution: Optional[Union[str, "_models.LoadDistribution"]] = None, frontend_port: Optional[int] = None, backend_port: Optional[int] = None, idle_timeout_in_minutes: Optional[int] = None, enable_floating_ip: Optional[bool] = None, enable_tcp_reset: Optional[bool] = None, disable_outbound_snat: Optional[bool] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword frontend_ip_configuration: A reference to frontend IP addresses. :paramtype frontend_ip_configuration: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword backend_address_pool: A reference to a pool of DIPs. Inbound traffic is randomly load balanced across IPs in the backend IPs. :paramtype backend_address_pool: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword probe: The reference of the load balancer probe used by the load balancing rule. :paramtype probe: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword protocol: The transport protocol for the endpoint. Possible values are 'Udp' or 'Tcp' or 'All'. Known values are: "Udp", "Tcp", and "All". :paramtype protocol: str or ~azure.mgmt.network.v2018_07_01.models.TransportProtocol :keyword load_distribution: The load distribution policy for this rule. Possible values are 'Default', 'SourceIP', and 'SourceIPProtocol'. Known values are: "Default", "SourceIP", and "SourceIPProtocol". :paramtype load_distribution: str or ~azure.mgmt.network.v2018_07_01.models.LoadDistribution :keyword frontend_port: The port for the external endpoint. Port numbers for each rule must be unique within the Load Balancer. Acceptable values are between 0 and 65534. Note that value 0 enables "Any Port". :paramtype frontend_port: int :keyword backend_port: The port used for internal connections on the endpoint. 
Acceptable values are between 0 and 65535. Note that value 0 enables "Any Port". :paramtype backend_port: int :keyword idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to TCP. :paramtype idle_timeout_in_minutes: int :keyword enable_floating_ip: Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed after you create the endpoint. :paramtype enable_floating_ip: bool :keyword enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected connection termination. This element is only used when the protocol is set to TCP. :paramtype enable_tcp_reset: bool :keyword disable_outbound_snat: Configures SNAT for the VMs in the backend pool to use the publicIP address specified in the frontend of the load balancing rule. :paramtype disable_outbound_snat: bool :keyword provisioning_state: Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.frontend_ip_configuration = frontend_ip_configuration self.backend_address_pool = backend_address_pool self.probe = probe self.protocol = protocol self.load_distribution = load_distribution self.frontend_port = frontend_port self.backend_port = backend_port self.idle_timeout_in_minutes = idle_timeout_in_minutes self.enable_floating_ip = enable_floating_ip self.enable_tcp_reset = enable_tcp_reset self.disable_outbound_snat = disable_outbound_snat self.provisioning_state = provisioning_state class LocalNetworkGateway(Resource): # pylint: disable=too-many-instance-attributes """A common class for general resource information. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar local_network_address_space: Local network site address space. :vartype local_network_address_space: ~azure.mgmt.network.v2018_07_01.models.AddressSpace :ivar gateway_ip_address: IP address of local network gateway. :vartype gateway_ip_address: str :ivar bgp_settings: Local network gateway's BGP speaker settings. :vartype bgp_settings: ~azure.mgmt.network.v2018_07_01.models.BgpSettings :ivar resource_guid: The resource GUID property of the LocalNetworkGateway resource. :vartype resource_guid: str :ivar provisioning_state: The provisioning state of the LocalNetworkGateway resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. 
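    A construction sketch (illustrative only; the addresses are documentation
    placeholders, and ``AddressSpace``, defined elsewhere in this module, is
    assumed to accept ``address_prefixes``)::

        gateway = LocalNetworkGateway(
            location="westus",
            gateway_ip_address="203.0.113.10",
            local_network_address_space=AddressSpace(address_prefixes=["10.0.0.0/16"]),
        )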
    :vartype provisioning_state: str
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "local_network_address_space": {"key": "properties.localNetworkAddressSpace", "type": "AddressSpace"},
        "gateway_ip_address": {"key": "properties.gatewayIpAddress", "type": "str"},
        "bgp_settings": {"key": "properties.bgpSettings", "type": "BgpSettings"},
        "resource_guid": {"key": "properties.resourceGuid", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        local_network_address_space: Optional["_models.AddressSpace"] = None,
        gateway_ip_address: Optional[str] = None,
        bgp_settings: Optional["_models.BgpSettings"] = None,
        resource_guid: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword local_network_address_space: Local network site address space.
        :paramtype local_network_address_space: ~azure.mgmt.network.v2018_07_01.models.AddressSpace
        :keyword gateway_ip_address: IP address of local network gateway.
        :paramtype gateway_ip_address: str
        :keyword bgp_settings: Local network gateway's BGP speaker settings.
        :paramtype bgp_settings: ~azure.mgmt.network.v2018_07_01.models.BgpSettings
        :keyword resource_guid: The resource GUID property of the LocalNetworkGateway resource.
        :paramtype resource_guid: str
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = etag
        self.local_network_address_space = local_network_address_space
        self.gateway_ip_address = gateway_ip_address
        self.bgp_settings = bgp_settings
        self.resource_guid = resource_guid
        self.provisioning_state = None


class LocalNetworkGatewayListResult(_serialization.Model):
    """Response for ListLocalNetworkGateways API service call.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: A list of local network gateways that exist in a resource group.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.LocalNetworkGateway]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _validation = {
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[LocalNetworkGateway]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.LocalNetworkGateway"]] = None, **kwargs):
        """
        :keyword value: A list of local network gateways that exist in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.LocalNetworkGateway]
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = None


class LogSpecification(_serialization.Model):
    """Description of logging specification.

    :ivar name: The name of the specification.
    :vartype name: str
    :ivar display_name: The display name of the specification.
    :vartype display_name: str
    :ivar blob_duration: Duration of the blob.
    :vartype blob_duration: str
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "display_name": {"key": "displayName", "type": "str"},
        "blob_duration": {"key": "blobDuration", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        display_name: Optional[str] = None,
        blob_duration: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name: The name of the specification.
        :paramtype name: str
        :keyword display_name: The display name of the specification.
        :paramtype display_name: str
        :keyword blob_duration: Duration of the blob.
        :paramtype blob_duration: str
        """
        super().__init__(**kwargs)
        self.name = name
        self.display_name = display_name
        self.blob_duration = blob_duration


class MatchedRule(_serialization.Model):
    """Matched rule.

    :ivar rule_name: Name of the matched network security rule.
    :vartype rule_name: str
    :ivar action: The network traffic is allowed or denied. Possible values are 'Allow' and
     'Deny'.
    :vartype action: str
    """

    _attribute_map = {
        "rule_name": {"key": "ruleName", "type": "str"},
        "action": {"key": "action", "type": "str"},
    }

    def __init__(self, *, rule_name: Optional[str] = None, action: Optional[str] = None, **kwargs):
        """
        :keyword rule_name: Name of the matched network security rule.
        :paramtype rule_name: str
        :keyword action: The network traffic is allowed or denied. Possible values are 'Allow' and
         'Deny'.
        :paramtype action: str
        """
        super().__init__(**kwargs)
        self.rule_name = rule_name
        self.action = action


class MetricSpecification(_serialization.Model):  # pylint: disable=too-many-instance-attributes
    """Description of metrics specification.

    :ivar name: The name of the metric.
    :vartype name: str
    :ivar display_name: The display name of the metric.
    :vartype display_name: str
    :ivar display_description: The description of the metric.
    :vartype display_description: str
    :ivar unit: The unit in which the metric is displayed.
    :vartype unit: str
    :ivar aggregation_type: The aggregation type.
    :vartype aggregation_type: str
    :ivar availabilities: List of availabilities.
    :vartype availabilities: list[~azure.mgmt.network.v2018_07_01.models.Availability]
    :ivar enable_regional_mdm_account: Whether the regional MDM account is enabled.
    :vartype enable_regional_mdm_account: bool
    :ivar fill_gap_with_zero: Whether gaps would be filled with zeros.
    :vartype fill_gap_with_zero: bool
    :ivar metric_filter_pattern: Pattern for the filter of the metric.
    :vartype metric_filter_pattern: str
    :ivar dimensions: List of dimensions.
    :vartype dimensions: list[~azure.mgmt.network.v2018_07_01.models.Dimension]
    :ivar is_internal: Whether the metric is internal.
    :vartype is_internal: bool
    :ivar source_mdm_account: The source MDM account.
    :vartype source_mdm_account: str
    :ivar source_mdm_namespace: The source MDM namespace.
    :vartype source_mdm_namespace: str
    :ivar resource_id_dimension_name_override: The resource Id dimension name override.
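    A construction sketch (illustrative only; the metric name and unit are
    hypothetical)::

        metric = MetricSpecification(
            name="BytesIn",
            display_name="Bytes received",
            unit="Bytes",
            aggregation_type="Total",
            fill_gap_with_zero=True,
        )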
    :vartype resource_id_dimension_name_override: str
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "display_name": {"key": "displayName", "type": "str"},
        "display_description": {"key": "displayDescription", "type": "str"},
        "unit": {"key": "unit", "type": "str"},
        "aggregation_type": {"key": "aggregationType", "type": "str"},
        "availabilities": {"key": "availabilities", "type": "[Availability]"},
        "enable_regional_mdm_account": {"key": "enableRegionalMdmAccount", "type": "bool"},
        "fill_gap_with_zero": {"key": "fillGapWithZero", "type": "bool"},
        "metric_filter_pattern": {"key": "metricFilterPattern", "type": "str"},
        "dimensions": {"key": "dimensions", "type": "[Dimension]"},
        "is_internal": {"key": "isInternal", "type": "bool"},
        "source_mdm_account": {"key": "sourceMdmAccount", "type": "str"},
        "source_mdm_namespace": {"key": "sourceMdmNamespace", "type": "str"},
        "resource_id_dimension_name_override": {"key": "resourceIdDimensionNameOverride", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        display_name: Optional[str] = None,
        display_description: Optional[str] = None,
        unit: Optional[str] = None,
        aggregation_type: Optional[str] = None,
        availabilities: Optional[List["_models.Availability"]] = None,
        enable_regional_mdm_account: Optional[bool] = None,
        fill_gap_with_zero: Optional[bool] = None,
        metric_filter_pattern: Optional[str] = None,
        dimensions: Optional[List["_models.Dimension"]] = None,
        is_internal: Optional[bool] = None,
        source_mdm_account: Optional[str] = None,
        source_mdm_namespace: Optional[str] = None,
        resource_id_dimension_name_override: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword name: The name of the metric.
        :paramtype name: str
        :keyword display_name: The display name of the metric.
        :paramtype display_name: str
        :keyword display_description: The description of the metric.
        :paramtype display_description: str
        :keyword unit: The unit in which the metric is displayed.
        :paramtype unit: str
        :keyword aggregation_type: The aggregation type.
        :paramtype aggregation_type: str
        :keyword availabilities: List of availabilities.
        :paramtype availabilities: list[~azure.mgmt.network.v2018_07_01.models.Availability]
        :keyword enable_regional_mdm_account: Whether the regional MDM account is enabled.
        :paramtype enable_regional_mdm_account: bool
        :keyword fill_gap_with_zero: Whether gaps would be filled with zeros.
        :paramtype fill_gap_with_zero: bool
        :keyword metric_filter_pattern: Pattern for the filter of the metric.
        :paramtype metric_filter_pattern: str
        :keyword dimensions: List of dimensions.
        :paramtype dimensions: list[~azure.mgmt.network.v2018_07_01.models.Dimension]
        :keyword is_internal: Whether the metric is internal.
        :paramtype is_internal: bool
        :keyword source_mdm_account: The source MDM account.
        :paramtype source_mdm_account: str
        :keyword source_mdm_namespace: The source MDM namespace.
        :paramtype source_mdm_namespace: str
        :keyword resource_id_dimension_name_override: The resource Id dimension name override.
        :paramtype resource_id_dimension_name_override: str
        """
        super().__init__(**kwargs)
        self.name = name
        self.display_name = display_name
        self.display_description = display_description
        self.unit = unit
        self.aggregation_type = aggregation_type
        self.availabilities = availabilities
        self.enable_regional_mdm_account = enable_regional_mdm_account
        self.fill_gap_with_zero = fill_gap_with_zero
        self.metric_filter_pattern = metric_filter_pattern
        self.dimensions = dimensions
        self.is_internal = is_internal
        self.source_mdm_account = source_mdm_account
        self.source_mdm_namespace = source_mdm_namespace
        self.resource_id_dimension_name_override = resource_id_dimension_name_override


class NetworkConfigurationDiagnosticParameters(_serialization.Model):
    """Parameters to get network configuration diagnostic.

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: The ID of the target resource to perform network configuration
     diagnostic. Valid options are VM, NetworkInterface, VMSS/NetworkInterface and Application
     Gateway. Required.
    :vartype target_resource_id: str
    :ivar queries: List of traffic queries. Required.
    :vartype queries: list[~azure.mgmt.network.v2018_07_01.models.TrafficQuery]
    """

    _validation = {
        "target_resource_id": {"required": True},
        "queries": {"required": True},
    }

    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
        "queries": {"key": "queries", "type": "[TrafficQuery]"},
    }

    def __init__(self, *, target_resource_id: str, queries: List["_models.TrafficQuery"], **kwargs):
        """
        :keyword target_resource_id: The ID of the target resource to perform network configuration
         diagnostic. Valid options are VM, NetworkInterface, VMSS/NetworkInterface and Application
         Gateway. Required.
        :paramtype target_resource_id: str
        :keyword queries: List of traffic queries. Required.
        :paramtype queries: list[~azure.mgmt.network.v2018_07_01.models.TrafficQuery]
        """
        super().__init__(**kwargs)
        self.target_resource_id = target_resource_id
        self.queries = queries


class NetworkConfigurationDiagnosticResponse(_serialization.Model):
    """Results of network configuration diagnostic on the target resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar results: List of network configuration diagnostic results.
    :vartype results:
     list[~azure.mgmt.network.v2018_07_01.models.NetworkConfigurationDiagnosticResult]
    """

    _validation = {
        "results": {"readonly": True},
    }

    _attribute_map = {
        "results": {"key": "results", "type": "[NetworkConfigurationDiagnosticResult]"},
    }

    def __init__(self, **kwargs):
        """ """
        super().__init__(**kwargs)
        self.results = None


class NetworkConfigurationDiagnosticResult(_serialization.Model):
    """Network configuration diagnostic result corresponding to the provided traffic query.

    :ivar traffic_query: Parameters to compare with network configuration.
    :vartype traffic_query: ~azure.mgmt.network.v2018_07_01.models.TrafficQuery
    :ivar network_security_group_result: Network configuration diagnostic result corresponding to
     the provided traffic query.
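    Results of this shape come back for a request built with the
    ``NetworkConfigurationDiagnosticParameters`` model above; a request sketch
    (illustrative only; the target ID is a placeholder, and the ``TrafficQuery``
    fields shown are assumed from its definition elsewhere in this module)::

        params = NetworkConfigurationDiagnosticParameters(
            target_resource_id="<vm-resource-id>",
            queries=[
                # Field names on TrafficQuery are assumptions for illustration.
                TrafficQuery(
                    direction="Inbound",
                    protocol="TCP",
                    source="10.1.0.4",
                    destination="198.51.100.14",
                    destination_port="80",
                ),
            ],
        )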
    :vartype network_security_group_result:
     ~azure.mgmt.network.v2018_07_01.models.NetworkSecurityGroupResult
    """

    _attribute_map = {
        "traffic_query": {"key": "trafficQuery", "type": "TrafficQuery"},
        "network_security_group_result": {"key": "networkSecurityGroupResult", "type": "NetworkSecurityGroupResult"},
    }

    def __init__(
        self,
        *,
        traffic_query: Optional["_models.TrafficQuery"] = None,
        network_security_group_result: Optional["_models.NetworkSecurityGroupResult"] = None,
        **kwargs
    ):
        """
        :keyword traffic_query: Parameters to compare with network configuration.
        :paramtype traffic_query: ~azure.mgmt.network.v2018_07_01.models.TrafficQuery
        :keyword network_security_group_result: Network configuration diagnostic result corresponding
         to the provided traffic query.
        :paramtype network_security_group_result:
         ~azure.mgmt.network.v2018_07_01.models.NetworkSecurityGroupResult
        """
        super().__init__(**kwargs)
        self.traffic_query = traffic_query
        self.network_security_group_result = network_security_group_result


class NetworkInterface(Resource):  # pylint: disable=too-many-instance-attributes
    """A network interface in a resource group.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar virtual_machine: The reference of a virtual machine.
    :vartype virtual_machine: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar network_security_group: The reference of the NetworkSecurityGroup resource.
    :vartype network_security_group: ~azure.mgmt.network.v2018_07_01.models.NetworkSecurityGroup
    :ivar ip_configurations: A list of IPConfigurations of the network interface.
    :vartype ip_configurations:
     list[~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceIPConfiguration]
    :ivar dns_settings: The DNS settings in network interface.
    :vartype dns_settings: ~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceDnsSettings
    :ivar mac_address: The MAC address of the network interface.
    :vartype mac_address: str
    :ivar primary: Gets whether this is a primary network interface on a virtual machine.
    :vartype primary: bool
    :ivar enable_accelerated_networking: If the network interface is accelerated networking
     enabled.
    :vartype enable_accelerated_networking: bool
    :ivar enable_ip_forwarding: Indicates whether IP forwarding is enabled on this network
     interface.
    :vartype enable_ip_forwarding: bool
    :ivar resource_guid: The resource GUID property of the network interface resource.
    :vartype resource_guid: str
    :ivar provisioning_state: The provisioning state of the public IP resource. Possible values
     are: 'Updating', 'Deleting', and 'Failed'.
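    A construction sketch (illustrative only; the subnet ID is a placeholder, and
    ``Subnet``, defined elsewhere in this module, is assumed to accept ``id`` like
    the other sub-resource models)::

        nic = NetworkInterface(
            location="westus",
            ip_configurations=[
                NetworkInterfaceIPConfiguration(
                    name="ipconfig1",
                    subnet=Subnet(id="<subnet-resource-id>"),
                    private_ip_allocation_method="Dynamic",
                )
            ],
        )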
:vartype provisioning_state: str """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "virtual_machine": {"key": "properties.virtualMachine", "type": "SubResource"}, "network_security_group": {"key": "properties.networkSecurityGroup", "type": "NetworkSecurityGroup"}, "ip_configurations": {"key": "properties.ipConfigurations", "type": "[NetworkInterfaceIPConfiguration]"}, "dns_settings": {"key": "properties.dnsSettings", "type": "NetworkInterfaceDnsSettings"}, "mac_address": {"key": "properties.macAddress", "type": "str"}, "primary": {"key": "properties.primary", "type": "bool"}, "enable_accelerated_networking": {"key": "properties.enableAcceleratedNetworking", "type": "bool"}, "enable_ip_forwarding": {"key": "properties.enableIPForwarding", "type": "bool"}, "resource_guid": {"key": "properties.resourceGuid", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, etag: Optional[str] = None, virtual_machine: Optional["_models.SubResource"] = None, network_security_group: Optional["_models.NetworkSecurityGroup"] = None, ip_configurations: Optional[List["_models.NetworkInterfaceIPConfiguration"]] = None, dns_settings: Optional["_models.NetworkInterfaceDnsSettings"] = None, mac_address: Optional[str] = None, primary: Optional[bool] = None, enable_accelerated_networking: Optional[bool] = None, enable_ip_forwarding: Optional[bool] = None, resource_guid: Optional[str] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword virtual_machine: The reference of a virtual machine. :paramtype virtual_machine: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword network_security_group: The reference of the NetworkSecurityGroup resource. :paramtype network_security_group: ~azure.mgmt.network.v2018_07_01.models.NetworkSecurityGroup :keyword ip_configurations: A list of IPConfigurations of the network interface. :paramtype ip_configurations: list[~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceIPConfiguration] :keyword dns_settings: The DNS settings in network interface. :paramtype dns_settings: ~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceDnsSettings :keyword mac_address: The MAC address of the network interface. :paramtype mac_address: str :keyword primary: Gets whether this is a primary network interface on a virtual machine. :paramtype primary: bool :keyword enable_accelerated_networking: If the network interface is accelerated networking enabled. :paramtype enable_accelerated_networking: bool :keyword enable_ip_forwarding: Indicates whether IP forwarding is enabled on this network interface. :paramtype enable_ip_forwarding: bool :keyword resource_guid: The resource GUID property of the network interface resource. 
:paramtype resource_guid: str :keyword provisioning_state: The provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.etag = etag self.virtual_machine = virtual_machine self.network_security_group = network_security_group self.ip_configurations = ip_configurations self.dns_settings = dns_settings self.mac_address = mac_address self.primary = primary self.enable_accelerated_networking = enable_accelerated_networking self.enable_ip_forwarding = enable_ip_forwarding self.resource_guid = resource_guid self.provisioning_state = provisioning_state class NetworkInterfaceAssociation(_serialization.Model): """Network interface and its custom security rules. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Network interface ID. :vartype id: str :ivar security_rules: Collection of custom security rules. :vartype security_rules: list[~azure.mgmt.network.v2018_07_01.models.SecurityRule] """ _validation = { "id": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "security_rules": {"key": "securityRules", "type": "[SecurityRule]"}, } def __init__(self, *, security_rules: Optional[List["_models.SecurityRule"]] = None, **kwargs): """ :keyword security_rules: Collection of custom security rules. :paramtype security_rules: list[~azure.mgmt.network.v2018_07_01.models.SecurityRule] """ super().__init__(**kwargs) self.id = None self.security_rules = security_rules class NetworkInterfaceDnsSettings(_serialization.Model): """DNS settings of a network interface. :ivar dns_servers: List of DNS servers IP addresses. Use 'AzureProvidedDNS' to switch to azure provided DNS resolution. 'AzureProvidedDNS' value cannot be combined with other IPs, it must be the only value in dnsServers collection. :vartype dns_servers: list[str] :ivar applied_dns_servers: If the VM that uses this NIC is part of an Availability Set, then this list will have the union of all DNS servers from all NICs that are part of the Availability Set. This property is what is configured on each of those VMs. :vartype applied_dns_servers: list[str] :ivar internal_dns_name_label: Relative DNS name for this NIC used for internal communications between VMs in the same virtual network. :vartype internal_dns_name_label: str :ivar internal_fqdn: Fully qualified DNS name supporting internal communications between VMs in the same virtual network. :vartype internal_fqdn: str :ivar internal_domain_name_suffix: Even if internalDnsNameLabel is not specified, a DNS entry is created for the primary NIC of the VM. This DNS name can be constructed by concatenating the VM name with the value of internalDomainNameSuffix. :vartype internal_domain_name_suffix: str """ _attribute_map = { "dns_servers": {"key": "dnsServers", "type": "[str]"}, "applied_dns_servers": {"key": "appliedDnsServers", "type": "[str]"}, "internal_dns_name_label": {"key": "internalDnsNameLabel", "type": "str"}, "internal_fqdn": {"key": "internalFqdn", "type": "str"}, "internal_domain_name_suffix": {"key": "internalDomainNameSuffix", "type": "str"}, } def __init__( self, *, dns_servers: Optional[List[str]] = None, applied_dns_servers: Optional[List[str]] = None, internal_dns_name_label: Optional[str] = None, internal_fqdn: Optional[str] = None, internal_domain_name_suffix: Optional[str] = None, **kwargs ): """ :keyword dns_servers: List of DNS servers IP addresses. 
Use 'AzureProvidedDNS' to switch to azure provided DNS resolution. 'AzureProvidedDNS' value cannot be combined with other IPs, it must be the only value in dnsServers collection. :paramtype dns_servers: list[str] :keyword applied_dns_servers: If the VM that uses this NIC is part of an Availability Set, then this list will have the union of all DNS servers from all NICs that are part of the Availability Set. This property is what is configured on each of those VMs. :paramtype applied_dns_servers: list[str] :keyword internal_dns_name_label: Relative DNS name for this NIC used for internal communications between VMs in the same virtual network. :paramtype internal_dns_name_label: str :keyword internal_fqdn: Fully qualified DNS name supporting internal communications between VMs in the same virtual network. :paramtype internal_fqdn: str :keyword internal_domain_name_suffix: Even if internalDnsNameLabel is not specified, a DNS entry is created for the primary NIC of the VM. This DNS name can be constructed by concatenating the VM name with the value of internalDomainNameSuffix. :paramtype internal_domain_name_suffix: str """ super().__init__(**kwargs) self.dns_servers = dns_servers self.applied_dns_servers = applied_dns_servers self.internal_dns_name_label = internal_dns_name_label self.internal_fqdn = internal_fqdn self.internal_domain_name_suffix = internal_domain_name_suffix class NetworkInterfaceIPConfiguration(SubResource): # pylint: disable=too-many-instance-attributes """IPConfiguration in a network interface. :ivar id: Resource ID. :vartype id: str :ivar name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar application_gateway_backend_address_pools: The reference of ApplicationGatewayBackendAddressPool resource. :vartype application_gateway_backend_address_pools: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendAddressPool] :ivar load_balancer_backend_address_pools: The reference of LoadBalancerBackendAddressPool resource. :vartype load_balancer_backend_address_pools: list[~azure.mgmt.network.v2018_07_01.models.BackendAddressPool] :ivar load_balancer_inbound_nat_rules: A list of references of LoadBalancerInboundNatRules. :vartype load_balancer_inbound_nat_rules: list[~azure.mgmt.network.v2018_07_01.models.InboundNatRule] :ivar private_ip_address: Private IP address of the IP configuration. :vartype private_ip_address: str :ivar private_ip_allocation_method: Defines how a private IP address is assigned. Possible values are: 'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic". :vartype private_ip_allocation_method: str or ~azure.mgmt.network.v2018_07_01.models.IPAllocationMethod :ivar private_ip_address_version: Available from Api-Version 2016-03-30 onwards, it represents whether the specific ipconfiguration is IPv4 or IPv6. Default is taken as IPv4. Possible values are: 'IPv4' and 'IPv6'. Known values are: "IPv4" and "IPv6". :vartype private_ip_address_version: str or ~azure.mgmt.network.v2018_07_01.models.IPVersion :ivar subnet: Subnet bound to the IP configuration. :vartype subnet: ~azure.mgmt.network.v2018_07_01.models.Subnet :ivar primary: Gets whether this is a primary customer address on the network interface. :vartype primary: bool :ivar public_ip_address: Public IP address bound to the IP configuration. 
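    A construction sketch for a statically allocated address (illustrative only;
    the address is a placeholder)::

        ip_config = NetworkInterfaceIPConfiguration(
            name="ipconfig-static",
            private_ip_address="10.0.0.5",
            private_ip_allocation_method="Static",
            private_ip_address_version="IPv4",
            primary=True,
        )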
:vartype public_ip_address: ~azure.mgmt.network.v2018_07_01.models.PublicIPAddress :ivar application_security_groups: Application security groups in which the IP configuration is included. :vartype application_security_groups: list[~azure.mgmt.network.v2018_07_01.models.ApplicationSecurityGroup] :ivar provisioning_state: The provisioning state of the network interface IP configuration. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "application_gateway_backend_address_pools": { "key": "properties.applicationGatewayBackendAddressPools", "type": "[ApplicationGatewayBackendAddressPool]", }, "load_balancer_backend_address_pools": { "key": "properties.loadBalancerBackendAddressPools", "type": "[BackendAddressPool]", }, "load_balancer_inbound_nat_rules": { "key": "properties.loadBalancerInboundNatRules", "type": "[InboundNatRule]", }, "private_ip_address": {"key": "properties.privateIPAddress", "type": "str"}, "private_ip_allocation_method": {"key": "properties.privateIPAllocationMethod", "type": "str"}, "private_ip_address_version": {"key": "properties.privateIPAddressVersion", "type": "str"}, "subnet": {"key": "properties.subnet", "type": "Subnet"}, "primary": {"key": "properties.primary", "type": "bool"}, "public_ip_address": {"key": "properties.publicIPAddress", "type": "PublicIPAddress"}, "application_security_groups": { "key": "properties.applicationSecurityGroups", "type": "[ApplicationSecurityGroup]", }, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, application_gateway_backend_address_pools: Optional[ List["_models.ApplicationGatewayBackendAddressPool"] ] = None, load_balancer_backend_address_pools: Optional[List["_models.BackendAddressPool"]] = None, load_balancer_inbound_nat_rules: Optional[List["_models.InboundNatRule"]] = None, private_ip_address: Optional[str] = None, private_ip_allocation_method: Optional[Union[str, "_models.IPAllocationMethod"]] = None, private_ip_address_version: Optional[Union[str, "_models.IPVersion"]] = None, subnet: Optional["_models.Subnet"] = None, primary: Optional[bool] = None, public_ip_address: Optional["_models.PublicIPAddress"] = None, application_security_groups: Optional[List["_models.ApplicationSecurityGroup"]] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword application_gateway_backend_address_pools: The reference of ApplicationGatewayBackendAddressPool resource. :paramtype application_gateway_backend_address_pools: list[~azure.mgmt.network.v2018_07_01.models.ApplicationGatewayBackendAddressPool] :keyword load_balancer_backend_address_pools: The reference of LoadBalancerBackendAddressPool resource. :paramtype load_balancer_backend_address_pools: list[~azure.mgmt.network.v2018_07_01.models.BackendAddressPool] :keyword load_balancer_inbound_nat_rules: A list of references of LoadBalancerInboundNatRules. 
:paramtype load_balancer_inbound_nat_rules: list[~azure.mgmt.network.v2018_07_01.models.InboundNatRule] :keyword private_ip_address: Private IP address of the IP configuration. :paramtype private_ip_address: str :keyword private_ip_allocation_method: Defines how a private IP address is assigned. Possible values are: 'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic". :paramtype private_ip_allocation_method: str or ~azure.mgmt.network.v2018_07_01.models.IPAllocationMethod :keyword private_ip_address_version: Available from Api-Version 2016-03-30 onwards, it represents whether the specific ipconfiguration is IPv4 or IPv6. Default is taken as IPv4. Possible values are: 'IPv4' and 'IPv6'. Known values are: "IPv4" and "IPv6". :paramtype private_ip_address_version: str or ~azure.mgmt.network.v2018_07_01.models.IPVersion :keyword subnet: Subnet bound to the IP configuration. :paramtype subnet: ~azure.mgmt.network.v2018_07_01.models.Subnet :keyword primary: Gets whether this is a primary customer address on the network interface. :paramtype primary: bool :keyword public_ip_address: Public IP address bound to the IP configuration. :paramtype public_ip_address: ~azure.mgmt.network.v2018_07_01.models.PublicIPAddress :keyword application_security_groups: Application security groups in which the IP configuration is included. :paramtype application_security_groups: list[~azure.mgmt.network.v2018_07_01.models.ApplicationSecurityGroup] :keyword provisioning_state: The provisioning state of the network interface IP configuration. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.application_gateway_backend_address_pools = application_gateway_backend_address_pools self.load_balancer_backend_address_pools = load_balancer_backend_address_pools self.load_balancer_inbound_nat_rules = load_balancer_inbound_nat_rules self.private_ip_address = private_ip_address self.private_ip_allocation_method = private_ip_allocation_method self.private_ip_address_version = private_ip_address_version self.subnet = subnet self.primary = primary self.public_ip_address = public_ip_address self.application_security_groups = application_security_groups self.provisioning_state = provisioning_state class NetworkInterfaceIPConfigurationListResult(_serialization.Model): """Response for list ip configurations API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A list of ip configurations. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceIPConfiguration] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[NetworkInterfaceIPConfiguration]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.NetworkInterfaceIPConfiguration"]] = None, **kwargs): """ :keyword value: A list of ip configurations. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceIPConfiguration] """ super().__init__(**kwargs) self.value = value self.next_link = None class NetworkInterfaceListResult(_serialization.Model): """Response for the ListNetworkInterface API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A list of network interfaces in a resource group. 
:vartype value: list[~azure.mgmt.network.v2018_07_01.models.NetworkInterface] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[NetworkInterface]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.NetworkInterface"]] = None, **kwargs): """ :keyword value: A list of network interfaces in a resource group. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.NetworkInterface] """ super().__init__(**kwargs) self.value = value self.next_link = None class NetworkInterfaceLoadBalancerListResult(_serialization.Model): """Response for list ip configurations API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A list of load balancers. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.LoadBalancer] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[LoadBalancer]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.LoadBalancer"]] = None, **kwargs): """ :keyword value: A list of load balancers. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.LoadBalancer] """ super().__init__(**kwargs) self.value = value self.next_link = None class NetworkSecurityGroup(Resource): # pylint: disable=too-many-instance-attributes """NetworkSecurityGroup resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar security_rules: A collection of security rules of the network security group. :vartype security_rules: list[~azure.mgmt.network.v2018_07_01.models.SecurityRule] :ivar default_security_rules: The default security rules of network security group. :vartype default_security_rules: list[~azure.mgmt.network.v2018_07_01.models.SecurityRule] :ivar network_interfaces: A collection of references to network interfaces. :vartype network_interfaces: list[~azure.mgmt.network.v2018_07_01.models.NetworkInterface] :ivar subnets: A collection of references to subnets. :vartype subnets: list[~azure.mgmt.network.v2018_07_01.models.Subnet] :ivar resource_guid: The resource GUID property of the network security group resource. :vartype resource_guid: str :ivar provisioning_state: The provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. 
:vartype provisioning_state: str """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "network_interfaces": {"readonly": True}, "subnets": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "security_rules": {"key": "properties.securityRules", "type": "[SecurityRule]"}, "default_security_rules": {"key": "properties.defaultSecurityRules", "type": "[SecurityRule]"}, "network_interfaces": {"key": "properties.networkInterfaces", "type": "[NetworkInterface]"}, "subnets": {"key": "properties.subnets", "type": "[Subnet]"}, "resource_guid": {"key": "properties.resourceGuid", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, etag: Optional[str] = None, security_rules: Optional[List["_models.SecurityRule"]] = None, default_security_rules: Optional[List["_models.SecurityRule"]] = None, resource_guid: Optional[str] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword security_rules: A collection of security rules of the network security group. :paramtype security_rules: list[~azure.mgmt.network.v2018_07_01.models.SecurityRule] :keyword default_security_rules: The default security rules of network security group. :paramtype default_security_rules: list[~azure.mgmt.network.v2018_07_01.models.SecurityRule] :keyword resource_guid: The resource GUID property of the network security group resource. :paramtype resource_guid: str :keyword provisioning_state: The provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.etag = etag self.security_rules = security_rules self.default_security_rules = default_security_rules self.network_interfaces = None self.subnets = None self.resource_guid = resource_guid self.provisioning_state = provisioning_state class NetworkSecurityGroupListResult(_serialization.Model): """Response for ListNetworkSecurityGroups API service call. :ivar value: A list of NetworkSecurityGroup resources. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.NetworkSecurityGroup] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[NetworkSecurityGroup]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.NetworkSecurityGroup"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: A list of NetworkSecurityGroup resources. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.NetworkSecurityGroup] :keyword next_link: The URL to get the next set of results. 
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = next_link


class NetworkSecurityGroupResult(_serialization.Model):
    """Network configuration diagnostic result corresponding to the provided traffic query.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar security_rule_access_result: The network traffic is allowed or denied. Possible values
     are 'Allow' and 'Deny'. Known values are: "Allow" and "Deny".
    :vartype security_rule_access_result: str or
     ~azure.mgmt.network.v2018_07_01.models.SecurityRuleAccess
    :ivar evaluated_network_security_groups: List of network security group diagnostic results.
    :vartype evaluated_network_security_groups:
     list[~azure.mgmt.network.v2018_07_01.models.EvaluatedNetworkSecurityGroup]
    """

    _validation = {
        "evaluated_network_security_groups": {"readonly": True},
    }

    _attribute_map = {
        "security_rule_access_result": {"key": "securityRuleAccessResult", "type": "str"},
        "evaluated_network_security_groups": {
            "key": "evaluatedNetworkSecurityGroups",
            "type": "[EvaluatedNetworkSecurityGroup]",
        },
    }

    def __init__(
        self, *, security_rule_access_result: Optional[Union[str, "_models.SecurityRuleAccess"]] = None, **kwargs
    ):
        """
        :keyword security_rule_access_result: The network traffic is allowed or denied. Possible
         values are 'Allow' and 'Deny'. Known values are: "Allow" and "Deny".
        :paramtype security_rule_access_result: str or
         ~azure.mgmt.network.v2018_07_01.models.SecurityRuleAccess
        """
        super().__init__(**kwargs)
        self.security_rule_access_result = security_rule_access_result
        self.evaluated_network_security_groups = None


class NetworkSecurityRulesEvaluationResult(_serialization.Model):
    """Network security rules evaluation result.

    :ivar name: Name of the network security rule.
    :vartype name: str
    :ivar protocol_matched: Value indicating whether protocol is matched.
    :vartype protocol_matched: bool
    :ivar source_matched: Value indicating whether source is matched.
    :vartype source_matched: bool
    :ivar source_port_matched: Value indicating whether source port is matched.
    :vartype source_port_matched: bool
    :ivar destination_matched: Value indicating whether destination is matched.
    :vartype destination_matched: bool
    :ivar destination_port_matched: Value indicating whether destination port is matched.
    :vartype destination_port_matched: bool
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "protocol_matched": {"key": "protocolMatched", "type": "bool"},
        "source_matched": {"key": "sourceMatched", "type": "bool"},
        "source_port_matched": {"key": "sourcePortMatched", "type": "bool"},
        "destination_matched": {"key": "destinationMatched", "type": "bool"},
        "destination_port_matched": {"key": "destinationPortMatched", "type": "bool"},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        protocol_matched: Optional[bool] = None,
        source_matched: Optional[bool] = None,
        source_port_matched: Optional[bool] = None,
        destination_matched: Optional[bool] = None,
        destination_port_matched: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword name: Name of the network security rule.
        :paramtype name: str
        :keyword protocol_matched: Value indicating whether protocol is matched.
        :paramtype protocol_matched: bool
        :keyword source_matched: Value indicating whether source is matched.
        :paramtype source_matched: bool
        :keyword source_port_matched: Value indicating whether source port is matched.
        :paramtype source_port_matched: bool
        :keyword destination_matched: Value indicating whether destination is matched.
:paramtype destination_matched: bool :keyword destination_port_matched: Value indicating whether destination port is matched. :paramtype destination_port_matched: bool """ super().__init__(**kwargs) self.name = name self.protocol_matched = protocol_matched self.source_matched = source_matched self.source_port_matched = source_port_matched self.destination_matched = destination_matched self.destination_port_matched = destination_port_matched class NetworkWatcher(Resource): """Network watcher in a resource group. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar provisioning_state: The provisioning state of the resource. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". :vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "provisioning_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, etag: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.etag = etag self.provisioning_state = None class NetworkWatcherListResult(_serialization.Model): """List of network watcher resources. :ivar value: :vartype value: list[~azure.mgmt.network.v2018_07_01.models.NetworkWatcher] """ _attribute_map = { "value": {"key": "value", "type": "[NetworkWatcher]"}, } def __init__(self, *, value: Optional[List["_models.NetworkWatcher"]] = None, **kwargs): """ :keyword value: :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.NetworkWatcher] """ super().__init__(**kwargs) self.value = value class NextHopParameters(_serialization.Model): """Parameters that define the source and destination endpoint. All required parameters must be populated in order to send to Azure. :ivar target_resource_id: The resource identifier of the target resource against which the action is to be performed. Required. :vartype target_resource_id: str :ivar source_ip_address: The source IP address. Required. :vartype source_ip_address: str :ivar destination_ip_address: The destination IP address. Required. :vartype destination_ip_address: str :ivar target_nic_resource_id: The NIC ID. (If VM has multiple NICs and IP forwarding is enabled on any of the nics, then this parameter must be specified. Otherwise optional). 
    :vartype target_nic_resource_id: str
    """

    _validation = {
        "target_resource_id": {"required": True},
        "source_ip_address": {"required": True},
        "destination_ip_address": {"required": True},
    }

    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
        "source_ip_address": {"key": "sourceIPAddress", "type": "str"},
        "destination_ip_address": {"key": "destinationIPAddress", "type": "str"},
        "target_nic_resource_id": {"key": "targetNicResourceId", "type": "str"},
    }

    def __init__(
        self,
        *,
        target_resource_id: str,
        source_ip_address: str,
        destination_ip_address: str,
        target_nic_resource_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword target_resource_id: The resource identifier of the target resource against which
         the action is to be performed. Required.
        :paramtype target_resource_id: str
        :keyword source_ip_address: The source IP address. Required.
        :paramtype source_ip_address: str
        :keyword destination_ip_address: The destination IP address. Required.
        :paramtype destination_ip_address: str
        :keyword target_nic_resource_id: The NIC ID. (If VM has multiple NICs and IP forwarding is
         enabled on any of the NICs, then this parameter must be specified. Otherwise optional).
        :paramtype target_nic_resource_id: str
        """
        super().__init__(**kwargs)
        self.target_resource_id = target_resource_id
        self.source_ip_address = source_ip_address
        self.destination_ip_address = destination_ip_address
        self.target_nic_resource_id = target_nic_resource_id


class NextHopResult(_serialization.Model):
    """The information about next hop from the specified VM.

    :ivar next_hop_type: Next hop type. Known values are: "Internet", "VirtualAppliance",
     "VirtualNetworkGateway", "VnetLocal", "HyperNetGateway", and "None".
    :vartype next_hop_type: str or ~azure.mgmt.network.v2018_07_01.models.NextHopType
    :ivar next_hop_ip_address: Next hop IP Address.
    :vartype next_hop_ip_address: str
    :ivar route_table_id: The resource identifier for the route table associated with the route
     being returned. If the route being returned does not correspond to any user created routes
     then this field will be the string 'System Route'.
    :vartype route_table_id: str
    """

    _attribute_map = {
        "next_hop_type": {"key": "nextHopType", "type": "str"},
        "next_hop_ip_address": {"key": "nextHopIpAddress", "type": "str"},
        "route_table_id": {"key": "routeTableId", "type": "str"},
    }

    def __init__(
        self,
        *,
        next_hop_type: Optional[Union[str, "_models.NextHopType"]] = None,
        next_hop_ip_address: Optional[str] = None,
        route_table_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword next_hop_type: Next hop type. Known values are: "Internet", "VirtualAppliance",
         "VirtualNetworkGateway", "VnetLocal", "HyperNetGateway", and "None".
        :paramtype next_hop_type: str or ~azure.mgmt.network.v2018_07_01.models.NextHopType
        :keyword next_hop_ip_address: Next hop IP Address.
        :paramtype next_hop_ip_address: str
        :keyword route_table_id: The resource identifier for the route table associated with the
         route being returned. If the route being returned does not correspond to any user created
         routes then this field will be the string 'System Route'.
        :paramtype route_table_id: str
        """
        super().__init__(**kwargs)
        self.next_hop_type = next_hop_type
        self.next_hop_ip_address = next_hop_ip_address
        self.route_table_id = route_table_id
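# ---------------------------------------------------------------------------
# Illustrative usage (editorial addition, not part of the generated models).
# A minimal sketch of the request/response pair for a next-hop query: the
# NextHopParameters payload is what a caller builds, and NextHopResult is the
# shape the service answer is deserialized into. The subscription, resource
# group, VM ID, and addresses below are hypothetical placeholders, and this
# helper name is invented for illustration only.
# ---------------------------------------------------------------------------
def _example_next_hop_round_trip() -> "NextHopResult":
    """Hypothetical helper: build NextHopParameters and a sample NextHopResult."""
    params = NextHopParameters(
        target_resource_id=(
            "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg"
            "/providers/Microsoft.Compute/virtualMachines/vm1"  # hypothetical VM ID
        ),
        source_ip_address="10.0.0.4",
        destination_ip_address="10.0.1.5",
    )
    # target_nic_resource_id stays None unless the VM has multiple NICs with
    # IP forwarding enabled, per the docstring above.
    assert params.target_nic_resource_id is None
    # A real result comes back from the service; constructing one directly
    # here only demonstrates the model's shape.
    return NextHopResult(next_hop_type="VnetLocal", next_hop_ip_address="10.0.1.5")


class Operation(_serialization.Model):
    """Network REST API operation definition.

    :ivar name: Operation name: {provider}/{resource}/{operation}.
    :vartype name: str
    :ivar display: Display metadata associated with the operation.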
:vartype display: ~azure.mgmt.network.v2018_07_01.models.OperationDisplay :ivar origin: Origin of the operation. :vartype origin: str :ivar service_specification: Specification of the service. :vartype service_specification: ~azure.mgmt.network.v2018_07_01.models.OperationPropertiesFormatServiceSpecification """ _attribute_map = { "name": {"key": "name", "type": "str"}, "display": {"key": "display", "type": "OperationDisplay"}, "origin": {"key": "origin", "type": "str"}, "service_specification": { "key": "properties.serviceSpecification", "type": "OperationPropertiesFormatServiceSpecification", }, } def __init__( self, *, name: Optional[str] = None, display: Optional["_models.OperationDisplay"] = None, origin: Optional[str] = None, service_specification: Optional["_models.OperationPropertiesFormatServiceSpecification"] = None, **kwargs ): """ :keyword name: Operation name: {provider}/{resource}/{operation}. :paramtype name: str :keyword display: Display metadata associated with the operation. :paramtype display: ~azure.mgmt.network.v2018_07_01.models.OperationDisplay :keyword origin: Origin of the operation. :paramtype origin: str :keyword service_specification: Specification of the service. :paramtype service_specification: ~azure.mgmt.network.v2018_07_01.models.OperationPropertiesFormatServiceSpecification """ super().__init__(**kwargs) self.name = name self.display = display self.origin = origin self.service_specification = service_specification class OperationDisplay(_serialization.Model): """Display metadata associated with the operation. :ivar provider: Service provider: Microsoft Network. :vartype provider: str :ivar resource: Resource on which the operation is performed. :vartype resource: str :ivar operation: Type of the operation: get, read, delete, etc. :vartype operation: str :ivar description: Description of the operation. :vartype description: str """ _attribute_map = { "provider": {"key": "provider", "type": "str"}, "resource": {"key": "resource", "type": "str"}, "operation": {"key": "operation", "type": "str"}, "description": {"key": "description", "type": "str"}, } def __init__( self, *, provider: Optional[str] = None, resource: Optional[str] = None, operation: Optional[str] = None, description: Optional[str] = None, **kwargs ): """ :keyword provider: Service provider: Microsoft Network. :paramtype provider: str :keyword resource: Resource on which the operation is performed. :paramtype resource: str :keyword operation: Type of the operation: get, read, delete, etc. :paramtype operation: str :keyword description: Description of the operation. :paramtype description: str """ super().__init__(**kwargs) self.provider = provider self.resource = resource self.operation = operation self.description = description class OperationListResult(_serialization.Model): """Result of the request to list Network operations. It contains a list of operations and a URL link to get the next set of results. :ivar value: List of Network operations supported by the Network resource provider. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.Operation] :ivar next_link: URL to get the next set of operation list results if there are any. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[Operation]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.Operation"]] = None, next_link: Optional[str] = None, **kwargs): """ :keyword value: List of Network operations supported by the Network resource provider. 
:paramtype value: list[~azure.mgmt.network.v2018_07_01.models.Operation] :keyword next_link: URL to get the next set of operation list results if there are any. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class OperationPropertiesFormatServiceSpecification(_serialization.Model): """Specification of the service. :ivar metric_specifications: Operation service specification. :vartype metric_specifications: list[~azure.mgmt.network.v2018_07_01.models.MetricSpecification] :ivar log_specifications: Operation log specification. :vartype log_specifications: list[~azure.mgmt.network.v2018_07_01.models.LogSpecification] """ _attribute_map = { "metric_specifications": {"key": "metricSpecifications", "type": "[MetricSpecification]"}, "log_specifications": {"key": "logSpecifications", "type": "[LogSpecification]"}, } def __init__( self, *, metric_specifications: Optional[List["_models.MetricSpecification"]] = None, log_specifications: Optional[List["_models.LogSpecification"]] = None, **kwargs ): """ :keyword metric_specifications: Operation service specification. :paramtype metric_specifications: list[~azure.mgmt.network.v2018_07_01.models.MetricSpecification] :keyword log_specifications: Operation log specification. :paramtype log_specifications: list[~azure.mgmt.network.v2018_07_01.models.LogSpecification] """ super().__init__(**kwargs) self.metric_specifications = metric_specifications self.log_specifications = log_specifications class OutboundRule(SubResource): """Outbound rule of the load balancer. :ivar id: Resource ID. :vartype id: str :ivar name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar allocated_outbound_ports: The number of outbound ports to be used for NAT. :vartype allocated_outbound_ports: int :ivar frontend_ip_configurations: The Frontend IP addresses of the load balancer. :vartype frontend_ip_configurations: list[~azure.mgmt.network.v2018_07_01.models.SubResource] :ivar backend_address_pool: A reference to a pool of DIPs. Outbound traffic is randomly load balanced across IPs in the backend IPs. :vartype backend_address_pool: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar provisioning_state: Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str :ivar protocol: Protocol - TCP, UDP or All. Known values are: "Tcp", "Udp", and "All". :vartype protocol: str or ~azure.mgmt.network.v2018_07_01.models.OutboundRulePropertiesFormatProtocol :ivar enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected connection termination. This element is only used when the protocol is set to TCP. :vartype enable_tcp_reset: bool :ivar idle_timeout_in_minutes: The timeout for the TCP idle connection. 
:vartype idle_timeout_in_minutes: int """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "allocated_outbound_ports": {"key": "properties.allocatedOutboundPorts", "type": "int"}, "frontend_ip_configurations": {"key": "properties.frontendIPConfigurations", "type": "[SubResource]"}, "backend_address_pool": {"key": "properties.backendAddressPool", "type": "SubResource"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "protocol": {"key": "properties.protocol", "type": "str"}, "enable_tcp_reset": {"key": "properties.enableTcpReset", "type": "bool"}, "idle_timeout_in_minutes": {"key": "properties.idleTimeoutInMinutes", "type": "int"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, allocated_outbound_ports: Optional[int] = None, frontend_ip_configurations: Optional[List["_models.SubResource"]] = None, backend_address_pool: Optional["_models.SubResource"] = None, provisioning_state: Optional[str] = None, protocol: Optional[Union[str, "_models.OutboundRulePropertiesFormatProtocol"]] = None, enable_tcp_reset: Optional[bool] = None, idle_timeout_in_minutes: Optional[int] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword allocated_outbound_ports: The number of outbound ports to be used for NAT. :paramtype allocated_outbound_ports: int :keyword frontend_ip_configurations: The Frontend IP addresses of the load balancer. :paramtype frontend_ip_configurations: list[~azure.mgmt.network.v2018_07_01.models.SubResource] :keyword backend_address_pool: A reference to a pool of DIPs. Outbound traffic is randomly load balanced across IPs in the backend IPs. :paramtype backend_address_pool: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword provisioning_state: Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :paramtype provisioning_state: str :keyword protocol: Protocol - TCP, UDP or All. Known values are: "Tcp", "Udp", and "All". :paramtype protocol: str or ~azure.mgmt.network.v2018_07_01.models.OutboundRulePropertiesFormatProtocol :keyword enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected connection termination. This element is only used when the protocol is set to TCP. :paramtype enable_tcp_reset: bool :keyword idle_timeout_in_minutes: The timeout for the TCP idle connection. :paramtype idle_timeout_in_minutes: int """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.allocated_outbound_ports = allocated_outbound_ports self.frontend_ip_configurations = frontend_ip_configurations self.backend_address_pool = backend_address_pool self.provisioning_state = provisioning_state self.protocol = protocol self.enable_tcp_reset = enable_tcp_reset self.idle_timeout_in_minutes = idle_timeout_in_minutes class PacketCapture(_serialization.Model): """Parameters that define the create packet capture operation. All required parameters must be populated in order to send to Azure. :ivar target: The ID of the targeted resource, only VM is currently supported. Required. 
    :vartype target: str
    :ivar bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining bytes
     are truncated.
    :vartype bytes_to_capture_per_packet: int
    :ivar total_bytes_per_session: Maximum size of the capture output.
    :vartype total_bytes_per_session: int
    :ivar time_limit_in_seconds: Maximum duration of the capture session in seconds.
    :vartype time_limit_in_seconds: int
    :ivar storage_location: Describes the storage location for a packet capture session. Required.
    :vartype storage_location: ~azure.mgmt.network.v2018_07_01.models.PacketCaptureStorageLocation
    :ivar filters:
    :vartype filters: list[~azure.mgmt.network.v2018_07_01.models.PacketCaptureFilter]
    """

    _validation = {
        "target": {"required": True},
        "storage_location": {"required": True},
    }

    _attribute_map = {
        "target": {"key": "properties.target", "type": "str"},
        "bytes_to_capture_per_packet": {"key": "properties.bytesToCapturePerPacket", "type": "int"},
        "total_bytes_per_session": {"key": "properties.totalBytesPerSession", "type": "int"},
        "time_limit_in_seconds": {"key": "properties.timeLimitInSeconds", "type": "int"},
        "storage_location": {"key": "properties.storageLocation", "type": "PacketCaptureStorageLocation"},
        "filters": {"key": "properties.filters", "type": "[PacketCaptureFilter]"},
    }

    def __init__(
        self,
        *,
        target: str,
        storage_location: "_models.PacketCaptureStorageLocation",
        bytes_to_capture_per_packet: int = 0,
        total_bytes_per_session: int = 1073741824,
        time_limit_in_seconds: int = 18000,
        filters: Optional[List["_models.PacketCaptureFilter"]] = None,
        **kwargs
    ):
        """
        :keyword target: The ID of the targeted resource, only VM is currently supported. Required.
        :paramtype target: str
        :keyword bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining
         bytes are truncated.
        :paramtype bytes_to_capture_per_packet: int
        :keyword total_bytes_per_session: Maximum size of the capture output.
        :paramtype total_bytes_per_session: int
        :keyword time_limit_in_seconds: Maximum duration of the capture session in seconds.
        :paramtype time_limit_in_seconds: int
        :keyword storage_location: Describes the storage location for a packet capture session.
         Required.
        :paramtype storage_location:
         ~azure.mgmt.network.v2018_07_01.models.PacketCaptureStorageLocation
        :keyword filters:
        :paramtype filters: list[~azure.mgmt.network.v2018_07_01.models.PacketCaptureFilter]
        """
        super().__init__(**kwargs)
        self.target = target
        self.bytes_to_capture_per_packet = bytes_to_capture_per_packet
        self.total_bytes_per_session = total_bytes_per_session
        self.time_limit_in_seconds = time_limit_in_seconds
        self.storage_location = storage_location
        self.filters = filters


class PacketCaptureFilter(_serialization.Model):
    """Filter that is applied to packet capture request. Multiple filters can be applied.

    :ivar protocol: Protocol to be filtered on. Known values are: "TCP", "UDP", and "Any".
    :vartype protocol: str or ~azure.mgmt.network.v2018_07_01.models.PcProtocol
    :ivar local_ip_address: Local IP Address to be filtered on. Notation: "127.0.0.1" for single
     address entry. "127.0.0.1-127.0.0.255" for range. "127.0.0.1;127.0.0.5" for multiple entries.
     Multiple ranges not currently supported. Mixing ranges with multiple entries not currently
     supported. Default = null.
    :vartype local_ip_address: str
    :ivar remote_ip_address: Remote IP Address to be filtered on. Notation: "127.0.0.1" for single
     address entry. "127.0.0.1-127.0.0.255" for range. "127.0.0.1;127.0.0.5" for multiple entries.
     Multiple ranges not currently supported.
     Mixing ranges with multiple entries not currently supported. Default = null.
    :vartype remote_ip_address: str
    :ivar local_port: Local port to be filtered on. Notation: "80" for single port entry."80-85"
     for range. "80;443;" for multiple entries. Multiple ranges not currently supported. Mixing
     ranges with multiple entries not currently supported. Default = null.
    :vartype local_port: str
    :ivar remote_port: Remote port to be filtered on. Notation: "80" for single port entry."80-85"
     for range. "80;443;" for multiple entries. Multiple ranges not currently supported. Mixing
     ranges with multiple entries not currently supported. Default = null.
    :vartype remote_port: str
    """

    _attribute_map = {
        "protocol": {"key": "protocol", "type": "str"},
        "local_ip_address": {"key": "localIPAddress", "type": "str"},
        "remote_ip_address": {"key": "remoteIPAddress", "type": "str"},
        "local_port": {"key": "localPort", "type": "str"},
        "remote_port": {"key": "remotePort", "type": "str"},
    }

    def __init__(
        self,
        *,
        protocol: Union[str, "_models.PcProtocol"] = "Any",
        local_ip_address: Optional[str] = None,
        remote_ip_address: Optional[str] = None,
        local_port: Optional[str] = None,
        remote_port: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword protocol: Protocol to be filtered on. Known values are: "TCP", "UDP", and "Any".
        :paramtype protocol: str or ~azure.mgmt.network.v2018_07_01.models.PcProtocol
        :keyword local_ip_address: Local IP Address to be filtered on. Notation: "127.0.0.1" for
         single address entry. "127.0.0.1-127.0.0.255" for range. "127.0.0.1;127.0.0.5" for multiple
         entries. Multiple ranges not currently supported. Mixing ranges with multiple entries not
         currently supported. Default = null.
        :paramtype local_ip_address: str
        :keyword remote_ip_address: Remote IP Address to be filtered on. Notation: "127.0.0.1" for
         single address entry. "127.0.0.1-127.0.0.255" for range. "127.0.0.1;127.0.0.5" for multiple
         entries. Multiple ranges not currently supported. Mixing ranges with multiple entries not
         currently supported. Default = null.
        :paramtype remote_ip_address: str
        :keyword local_port: Local port to be filtered on. Notation: "80" for single port
         entry."80-85" for range. "80;443;" for multiple entries. Multiple ranges not currently
         supported. Mixing ranges with multiple entries not currently supported. Default = null.
        :paramtype local_port: str
        :keyword remote_port: Remote port to be filtered on. Notation: "80" for single port
         entry."80-85" for range. "80;443;" for multiple entries. Multiple ranges not currently
         supported. Mixing ranges with multiple entries not currently supported. Default = null.
        :paramtype remote_port: str
        """
        super().__init__(**kwargs)
        self.protocol = protocol
        self.local_ip_address = local_ip_address
        self.remote_ip_address = remote_ip_address
        self.local_port = local_port
        self.remote_port = remote_port
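# ---------------------------------------------------------------------------
# Illustrative usage (editorial addition, not part of the generated models).
# A minimal sketch of assembling a packet capture request: a storage location
# plus an optional filter list, matching the PacketCapture model above.
# PacketCaptureStorageLocation is defined later in this module; that is fine
# because the name is only resolved when the helper is called. All resource
# IDs below are hypothetical placeholders, and the helper name is invented.
# ---------------------------------------------------------------------------
def _example_packet_capture_request() -> "PacketCapture":
    """Hypothetical helper: build a PacketCapture limited to local TCP port 443."""
    storage = PacketCaptureStorageLocation(
        storage_id=(
            "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg"
            "/providers/Microsoft.Storage/storageAccounts/captures"  # hypothetical account
        ),
    )
    https_only = PacketCaptureFilter(protocol="TCP", local_port="443")
    return PacketCapture(
        target=(
            "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg"
            "/providers/Microsoft.Compute/virtualMachines/vm1"  # hypothetical VM ID
        ),
        storage_location=storage,
        time_limit_in_seconds=600,  # stop after ten minutes instead of the 18000s default
        filters=[https_only],
    )


class PacketCaptureListResult(_serialization.Model):
    """List of packet capture sessions.

    :ivar value: Information about packet capture sessions.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.PacketCaptureResult]
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[PacketCaptureResult]"},
    }

    def __init__(self, *, value: Optional[List["_models.PacketCaptureResult"]] = None, **kwargs):
        """
        :keyword value: Information about packet capture sessions.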
:paramtype value: list[~azure.mgmt.network.v2018_07_01.models.PacketCaptureResult] """ super().__init__(**kwargs) self.value = value class PacketCaptureParameters(_serialization.Model): """Parameters that define the create packet capture operation. All required parameters must be populated in order to send to Azure. :ivar target: The ID of the targeted resource, only VM is currently supported. Required. :vartype target: str :ivar bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining bytes are truncated. :vartype bytes_to_capture_per_packet: int :ivar total_bytes_per_session: Maximum size of the capture output. :vartype total_bytes_per_session: int :ivar time_limit_in_seconds: Maximum duration of the capture session in seconds. :vartype time_limit_in_seconds: int :ivar storage_location: Describes the storage location for a packet capture session. Required. :vartype storage_location: ~azure.mgmt.network.v2018_07_01.models.PacketCaptureStorageLocation :ivar filters: :vartype filters: list[~azure.mgmt.network.v2018_07_01.models.PacketCaptureFilter] """ _validation = { "target": {"required": True}, "storage_location": {"required": True}, } _attribute_map = { "target": {"key": "target", "type": "str"}, "bytes_to_capture_per_packet": {"key": "bytesToCapturePerPacket", "type": "int"}, "total_bytes_per_session": {"key": "totalBytesPerSession", "type": "int"}, "time_limit_in_seconds": {"key": "timeLimitInSeconds", "type": "int"}, "storage_location": {"key": "storageLocation", "type": "PacketCaptureStorageLocation"}, "filters": {"key": "filters", "type": "[PacketCaptureFilter]"}, } def __init__( self, *, target: str, storage_location: "_models.PacketCaptureStorageLocation", bytes_to_capture_per_packet: int = 0, total_bytes_per_session: int = 1073741824, time_limit_in_seconds: int = 18000, filters: Optional[List["_models.PacketCaptureFilter"]] = None, **kwargs ): """ :keyword target: The ID of the targeted resource, only VM is currently supported. Required. :paramtype target: str :keyword bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining bytes are truncated. :paramtype bytes_to_capture_per_packet: int :keyword total_bytes_per_session: Maximum size of the capture output. :paramtype total_bytes_per_session: int :keyword time_limit_in_seconds: Maximum duration of the capture session in seconds. :paramtype time_limit_in_seconds: int :keyword storage_location: Describes the storage location for a packet capture session. Required. :paramtype storage_location: ~azure.mgmt.network.v2018_07_01.models.PacketCaptureStorageLocation :keyword filters: :paramtype filters: list[~azure.mgmt.network.v2018_07_01.models.PacketCaptureFilter] """ super().__init__(**kwargs) self.target = target self.bytes_to_capture_per_packet = bytes_to_capture_per_packet self.total_bytes_per_session = total_bytes_per_session self.time_limit_in_seconds = time_limit_in_seconds self.storage_location = storage_location self.filters = filters class PacketCaptureQueryStatusResult(_serialization.Model): """Status of packet capture session. :ivar name: The name of the packet capture resource. :vartype name: str :ivar id: The ID of the packet capture resource. :vartype id: str :ivar capture_start_time: The start time of the packet capture session. :vartype capture_start_time: ~datetime.datetime :ivar packet_capture_status: The status of the packet capture session. Known values are: "NotStarted", "Running", "Stopped", "Error", and "Unknown". 
:vartype packet_capture_status: str or ~azure.mgmt.network.v2018_07_01.models.PcStatus :ivar stop_reason: The reason the current packet capture session was stopped. :vartype stop_reason: str :ivar packet_capture_error: List of errors of packet capture session. :vartype packet_capture_error: list[str or ~azure.mgmt.network.v2018_07_01.models.PcError] """ _attribute_map = { "name": {"key": "name", "type": "str"}, "id": {"key": "id", "type": "str"}, "capture_start_time": {"key": "captureStartTime", "type": "iso-8601"}, "packet_capture_status": {"key": "packetCaptureStatus", "type": "str"}, "stop_reason": {"key": "stopReason", "type": "str"}, "packet_capture_error": {"key": "packetCaptureError", "type": "[str]"}, } def __init__( self, *, name: Optional[str] = None, id: Optional[str] = None, # pylint: disable=redefined-builtin capture_start_time: Optional[datetime.datetime] = None, packet_capture_status: Optional[Union[str, "_models.PcStatus"]] = None, stop_reason: Optional[str] = None, packet_capture_error: Optional[List[Union[str, "_models.PcError"]]] = None, **kwargs ): """ :keyword name: The name of the packet capture resource. :paramtype name: str :keyword id: The ID of the packet capture resource. :paramtype id: str :keyword capture_start_time: The start time of the packet capture session. :paramtype capture_start_time: ~datetime.datetime :keyword packet_capture_status: The status of the packet capture session. Known values are: "NotStarted", "Running", "Stopped", "Error", and "Unknown". :paramtype packet_capture_status: str or ~azure.mgmt.network.v2018_07_01.models.PcStatus :keyword stop_reason: The reason the current packet capture session was stopped. :paramtype stop_reason: str :keyword packet_capture_error: List of errors of packet capture session. :paramtype packet_capture_error: list[str or ~azure.mgmt.network.v2018_07_01.models.PcError] """ super().__init__(**kwargs) self.name = name self.id = id self.capture_start_time = capture_start_time self.packet_capture_status = packet_capture_status self.stop_reason = stop_reason self.packet_capture_error = packet_capture_error class PacketCaptureResult(_serialization.Model): """Information about packet capture session. Variables are only populated by the server, and will be ignored when sending a request. :ivar name: Name of the packet capture session. :vartype name: str :ivar id: ID of the packet capture operation. :vartype id: str :ivar etag: :vartype etag: str :ivar target: The ID of the targeted resource, only VM is currently supported. :vartype target: str :ivar bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining bytes are truncated. :vartype bytes_to_capture_per_packet: int :ivar total_bytes_per_session: Maximum size of the capture output. :vartype total_bytes_per_session: int :ivar time_limit_in_seconds: Maximum duration of the capture session in seconds. :vartype time_limit_in_seconds: int :ivar storage_location: Describes the storage location for a packet capture session. :vartype storage_location: ~azure.mgmt.network.v2018_07_01.models.PacketCaptureStorageLocation :ivar filters: :vartype filters: list[~azure.mgmt.network.v2018_07_01.models.PacketCaptureFilter] :ivar provisioning_state: The provisioning state of the packet capture session. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". 
    :vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState
    """

    _validation = {
        "name": {"readonly": True},
        "id": {"readonly": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "target": {"key": "properties.target", "type": "str"},
        "bytes_to_capture_per_packet": {"key": "properties.bytesToCapturePerPacket", "type": "int"},
        "total_bytes_per_session": {"key": "properties.totalBytesPerSession", "type": "int"},
        "time_limit_in_seconds": {"key": "properties.timeLimitInSeconds", "type": "int"},
        "storage_location": {"key": "properties.storageLocation", "type": "PacketCaptureStorageLocation"},
        "filters": {"key": "properties.filters", "type": "[PacketCaptureFilter]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        etag: Optional[str] = None,
        target: Optional[str] = None,
        bytes_to_capture_per_packet: int = 0,
        total_bytes_per_session: int = 1073741824,
        time_limit_in_seconds: int = 18000,
        storage_location: Optional["_models.PacketCaptureStorageLocation"] = None,
        filters: Optional[List["_models.PacketCaptureFilter"]] = None,
        provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None,
        **kwargs
    ):
        """
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword target: The ID of the targeted resource, only VM is currently supported.
        :paramtype target: str
        :keyword bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining
         bytes are truncated.
        :paramtype bytes_to_capture_per_packet: int
        :keyword total_bytes_per_session: Maximum size of the capture output.
        :paramtype total_bytes_per_session: int
        :keyword time_limit_in_seconds: Maximum duration of the capture session in seconds.
        :paramtype time_limit_in_seconds: int
        :keyword storage_location: Describes the storage location for a packet capture session.
        :paramtype storage_location:
         ~azure.mgmt.network.v2018_07_01.models.PacketCaptureStorageLocation
        :keyword filters:
        :paramtype filters: list[~azure.mgmt.network.v2018_07_01.models.PacketCaptureFilter]
        :keyword provisioning_state: The provisioning state of the packet capture session. Known
         values are: "Succeeded", "Updating", "Deleting", and "Failed".
        :paramtype provisioning_state: str or
         ~azure.mgmt.network.v2018_07_01.models.ProvisioningState
        """
        super().__init__(**kwargs)
        self.name = None
        self.id = None
        self.etag = etag
        self.target = target
        self.bytes_to_capture_per_packet = bytes_to_capture_per_packet
        self.total_bytes_per_session = total_bytes_per_session
        self.time_limit_in_seconds = time_limit_in_seconds
        self.storage_location = storage_location
        self.filters = filters
        self.provisioning_state = provisioning_state
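# ---------------------------------------------------------------------------
# Illustrative usage (editorial addition, not part of the generated models).
# A minimal sketch of summarizing a capture session returned by the service.
# Real PacketCaptureResult instances come back from the operations client
# already deserialized; this helper name is invented for illustration only.
# ---------------------------------------------------------------------------
def _example_describe_capture(result: "PacketCaptureResult") -> str:
    """Hypothetical helper: one-line summary of a packet capture session."""
    state = result.provisioning_state or "unknown"
    return "capture of {} ({}s limit): {}".format(
        result.target, result.time_limit_in_seconds, state
    )


class PacketCaptureResultProperties(PacketCaptureParameters):
    """Describes the properties of a packet capture session.

    All required parameters must be populated in order to send to Azure.

    :ivar target: The ID of the targeted resource, only VM is currently supported. Required.
    :vartype target: str
    :ivar bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining bytes
     are truncated.
    :vartype bytes_to_capture_per_packet: int
    :ivar total_bytes_per_session: Maximum size of the capture output.
    :vartype total_bytes_per_session: int
    :ivar time_limit_in_seconds: Maximum duration of the capture session in seconds.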
:vartype time_limit_in_seconds: int :ivar storage_location: Describes the storage location for a packet capture session. Required. :vartype storage_location: ~azure.mgmt.network.v2018_07_01.models.PacketCaptureStorageLocation :ivar filters: :vartype filters: list[~azure.mgmt.network.v2018_07_01.models.PacketCaptureFilter] :ivar provisioning_state: The provisioning state of the packet capture session. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". :vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState """ _validation = { "target": {"required": True}, "storage_location": {"required": True}, } _attribute_map = { "target": {"key": "target", "type": "str"}, "bytes_to_capture_per_packet": {"key": "bytesToCapturePerPacket", "type": "int"}, "total_bytes_per_session": {"key": "totalBytesPerSession", "type": "int"}, "time_limit_in_seconds": {"key": "timeLimitInSeconds", "type": "int"}, "storage_location": {"key": "storageLocation", "type": "PacketCaptureStorageLocation"}, "filters": {"key": "filters", "type": "[PacketCaptureFilter]"}, "provisioning_state": {"key": "provisioningState", "type": "str"}, } def __init__( self, *, target: str, storage_location: "_models.PacketCaptureStorageLocation", bytes_to_capture_per_packet: int = 0, total_bytes_per_session: int = 1073741824, time_limit_in_seconds: int = 18000, filters: Optional[List["_models.PacketCaptureFilter"]] = None, provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None, **kwargs ): """ :keyword target: The ID of the targeted resource, only VM is currently supported. Required. :paramtype target: str :keyword bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining bytes are truncated. :paramtype bytes_to_capture_per_packet: int :keyword total_bytes_per_session: Maximum size of the capture output. :paramtype total_bytes_per_session: int :keyword time_limit_in_seconds: Maximum duration of the capture session in seconds. :paramtype time_limit_in_seconds: int :keyword storage_location: Describes the storage location for a packet capture session. Required. :paramtype storage_location: ~azure.mgmt.network.v2018_07_01.models.PacketCaptureStorageLocation :keyword filters: :paramtype filters: list[~azure.mgmt.network.v2018_07_01.models.PacketCaptureFilter] :keyword provisioning_state: The provisioning state of the packet capture session. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". :paramtype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState """ super().__init__( target=target, bytes_to_capture_per_packet=bytes_to_capture_per_packet, total_bytes_per_session=total_bytes_per_session, time_limit_in_seconds=time_limit_in_seconds, storage_location=storage_location, filters=filters, **kwargs ) self.provisioning_state = provisioning_state class PacketCaptureStorageLocation(_serialization.Model): """Describes the storage location for a packet capture session. :ivar storage_id: The ID of the storage account to save the packet capture session. Required if no local file path is provided. :vartype storage_id: str :ivar storage_path: The URI of the storage path to save the packet capture. Must be a well-formed URI describing the location to save the packet capture. :vartype storage_path: str :ivar file_path: A valid local path on the targeting VM. Must include the name of the capture file (*.cap). For linux virtual machine it must start with /var/captures. 
Required if no storage ID is provided, otherwise optional. :vartype file_path: str """ _attribute_map = { "storage_id": {"key": "storageId", "type": "str"}, "storage_path": {"key": "storagePath", "type": "str"}, "file_path": {"key": "filePath", "type": "str"}, } def __init__( self, *, storage_id: Optional[str] = None, storage_path: Optional[str] = None, file_path: Optional[str] = None, **kwargs ): """ :keyword storage_id: The ID of the storage account to save the packet capture session. Required if no local file path is provided. :paramtype storage_id: str :keyword storage_path: The URI of the storage path to save the packet capture. Must be a well-formed URI describing the location to save the packet capture. :paramtype storage_path: str :keyword file_path: A valid local path on the targeting VM. Must include the name of the capture file (*.cap). For linux virtual machine it must start with /var/captures. Required if no storage ID is provided, otherwise optional. :paramtype file_path: str """ super().__init__(**kwargs) self.storage_id = storage_id self.storage_path = storage_path self.file_path = file_path class PatchRouteFilter(SubResource): """Route Filter Resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar type: Resource type. :vartype type: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar rules: Collection of RouteFilterRules contained within a route filter. :vartype rules: list[~azure.mgmt.network.v2018_07_01.models.RouteFilterRule] :ivar peerings: A collection of references to express route circuit peerings. :vartype peerings: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeering] :ivar provisioning_state: The provisioning state of the resource. Possible values are: 'Updating', 'Deleting', 'Succeeded' and 'Failed'. :vartype provisioning_state: str """ _validation = { "name": {"readonly": True}, "etag": {"readonly": True}, "type": {"readonly": True}, "provisioning_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "type": {"key": "type", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "rules": {"key": "properties.rules", "type": "[RouteFilterRule]"}, "peerings": {"key": "properties.peerings", "type": "[ExpressRouteCircuitPeering]"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin tags: Optional[Dict[str, str]] = None, rules: Optional[List["_models.RouteFilterRule"]] = None, peerings: Optional[List["_models.ExpressRouteCircuitPeering"]] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword rules: Collection of RouteFilterRules contained within a route filter. :paramtype rules: list[~azure.mgmt.network.v2018_07_01.models.RouteFilterRule] :keyword peerings: A collection of references to express route circuit peerings. 
:paramtype peerings: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeering] """ super().__init__(id=id, **kwargs) self.name = None self.etag = None self.type = None self.tags = tags self.rules = rules self.peerings = peerings self.provisioning_state = None class PatchRouteFilterRule(SubResource): """Route Filter Rule Resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar access: The access type of the rule. Valid values are: 'Allow', 'Deny'. Known values are: "Allow" and "Deny". :vartype access: str or ~azure.mgmt.network.v2018_07_01.models.Access :ivar route_filter_rule_type: The rule type of the rule. Valid value is: 'Community'. "Community" :vartype route_filter_rule_type: str or ~azure.mgmt.network.v2018_07_01.models.RouteFilterRuleType :ivar communities: The collection for bgp community values to filter on. e.g. ['12076:5010','12076:5020']. :vartype communities: list[str] :ivar provisioning_state: The provisioning state of the resource. Possible values are: 'Updating', 'Deleting', 'Succeeded' and 'Failed'. :vartype provisioning_state: str """ _validation = { "name": {"readonly": True}, "etag": {"readonly": True}, "provisioning_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "access": {"key": "properties.access", "type": "str"}, "route_filter_rule_type": {"key": "properties.routeFilterRuleType", "type": "str"}, "communities": {"key": "properties.communities", "type": "[str]"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin access: Optional[Union[str, "_models.Access"]] = None, route_filter_rule_type: Optional[Union[str, "_models.RouteFilterRuleType"]] = None, communities: Optional[List[str]] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword access: The access type of the rule. Valid values are: 'Allow', 'Deny'. Known values are: "Allow" and "Deny". :paramtype access: str or ~azure.mgmt.network.v2018_07_01.models.Access :keyword route_filter_rule_type: The rule type of the rule. Valid value is: 'Community'. "Community" :paramtype route_filter_rule_type: str or ~azure.mgmt.network.v2018_07_01.models.RouteFilterRuleType :keyword communities: The collection for bgp community values to filter on. e.g. ['12076:5010','12076:5020']. :paramtype communities: list[str] """ super().__init__(id=id, **kwargs) self.name = None self.etag = None self.access = access self.route_filter_rule_type = route_filter_rule_type self.communities = communities self.provisioning_state = None class Policies(_serialization.Model): """Policies for vpn gateway. :ivar allow_branch_to_branch_traffic: True if branch to branch traffic is allowed. :vartype allow_branch_to_branch_traffic: bool :ivar allow_vnet_to_vnet_traffic: True if Vnet to Vnet traffic is allowed. 
class Policies(_serialization.Model):
    """Policies for vpn gateway.

    :ivar allow_branch_to_branch_traffic: True if branch to branch traffic is allowed.
    :vartype allow_branch_to_branch_traffic: bool
    :ivar allow_vnet_to_vnet_traffic: True if Vnet to Vnet traffic is allowed.
    :vartype allow_vnet_to_vnet_traffic: bool
    """

    _attribute_map = {
        "allow_branch_to_branch_traffic": {"key": "allowBranchToBranchTraffic", "type": "bool"},
        "allow_vnet_to_vnet_traffic": {"key": "allowVnetToVnetTraffic", "type": "bool"},
    }

    def __init__(
        self,
        *,
        allow_branch_to_branch_traffic: Optional[bool] = None,
        allow_vnet_to_vnet_traffic: Optional[bool] = None,
        **kwargs
    ):
        """
        :keyword allow_branch_to_branch_traffic: True if branch to branch traffic is allowed.
        :paramtype allow_branch_to_branch_traffic: bool
        :keyword allow_vnet_to_vnet_traffic: True if Vnet to Vnet traffic is allowed.
        :paramtype allow_vnet_to_vnet_traffic: bool
        """
        super().__init__(**kwargs)
        self.allow_branch_to_branch_traffic = allow_branch_to_branch_traffic
        self.allow_vnet_to_vnet_traffic = allow_vnet_to_vnet_traffic


class Probe(SubResource):
    """A load balancer probe.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Gets name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar load_balancing_rules: The load balancer rules that use this probe.
    :vartype load_balancing_rules: list[~azure.mgmt.network.v2018_07_01.models.SubResource]
    :ivar protocol: The protocol of the end point. Possible values are: 'Http', 'Tcp', or 'Https'.
     If 'Tcp' is specified, a received ACK is required for the probe to be successful. If 'Http' or
     'Https' is specified, a 200 OK response from the specified URI is required for the probe to
     be successful. Known values are: "Http", "Tcp", and "Https".
    :vartype protocol: str or ~azure.mgmt.network.v2018_07_01.models.ProbeProtocol
    :ivar port: The port for communicating the probe. Possible values range from 1 to 65535,
     inclusive.
    :vartype port: int
    :ivar interval_in_seconds: The interval, in seconds, for how frequently to probe the endpoint
     for health status. Typically, the interval is slightly less than half the allocated timeout
     period (in seconds) which allows two full probes before taking the instance out of rotation.
     The default value is 15, the minimum value is 5.
    :vartype interval_in_seconds: int
    :ivar number_of_probes: The number of probes where if no response, will result in stopping
     further traffic from being delivered to the endpoint. This value allows endpoints to be taken
     out of rotation faster or slower than the typical times used in Azure.
    :vartype number_of_probes: int
    :ivar request_path: The URI used for requesting health status from the VM. Path is required if
     a protocol is set to http. Otherwise, it is not allowed. There is no default value.
    :vartype request_path: str
    :ivar provisioning_state: Gets the provisioning state of the public IP resource. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "load_balancing_rules": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "load_balancing_rules": {"key": "properties.loadBalancingRules", "type": "[SubResource]"},
        "protocol": {"key": "properties.protocol", "type": "str"},
        "port": {"key": "properties.port", "type": "int"},
        "interval_in_seconds": {"key": "properties.intervalInSeconds", "type": "int"},
        "number_of_probes": {"key": "properties.numberOfProbes", "type": "int"},
        "request_path": {"key": "properties.requestPath", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        protocol: Optional[Union[str, "_models.ProbeProtocol"]] = None,
        port: Optional[int] = None,
        interval_in_seconds: Optional[int] = None,
        number_of_probes: Optional[int] = None,
        request_path: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Gets name of the resource that is unique within a resource group. This name
         can be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword protocol: The protocol of the end point. Possible values are: 'Http', 'Tcp', or
         'Https'. If 'Tcp' is specified, a received ACK is required for the probe to be successful.
         If 'Http' or 'Https' is specified, a 200 OK response from the specified URI is required for
         the probe to be successful. Known values are: "Http", "Tcp", and "Https".
        :paramtype protocol: str or ~azure.mgmt.network.v2018_07_01.models.ProbeProtocol
        :keyword port: The port for communicating the probe. Possible values range from 1 to 65535,
         inclusive.
        :paramtype port: int
        :keyword interval_in_seconds: The interval, in seconds, for how frequently to probe the
         endpoint for health status. Typically, the interval is slightly less than half the allocated
         timeout period (in seconds) which allows two full probes before taking the instance out of
         rotation. The default value is 15, the minimum value is 5.
        :paramtype interval_in_seconds: int
        :keyword number_of_probes: The number of probes where if no response, will result in stopping
         further traffic from being delivered to the endpoint. This value allows endpoints to be
         taken out of rotation faster or slower than the typical times used in Azure.
        :paramtype number_of_probes: int
        :keyword request_path: The URI used for requesting health status from the VM. Path is
         required if a protocol is set to http. Otherwise, it is not allowed. There is no default
         value.
        :paramtype request_path: str
        :keyword provisioning_state: Gets the provisioning state of the public IP resource. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.load_balancing_rules = None
        self.protocol = protocol
        self.port = port
        self.interval_in_seconds = interval_in_seconds
        self.number_of_probes = number_of_probes
        self.request_path = request_path
        self.provisioning_state = provisioning_state
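# A minimal usage sketch for Probe (not part of the generated SDK surface): an
# HTTP health probe. The probe name and request path are hypothetical;
# request_path is supplied because the protocol is "Http", as documented above.
#
#     probe = Probe(
#         name="http-probe",
#         protocol="Http",
#         port=80,
#         interval_in_seconds=15,
#         number_of_probes=2,
#         request_path="/healthz",
#     )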
class ProtocolConfiguration(_serialization.Model):
    """Configuration of the protocol.

    :ivar http_configuration: HTTP configuration of the connectivity check.
    :vartype http_configuration: ~azure.mgmt.network.v2018_07_01.models.HTTPConfiguration
    """

    _attribute_map = {
        "http_configuration": {"key": "HTTPConfiguration", "type": "HTTPConfiguration"},
    }

    def __init__(self, *, http_configuration: Optional["_models.HTTPConfiguration"] = None, **kwargs):
        """
        :keyword http_configuration: HTTP configuration of the connectivity check.
        :paramtype http_configuration: ~azure.mgmt.network.v2018_07_01.models.HTTPConfiguration
        """
        super().__init__(**kwargs)
        self.http_configuration = http_configuration


class PublicIPAddress(Resource):  # pylint: disable=too-many-instance-attributes
    """Public IP address resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar sku: The public IP address SKU.
    :vartype sku: ~azure.mgmt.network.v2018_07_01.models.PublicIPAddressSku
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar zones: A list of availability zones denoting the IP allocated for the resource needs to
     come from.
    :vartype zones: list[str]
    :ivar public_ip_allocation_method: The public IP allocation method. Possible values are:
     'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic".
    :vartype public_ip_allocation_method: str or
     ~azure.mgmt.network.v2018_07_01.models.IPAllocationMethod
    :ivar public_ip_address_version: The public IP address version. Possible values are: 'IPv4'
     and 'IPv6'. Known values are: "IPv4" and "IPv6".
    :vartype public_ip_address_version: str or ~azure.mgmt.network.v2018_07_01.models.IPVersion
    :ivar ip_configuration: The IP configuration associated with the public IP address.
    :vartype ip_configuration: ~azure.mgmt.network.v2018_07_01.models.IPConfiguration
    :ivar dns_settings: The FQDN of the DNS record associated with the public IP address.
    :vartype dns_settings: ~azure.mgmt.network.v2018_07_01.models.PublicIPAddressDnsSettings
    :ivar ip_tags: The list of tags associated with the public IP address.
    :vartype ip_tags: list[~azure.mgmt.network.v2018_07_01.models.IpTag]
    :ivar ip_address: The IP address associated with the public IP address resource.
    :vartype ip_address: str
    :ivar public_ip_prefix: The Public IP Prefix this Public IP Address should be allocated from.
    :vartype public_ip_prefix: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar idle_timeout_in_minutes: The idle timeout of the public IP address.
    :vartype idle_timeout_in_minutes: int
    :ivar resource_guid: The resource GUID property of the public IP resource.
    :vartype resource_guid: str
    :ivar provisioning_state: The provisioning state of the PublicIP resource. Possible values
     are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "ip_configuration": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "sku": {"key": "sku", "type": "PublicIPAddressSku"}, "etag": {"key": "etag", "type": "str"}, "zones": {"key": "zones", "type": "[str]"}, "public_ip_allocation_method": {"key": "properties.publicIPAllocationMethod", "type": "str"}, "public_ip_address_version": {"key": "properties.publicIPAddressVersion", "type": "str"}, "ip_configuration": {"key": "properties.ipConfiguration", "type": "IPConfiguration"}, "dns_settings": {"key": "properties.dnsSettings", "type": "PublicIPAddressDnsSettings"}, "ip_tags": {"key": "properties.ipTags", "type": "[IpTag]"}, "ip_address": {"key": "properties.ipAddress", "type": "str"}, "public_ip_prefix": {"key": "properties.publicIPPrefix", "type": "SubResource"}, "idle_timeout_in_minutes": {"key": "properties.idleTimeoutInMinutes", "type": "int"}, "resource_guid": {"key": "properties.resourceGuid", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, sku: Optional["_models.PublicIPAddressSku"] = None, etag: Optional[str] = None, zones: Optional[List[str]] = None, public_ip_allocation_method: Optional[Union[str, "_models.IPAllocationMethod"]] = None, public_ip_address_version: Optional[Union[str, "_models.IPVersion"]] = None, dns_settings: Optional["_models.PublicIPAddressDnsSettings"] = None, ip_tags: Optional[List["_models.IpTag"]] = None, ip_address: Optional[str] = None, public_ip_prefix: Optional["_models.SubResource"] = None, idle_timeout_in_minutes: Optional[int] = None, resource_guid: Optional[str] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword sku: The public IP address SKU. :paramtype sku: ~azure.mgmt.network.v2018_07_01.models.PublicIPAddressSku :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword zones: A list of availability zones denoting the IP allocated for the resource needs to come from. :paramtype zones: list[str] :keyword public_ip_allocation_method: The public IP allocation method. Possible values are: 'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic". :paramtype public_ip_allocation_method: str or ~azure.mgmt.network.v2018_07_01.models.IPAllocationMethod :keyword public_ip_address_version: The public IP address version. Possible values are: 'IPv4' and 'IPv6'. Known values are: "IPv4" and "IPv6". :paramtype public_ip_address_version: str or ~azure.mgmt.network.v2018_07_01.models.IPVersion :keyword dns_settings: The FQDN of the DNS record associated with the public IP address. :paramtype dns_settings: ~azure.mgmt.network.v2018_07_01.models.PublicIPAddressDnsSettings :keyword ip_tags: The list of tags associated with the public IP address. :paramtype ip_tags: list[~azure.mgmt.network.v2018_07_01.models.IpTag] :keyword ip_address: The IP address associated with the public IP address resource. 
        :paramtype ip_address: str
        :keyword public_ip_prefix: The Public IP Prefix this Public IP Address should be allocated
         from.
        :paramtype public_ip_prefix: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword idle_timeout_in_minutes: The idle timeout of the public IP address.
        :paramtype idle_timeout_in_minutes: int
        :keyword resource_guid: The resource GUID property of the public IP resource.
        :paramtype resource_guid: str
        :keyword provisioning_state: The provisioning state of the PublicIP resource. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.sku = sku
        self.etag = etag
        self.zones = zones
        self.public_ip_allocation_method = public_ip_allocation_method
        self.public_ip_address_version = public_ip_address_version
        self.ip_configuration = None
        self.dns_settings = dns_settings
        self.ip_tags = ip_tags
        self.ip_address = ip_address
        self.public_ip_prefix = public_ip_prefix
        self.idle_timeout_in_minutes = idle_timeout_in_minutes
        self.resource_guid = resource_guid
        self.provisioning_state = provisioning_state
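# A minimal usage sketch for PublicIPAddress (not part of the generated SDK
# surface): a zonal, static IPv4 address with a Standard SKU. The location,
# zone, and domain label values are hypothetical; PublicIPAddressSku and
# PublicIPAddressDnsSettings are defined later in this module.
#
#     public_ip = PublicIPAddress(
#         location="westus2",
#         sku=PublicIPAddressSku(name="Standard"),
#         public_ip_allocation_method="Static",
#         public_ip_address_version="IPv4",
#         zones=["1"],
#         dns_settings=PublicIPAddressDnsSettings(domain_name_label="myapp"),
#         idle_timeout_in_minutes=4,
#     )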
class PublicIPAddressDnsSettings(_serialization.Model):
    """Contains FQDN of the DNS record associated with the public IP address.

    :ivar domain_name_label: Gets or sets the Domain name label. The concatenation of the domain
     name label and the regionalized DNS zone make up the fully qualified domain name associated
     with the public IP address. If a domain name label is specified, an A DNS record is created
     for the public IP in the Microsoft Azure DNS system.
    :vartype domain_name_label: str
    :ivar fqdn: Gets the FQDN, Fully qualified domain name of the A DNS record associated with the
     public IP. This is the concatenation of the domainNameLabel and the regionalized DNS zone.
    :vartype fqdn: str
    :ivar reverse_fqdn: Gets or Sets the Reverse FQDN. A user-visible, fully qualified domain name
     that resolves to this public IP address. If the reverseFqdn is specified, then a PTR DNS
     record is created pointing from the IP address in the in-addr.arpa domain to the reverse
     FQDN.
    :vartype reverse_fqdn: str
    """

    _attribute_map = {
        "domain_name_label": {"key": "domainNameLabel", "type": "str"},
        "fqdn": {"key": "fqdn", "type": "str"},
        "reverse_fqdn": {"key": "reverseFqdn", "type": "str"},
    }

    def __init__(
        self,
        *,
        domain_name_label: Optional[str] = None,
        fqdn: Optional[str] = None,
        reverse_fqdn: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword domain_name_label: Gets or sets the Domain name label. The concatenation of the
         domain name label and the regionalized DNS zone make up the fully qualified domain name
         associated with the public IP address. If a domain name label is specified, an A DNS record
         is created for the public IP in the Microsoft Azure DNS system.
        :paramtype domain_name_label: str
        :keyword fqdn: Gets the FQDN, Fully qualified domain name of the A DNS record associated
         with the public IP. This is the concatenation of the domainNameLabel and the regionalized
         DNS zone.
        :paramtype fqdn: str
        :keyword reverse_fqdn: Gets or Sets the Reverse FQDN. A user-visible, fully qualified domain
         name that resolves to this public IP address. If the reverseFqdn is specified, then a PTR
         DNS record is created pointing from the IP address in the in-addr.arpa domain to the
         reverse FQDN.
        :paramtype reverse_fqdn: str
        """
        super().__init__(**kwargs)
        self.domain_name_label = domain_name_label
        self.fqdn = fqdn
        self.reverse_fqdn = reverse_fqdn


class PublicIPAddressListResult(_serialization.Model):
    """Response for ListPublicIpAddresses API service call.

    :ivar value: A list of public IP addresses that exist in a resource group.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.PublicIPAddress]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[PublicIPAddress]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.PublicIPAddress"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: A list of public IP addresses that exist in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.PublicIPAddress]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = next_link


class PublicIPAddressSku(_serialization.Model):
    """SKU of a public IP address.

    :ivar name: Name of a public IP address SKU. Known values are: "Basic" and "Standard".
    :vartype name: str or ~azure.mgmt.network.v2018_07_01.models.PublicIPAddressSkuName
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
    }

    def __init__(self, *, name: Optional[Union[str, "_models.PublicIPAddressSkuName"]] = None, **kwargs):
        """
        :keyword name: Name of a public IP address SKU. Known values are: "Basic" and "Standard".
        :paramtype name: str or ~azure.mgmt.network.v2018_07_01.models.PublicIPAddressSkuName
        """
        super().__init__(**kwargs)
        self.name = name


class PublicIPPrefix(Resource):  # pylint: disable=too-many-instance-attributes
    """Public IP prefix resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar sku: The public IP prefix SKU.
    :vartype sku: ~azure.mgmt.network.v2018_07_01.models.PublicIPPrefixSku
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar zones: A list of availability zones denoting the IP allocated for the resource needs to
     come from.
    :vartype zones: list[str]
    :ivar public_ip_address_version: The public IP address version. Possible values are: 'IPv4'
     and 'IPv6'. Known values are: "IPv4" and "IPv6".
    :vartype public_ip_address_version: str or ~azure.mgmt.network.v2018_07_01.models.IPVersion
    :ivar ip_tags: The list of tags associated with the public IP prefix.
    :vartype ip_tags: list[~azure.mgmt.network.v2018_07_01.models.IpTag]
    :ivar prefix_length: The Length of the Public IP Prefix.
    :vartype prefix_length: int
    :ivar ip_prefix: The allocated Prefix.
    :vartype ip_prefix: str
    :ivar public_ip_addresses: The list of all referenced PublicIPAddresses.
    :vartype public_ip_addresses:
     list[~azure.mgmt.network.v2018_07_01.models.ReferencedPublicIpAddress]
    :ivar load_balancer_frontend_ip_configuration: The reference to load balancer frontend IP
     configuration associated with the public IP prefix.
    :vartype load_balancer_frontend_ip_configuration:
     ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar resource_guid: The resource GUID property of the public IP prefix resource.
:vartype resource_guid: str :ivar provisioning_state: The provisioning state of the Public IP prefix resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "load_balancer_frontend_ip_configuration": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "sku": {"key": "sku", "type": "PublicIPPrefixSku"}, "etag": {"key": "etag", "type": "str"}, "zones": {"key": "zones", "type": "[str]"}, "public_ip_address_version": {"key": "properties.publicIPAddressVersion", "type": "str"}, "ip_tags": {"key": "properties.ipTags", "type": "[IpTag]"}, "prefix_length": {"key": "properties.prefixLength", "type": "int"}, "ip_prefix": {"key": "properties.ipPrefix", "type": "str"}, "public_ip_addresses": {"key": "properties.publicIPAddresses", "type": "[ReferencedPublicIpAddress]"}, "load_balancer_frontend_ip_configuration": { "key": "properties.loadBalancerFrontendIpConfiguration", "type": "SubResource", }, "resource_guid": {"key": "properties.resourceGuid", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, sku: Optional["_models.PublicIPPrefixSku"] = None, etag: Optional[str] = None, zones: Optional[List[str]] = None, public_ip_address_version: Optional[Union[str, "_models.IPVersion"]] = None, ip_tags: Optional[List["_models.IpTag"]] = None, prefix_length: Optional[int] = None, ip_prefix: Optional[str] = None, public_ip_addresses: Optional[List["_models.ReferencedPublicIpAddress"]] = None, resource_guid: Optional[str] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword sku: The public IP prefix SKU. :paramtype sku: ~azure.mgmt.network.v2018_07_01.models.PublicIPPrefixSku :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword zones: A list of availability zones denoting the IP allocated for the resource needs to come from. :paramtype zones: list[str] :keyword public_ip_address_version: The public IP address version. Possible values are: 'IPv4' and 'IPv6'. Known values are: "IPv4" and "IPv6". :paramtype public_ip_address_version: str or ~azure.mgmt.network.v2018_07_01.models.IPVersion :keyword ip_tags: The list of tags associated with the public IP prefix. :paramtype ip_tags: list[~azure.mgmt.network.v2018_07_01.models.IpTag] :keyword prefix_length: The Length of the Public IP Prefix. :paramtype prefix_length: int :keyword ip_prefix: The allocated Prefix. :paramtype ip_prefix: str :keyword public_ip_addresses: The list of all referenced PublicIPAddresses. :paramtype public_ip_addresses: list[~azure.mgmt.network.v2018_07_01.models.ReferencedPublicIpAddress] :keyword resource_guid: The resource GUID property of the public IP prefix resource. :paramtype resource_guid: str :keyword provisioning_state: The provisioning state of the Public IP prefix resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. 
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.sku = sku
        self.etag = etag
        self.zones = zones
        self.public_ip_address_version = public_ip_address_version
        self.ip_tags = ip_tags
        self.prefix_length = prefix_length
        self.ip_prefix = ip_prefix
        self.public_ip_addresses = public_ip_addresses
        self.load_balancer_frontend_ip_configuration = None
        self.resource_guid = resource_guid
        self.provisioning_state = provisioning_state


class PublicIPPrefixListResult(_serialization.Model):
    """Response for ListPublicIpPrefixes API service call.

    :ivar value: A list of public IP prefixes that exist in a resource group.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.PublicIPPrefix]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[PublicIPPrefix]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.PublicIPPrefix"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: A list of public IP prefixes that exist in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.PublicIPPrefix]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = next_link


class PublicIPPrefixSku(_serialization.Model):
    """SKU of a public IP prefix.

    :ivar name: Name of a public IP prefix SKU. "Standard"
    :vartype name: str or ~azure.mgmt.network.v2018_07_01.models.PublicIPPrefixSkuName
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
    }

    def __init__(self, *, name: Optional[Union[str, "_models.PublicIPPrefixSkuName"]] = None, **kwargs):
        """
        :keyword name: Name of a public IP prefix SKU. "Standard"
        :paramtype name: str or ~azure.mgmt.network.v2018_07_01.models.PublicIPPrefixSkuName
        """
        super().__init__(**kwargs)
        self.name = name


class QueryTroubleshootingParameters(_serialization.Model):
    """Parameters that define the resource to query the troubleshooting result.

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: The target resource ID to query the troubleshooting result.
     Required.
    :vartype target_resource_id: str
    """

    _validation = {
        "target_resource_id": {"required": True},
    }

    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
    }

    def __init__(self, *, target_resource_id: str, **kwargs):
        """
        :keyword target_resource_id: The target resource ID to query the troubleshooting result.
         Required.
        :paramtype target_resource_id: str
        """
        super().__init__(**kwargs)
        self.target_resource_id = target_resource_id
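# A minimal usage sketch for QueryTroubleshootingParameters (not part of the
# generated SDK surface): the single required field is the ARM ID of the
# resource whose troubleshooting result is queried. The subscription, resource
# group, and gateway names below are hypothetical placeholders.
#
#     params = QueryTroubleshootingParameters(
#         target_resource_id=(
#             "/subscriptions/<subscription-id>/resourceGroups/<rg>"
#             "/providers/Microsoft.Network/virtualNetworkGateways/<gateway>"
#         )
#     )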
class ReferencedPublicIpAddress(_serialization.Model):
    """ReferencedPublicIpAddress.

    :ivar id: The PublicIPAddress Reference.
    :vartype id: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
    }

    def __init__(self, *, id: Optional[str] = None, **kwargs):  # pylint: disable=redefined-builtin
        """
        :keyword id: The PublicIPAddress Reference.
        :paramtype id: str
        """
        super().__init__(**kwargs)
        self.id = id


class ResourceNavigationLink(SubResource):
    """ResourceNavigationLink resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource that is unique within a resource group. This name can be used
     to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar linked_resource_type: Resource type of the linked resource.
    :vartype linked_resource_type: str
    :ivar link: Link to the external resource.
    :vartype link: str
    :ivar provisioning_state: Provisioning state of the ResourceNavigationLink resource.
    :vartype provisioning_state: str
    """

    _validation = {
        "etag": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "linked_resource_type": {"key": "properties.linkedResourceType", "type": "str"},
        "link": {"key": "properties.link", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        linked_resource_type: Optional[str] = None,
        link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource that is unique within a resource group. This name can be
         used to access the resource.
        :paramtype name: str
        :keyword linked_resource_type: Resource type of the linked resource.
        :paramtype linked_resource_type: str
        :keyword link: Link to the external resource.
        :paramtype link: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = None
        self.linked_resource_type = linked_resource_type
        self.link = link
        self.provisioning_state = None


class RetentionPolicyParameters(_serialization.Model):
    """Parameters that define the retention policy for flow log.

    :ivar days: Number of days to retain flow log records.
    :vartype days: int
    :ivar enabled: Flag to enable/disable retention.
    :vartype enabled: bool
    """

    _attribute_map = {
        "days": {"key": "days", "type": "int"},
        "enabled": {"key": "enabled", "type": "bool"},
    }

    def __init__(self, *, days: int = 0, enabled: bool = False, **kwargs):
        """
        :keyword days: Number of days to retain flow log records.
        :paramtype days: int
        :keyword enabled: Flag to enable/disable retention.
        :paramtype enabled: bool
        """
        super().__init__(**kwargs)
        self.days = days
        self.enabled = enabled
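# A minimal usage sketch for RetentionPolicyParameters (not part of the
# generated SDK surface): keep flow log records for 30 days. The constructor
# defaults above are days=0 and enabled=False, i.e. no retention.
#
#     retention = RetentionPolicyParameters(days=30, enabled=True)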
class Route(SubResource):
    """Route resource.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar address_prefix: The destination CIDR to which the route applies.
    :vartype address_prefix: str
    :ivar next_hop_type: The type of Azure hop the packet should be sent to. Possible values are:
     'VirtualNetworkGateway', 'VnetLocal', 'Internet', 'VirtualAppliance', and 'None'. Known values
     are: "VirtualNetworkGateway", "VnetLocal", "Internet", "VirtualAppliance", and "None".
    :vartype next_hop_type: str or ~azure.mgmt.network.v2018_07_01.models.RouteNextHopType
    :ivar next_hop_ip_address: The IP address packets should be forwarded to. Next hop values are
     only allowed in routes where the next hop type is VirtualAppliance.
    :vartype next_hop_ip_address: str
    :ivar provisioning_state: The provisioning state of the resource. Possible values are:
     'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "address_prefix": {"key": "properties.addressPrefix", "type": "str"},
        "next_hop_type": {"key": "properties.nextHopType", "type": "str"},
        "next_hop_ip_address": {"key": "properties.nextHopIpAddress", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        address_prefix: Optional[str] = None,
        next_hop_type: Optional[Union[str, "_models.RouteNextHopType"]] = None,
        next_hop_ip_address: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: The name of the resource that is unique within a resource group. This name
         can be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword address_prefix: The destination CIDR to which the route applies.
        :paramtype address_prefix: str
        :keyword next_hop_type: The type of Azure hop the packet should be sent to. Possible values
         are: 'VirtualNetworkGateway', 'VnetLocal', 'Internet', 'VirtualAppliance', and 'None'. Known
         values are: "VirtualNetworkGateway", "VnetLocal", "Internet", "VirtualAppliance", and
         "None".
        :paramtype next_hop_type: str or ~azure.mgmt.network.v2018_07_01.models.RouteNextHopType
        :keyword next_hop_ip_address: The IP address packets should be forwarded to. Next hop values
         are only allowed in routes where the next hop type is VirtualAppliance.
        :paramtype next_hop_ip_address: str
        :keyword provisioning_state: The provisioning state of the resource. Possible values are:
         'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.address_prefix = address_prefix
        self.next_hop_type = next_hop_type
        self.next_hop_ip_address = next_hop_ip_address
        self.provisioning_state = provisioning_state
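# A minimal usage sketch for Route (not part of the generated SDK surface):
# send all outbound traffic through a network virtual appliance. Per the
# docstring above, next_hop_ip_address is only allowed when next_hop_type is
# "VirtualAppliance". The route name and IP address are hypothetical.
#
#     default_route = Route(
#         name="default-via-nva",
#         address_prefix="0.0.0.0/0",
#         next_hop_type="VirtualAppliance",
#         next_hop_ip_address="10.0.2.4",
#     )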
class RouteFilter(Resource):
    """Route Filter Resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar rules: Collection of RouteFilterRules contained within a route filter.
    :vartype rules: list[~azure.mgmt.network.v2018_07_01.models.RouteFilterRule]
    :ivar peerings: A collection of references to express route circuit peerings.
    :vartype peerings: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeering]
    :ivar provisioning_state: The provisioning state of the resource. Possible values are:
     'Updating', 'Deleting', 'Succeeded' and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "etag": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "rules": {"key": "properties.rules", "type": "[RouteFilterRule]"},
        "peerings": {"key": "properties.peerings", "type": "[ExpressRouteCircuitPeering]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        rules: Optional[List["_models.RouteFilterRule"]] = None,
        peerings: Optional[List["_models.ExpressRouteCircuitPeering"]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword rules: Collection of RouteFilterRules contained within a route filter.
        :paramtype rules: list[~azure.mgmt.network.v2018_07_01.models.RouteFilterRule]
        :keyword peerings: A collection of references to express route circuit peerings.
        :paramtype peerings: list[~azure.mgmt.network.v2018_07_01.models.ExpressRouteCircuitPeering]
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = None
        self.rules = rules
        self.peerings = peerings
        self.provisioning_state = None


class RouteFilterListResult(_serialization.Model):
    """Response for the ListRouteFilters API service call.

    :ivar value: Gets a list of route filters in a resource group.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.RouteFilter]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[RouteFilter]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.RouteFilter"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: Gets a list of route filters in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.RouteFilter]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = next_link


class RouteFilterRule(SubResource):
    """Route Filter Rule Resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar access: The access type of the rule. Valid values are: 'Allow', 'Deny'. Known values
     are: "Allow" and "Deny".
    :vartype access: str or ~azure.mgmt.network.v2018_07_01.models.Access
    :ivar route_filter_rule_type: The rule type of the rule. Valid value is: 'Community'.
     "Community"
    :vartype route_filter_rule_type: str or
     ~azure.mgmt.network.v2018_07_01.models.RouteFilterRuleType
    :ivar communities: The collection for bgp community values to filter on. e.g.
     ['12076:5010','12076:5020'].
:vartype communities: list[str] :ivar provisioning_state: The provisioning state of the resource. Possible values are: 'Updating', 'Deleting', 'Succeeded' and 'Failed'. :vartype provisioning_state: str """ _validation = { "etag": {"readonly": True}, "provisioning_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "location": {"key": "location", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "access": {"key": "properties.access", "type": "str"}, "route_filter_rule_type": {"key": "properties.routeFilterRuleType", "type": "str"}, "communities": {"key": "properties.communities", "type": "[str]"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, location: Optional[str] = None, access: Optional[Union[str, "_models.Access"]] = None, route_filter_rule_type: Optional[Union[str, "_models.RouteFilterRuleType"]] = None, communities: Optional[List[str]] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword location: Resource location. :paramtype location: str :keyword access: The access type of the rule. Valid values are: 'Allow', 'Deny'. Known values are: "Allow" and "Deny". :paramtype access: str or ~azure.mgmt.network.v2018_07_01.models.Access :keyword route_filter_rule_type: The rule type of the rule. Valid value is: 'Community'. "Community" :paramtype route_filter_rule_type: str or ~azure.mgmt.network.v2018_07_01.models.RouteFilterRuleType :keyword communities: The collection for bgp community values to filter on. e.g. ['12076:5010','12076:5020']. :paramtype communities: list[str] """ super().__init__(id=id, **kwargs) self.name = name self.location = location self.etag = None self.access = access self.route_filter_rule_type = route_filter_rule_type self.communities = communities self.provisioning_state = None class RouteFilterRuleListResult(_serialization.Model): """Response for the ListRouteFilterRules API service call. :ivar value: Gets a list of RouteFilterRules in a resource group. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.RouteFilterRule] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[RouteFilterRule]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.RouteFilterRule"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: Gets a list of RouteFilterRules in a resource group. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.RouteFilterRule] :keyword next_link: The URL to get the next set of results. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class RouteListResult(_serialization.Model): """Response for the ListRoute API service call. :ivar value: Gets a list of routes in a resource group. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.Route] :ivar next_link: The URL to get the next set of results. 
:vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[Route]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.Route"]] = None, next_link: Optional[str] = None, **kwargs): """ :keyword value: Gets a list of routes in a resource group. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.Route] :keyword next_link: The URL to get the next set of results. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class RouteTable(Resource): """Route table resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar routes: Collection of routes contained within a route table. :vartype routes: list[~azure.mgmt.network.v2018_07_01.models.Route] :ivar subnets: A collection of references to subnets. :vartype subnets: list[~azure.mgmt.network.v2018_07_01.models.Subnet] :ivar disable_bgp_route_propagation: Gets or sets whether to disable the routes learned by BGP on that route table. True means disable. :vartype disable_bgp_route_propagation: bool :ivar provisioning_state: The provisioning state of the resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "subnets": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "routes": {"key": "properties.routes", "type": "[Route]"}, "subnets": {"key": "properties.subnets", "type": "[Subnet]"}, "disable_bgp_route_propagation": {"key": "properties.disableBgpRoutePropagation", "type": "bool"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, etag: Optional[str] = None, routes: Optional[List["_models.Route"]] = None, disable_bgp_route_propagation: Optional[bool] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword etag: Gets a unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword routes: Collection of routes contained within a route table. :paramtype routes: list[~azure.mgmt.network.v2018_07_01.models.Route] :keyword disable_bgp_route_propagation: Gets or sets whether to disable the routes learned by BGP on that route table. True means disable. :paramtype disable_bgp_route_propagation: bool :keyword provisioning_state: The provisioning state of the resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. 
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = etag
        self.routes = routes
        self.subnets = None
        self.disable_bgp_route_propagation = disable_bgp_route_propagation
        self.provisioning_state = provisioning_state


class RouteTableListResult(_serialization.Model):
    """Response for the ListRouteTable API service call.

    :ivar value: Gets a list of route tables in a resource group.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.RouteTable]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[RouteTable]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.RouteTable"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: Gets a list of route tables in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.RouteTable]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = next_link


class SecurityGroupNetworkInterface(_serialization.Model):
    """Network interface and all its associated security rules.

    :ivar id: ID of the network interface.
    :vartype id: str
    :ivar security_rule_associations: All security rules associated with the network interface.
    :vartype security_rule_associations:
     ~azure.mgmt.network.v2018_07_01.models.SecurityRuleAssociations
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "security_rule_associations": {"key": "securityRuleAssociations", "type": "SecurityRuleAssociations"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        security_rule_associations: Optional["_models.SecurityRuleAssociations"] = None,
        **kwargs
    ):
        """
        :keyword id: ID of the network interface.
        :paramtype id: str
        :keyword security_rule_associations: All security rules associated with the network
         interface.
        :paramtype security_rule_associations:
         ~azure.mgmt.network.v2018_07_01.models.SecurityRuleAssociations
        """
        super().__init__(**kwargs)
        self.id = id
        self.security_rule_associations = security_rule_associations


class SecurityGroupViewParameters(_serialization.Model):
    """Parameters that define the VM to check security groups for.

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: ID of the target VM. Required.
    :vartype target_resource_id: str
    """

    _validation = {
        "target_resource_id": {"required": True},
    }

    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
    }

    def __init__(self, *, target_resource_id: str, **kwargs):
        """
        :keyword target_resource_id: ID of the target VM. Required.
        :paramtype target_resource_id: str
        """
        super().__init__(**kwargs)
        self.target_resource_id = target_resource_id
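# A minimal usage sketch for SecurityGroupViewParameters (not part of the
# generated SDK surface): the single required field is the ARM ID of the VM
# whose applied security rules are requested. The IDs below are hypothetical
# placeholders.
#
#     view_params = SecurityGroupViewParameters(
#         target_resource_id=(
#             "/subscriptions/<subscription-id>/resourceGroups/<rg>"
#             "/providers/Microsoft.Compute/virtualMachines/<vm>"
#         )
#     )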
class SecurityGroupViewResult(_serialization.Model):
    """The information about security rules applied to the specified VM.

    :ivar network_interfaces: List of network interfaces on the specified VM.
    :vartype network_interfaces:
     list[~azure.mgmt.network.v2018_07_01.models.SecurityGroupNetworkInterface]
    """

    _attribute_map = {
        "network_interfaces": {"key": "networkInterfaces", "type": "[SecurityGroupNetworkInterface]"},
    }

    def __init__(self, *, network_interfaces: Optional[List["_models.SecurityGroupNetworkInterface"]] = None, **kwargs):
        """
        :keyword network_interfaces: List of network interfaces on the specified VM.
        :paramtype network_interfaces:
         list[~azure.mgmt.network.v2018_07_01.models.SecurityGroupNetworkInterface]
        """
        super().__init__(**kwargs)
        self.network_interfaces = network_interfaces


class SecurityRule(SubResource):  # pylint: disable=too-many-instance-attributes
    """Network security rule.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar description: A description for this rule. Restricted to 140 chars.
    :vartype description: str
    :ivar protocol: Network protocol this rule applies to. Possible values are 'Tcp', 'Udp', and
     '*'. Known values are: "Tcp", "Udp", and "*".
    :vartype protocol: str or ~azure.mgmt.network.v2018_07_01.models.SecurityRuleProtocol
    :ivar source_port_range: The source port or range. Integer or range between 0 and 65535.
     Asterisk '*' can also be used to match all ports.
    :vartype source_port_range: str
    :ivar destination_port_range: The destination port or range. Integer or range between 0 and
     65535. Asterisk '*' can also be used to match all ports.
    :vartype destination_port_range: str
    :ivar source_address_prefix: The CIDR or source IP range. Asterisk '*' can also be used to
     match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and
     'Internet' can also be used. If this is an ingress rule, specifies where network traffic
     originates from.
    :vartype source_address_prefix: str
    :ivar source_address_prefixes: The CIDR or source IP ranges.
    :vartype source_address_prefixes: list[str]
    :ivar source_application_security_groups: The application security group specified as source.
    :vartype source_application_security_groups:
     list[~azure.mgmt.network.v2018_07_01.models.ApplicationSecurityGroup]
    :ivar destination_address_prefix: The destination address prefix. CIDR or destination IP
     range. Asterisk '*' can also be used to match all source IPs. Default tags such as
     'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used.
    :vartype destination_address_prefix: str
    :ivar destination_address_prefixes: The destination address prefixes. CIDR or destination IP
     ranges.
    :vartype destination_address_prefixes: list[str]
    :ivar destination_application_security_groups: The application security group specified as
     destination.
    :vartype destination_application_security_groups:
     list[~azure.mgmt.network.v2018_07_01.models.ApplicationSecurityGroup]
    :ivar source_port_ranges: The source port ranges.
    :vartype source_port_ranges: list[str]
    :ivar destination_port_ranges: The destination port ranges.
    :vartype destination_port_ranges: list[str]
    :ivar access: The network traffic is allowed or denied. Possible values are: 'Allow' and
     'Deny'. Known values are: "Allow" and "Deny".
    :vartype access: str or ~azure.mgmt.network.v2018_07_01.models.SecurityRuleAccess
    :ivar priority: The priority of the rule. The value can be between 100 and 4096. The priority
     number must be unique for each rule in the collection. The lower the priority number, the
     higher the priority of the rule.
    :vartype priority: int
    :ivar direction: The direction of the rule. The direction specifies if rule will be evaluated
     on incoming or outgoing traffic. Possible values are: 'Inbound' and 'Outbound'. Known values
     are: "Inbound" and "Outbound".
:vartype direction: str or ~azure.mgmt.network.v2018_07_01.models.SecurityRuleDirection :ivar provisioning_state: The provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "description": {"key": "properties.description", "type": "str"}, "protocol": {"key": "properties.protocol", "type": "str"}, "source_port_range": {"key": "properties.sourcePortRange", "type": "str"}, "destination_port_range": {"key": "properties.destinationPortRange", "type": "str"}, "source_address_prefix": {"key": "properties.sourceAddressPrefix", "type": "str"}, "source_address_prefixes": {"key": "properties.sourceAddressPrefixes", "type": "[str]"}, "source_application_security_groups": { "key": "properties.sourceApplicationSecurityGroups", "type": "[ApplicationSecurityGroup]", }, "destination_address_prefix": {"key": "properties.destinationAddressPrefix", "type": "str"}, "destination_address_prefixes": {"key": "properties.destinationAddressPrefixes", "type": "[str]"}, "destination_application_security_groups": { "key": "properties.destinationApplicationSecurityGroups", "type": "[ApplicationSecurityGroup]", }, "source_port_ranges": {"key": "properties.sourcePortRanges", "type": "[str]"}, "destination_port_ranges": {"key": "properties.destinationPortRanges", "type": "[str]"}, "access": {"key": "properties.access", "type": "str"}, "priority": {"key": "properties.priority", "type": "int"}, "direction": {"key": "properties.direction", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, description: Optional[str] = None, protocol: Optional[Union[str, "_models.SecurityRuleProtocol"]] = None, source_port_range: Optional[str] = None, destination_port_range: Optional[str] = None, source_address_prefix: Optional[str] = None, source_address_prefixes: Optional[List[str]] = None, source_application_security_groups: Optional[List["_models.ApplicationSecurityGroup"]] = None, destination_address_prefix: Optional[str] = None, destination_address_prefixes: Optional[List[str]] = None, destination_application_security_groups: Optional[List["_models.ApplicationSecurityGroup"]] = None, source_port_ranges: Optional[List[str]] = None, destination_port_ranges: Optional[List[str]] = None, access: Optional[Union[str, "_models.SecurityRuleAccess"]] = None, priority: Optional[int] = None, direction: Optional[Union[str, "_models.SecurityRuleDirection"]] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword description: A description for this rule. Restricted to 140 chars. :paramtype description: str :keyword protocol: Network protocol this rule applies to. Possible values are 'Tcp', 'Udp', and '*'. Known values are: "Tcp", "Udp", and "*". :paramtype protocol: str or ~azure.mgmt.network.v2018_07_01.models.SecurityRuleProtocol :keyword source_port_range: The source port or range. Integer or range between 0 and 65535. 
Asterisk '*' can also be used to match all ports. :paramtype source_port_range: str :keyword destination_port_range: The destination port or range. Integer or range between 0 and 65535. Asterisk '*' can also be used to match all ports. :paramtype destination_port_range: str :keyword source_address_prefix: The CIDR or source IP range. Asterisk '*' can also be used to match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used. If this is an ingress rule, specifies where network traffic originates from. :paramtype source_address_prefix: str :keyword source_address_prefixes: The CIDR or source IP ranges. :paramtype source_address_prefixes: list[str] :keyword source_application_security_groups: The application security group specified as source. :paramtype source_application_security_groups: list[~azure.mgmt.network.v2018_07_01.models.ApplicationSecurityGroup] :keyword destination_address_prefix: The destination address prefix. CIDR or destination IP range. Asterisk '*' can also be used to match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used. :paramtype destination_address_prefix: str :keyword destination_address_prefixes: The destination address prefixes. CIDR or destination IP ranges. :paramtype destination_address_prefixes: list[str] :keyword destination_application_security_groups: The application security group specified as destination. :paramtype destination_application_security_groups: list[~azure.mgmt.network.v2018_07_01.models.ApplicationSecurityGroup] :keyword source_port_ranges: The source port ranges. :paramtype source_port_ranges: list[str] :keyword destination_port_ranges: The destination port ranges. :paramtype destination_port_ranges: list[str] :keyword access: The network traffic is allowed or denied. Possible values are: 'Allow' and 'Deny'. Known values are: "Allow" and "Deny". :paramtype access: str or ~azure.mgmt.network.v2018_07_01.models.SecurityRuleAccess :keyword priority: The priority of the rule. The value can be between 100 and 4096. The priority number must be unique for each rule in the collection. The lower the priority number, the higher the priority of the rule. :paramtype priority: int :keyword direction: The direction of the rule. The direction specifies if rule will be evaluated on incoming or outgoing traffic. Possible values are: 'Inbound' and 'Outbound'. Known values are: "Inbound" and "Outbound". :paramtype direction: str or ~azure.mgmt.network.v2018_07_01.models.SecurityRuleDirection :keyword provisioning_state: The provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. 
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.description = description
        self.protocol = protocol
        self.source_port_range = source_port_range
        self.destination_port_range = destination_port_range
        self.source_address_prefix = source_address_prefix
        self.source_address_prefixes = source_address_prefixes
        self.source_application_security_groups = source_application_security_groups
        self.destination_address_prefix = destination_address_prefix
        self.destination_address_prefixes = destination_address_prefixes
        self.destination_application_security_groups = destination_application_security_groups
        self.source_port_ranges = source_port_ranges
        self.destination_port_ranges = destination_port_ranges
        self.access = access
        self.priority = priority
        self.direction = direction
        self.provisioning_state = provisioning_state
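# A minimal usage sketch for SecurityRule (not part of the generated SDK
# surface): allow inbound HTTPS from the Internet service tag. The rule name,
# description, and priority are hypothetical; per the docstring above, the
# priority must be between 100 and 4096 and unique within the collection.
#
#     allow_https = SecurityRule(
#         name="allow-https-inbound",
#         description="Allow inbound HTTPS from the Internet",
#         protocol="Tcp",
#         source_address_prefix="Internet",
#         source_port_range="*",
#         destination_address_prefix="*",
#         destination_port_range="443",
#         access="Allow",
#         priority=100,
#         direction="Inbound",
#     )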
        :paramtype effective_security_rules:
         list[~azure.mgmt.network.v2018_07_01.models.EffectiveNetworkSecurityRule]
        """
        super().__init__(**kwargs)
        self.network_interface_association = network_interface_association
        self.subnet_association = subnet_association
        self.default_security_rules = default_security_rules
        self.effective_security_rules = effective_security_rules


class SecurityRuleListResult(_serialization.Model):
    """Response for ListSecurityRule API service call. Retrieves all security rules that belong to a
    network security group.

    :ivar value: The security rules in a network security group.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.SecurityRule]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[SecurityRule]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.SecurityRule"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: The security rules in a network security group.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.SecurityRule]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = next_link


class ServiceEndpointPolicy(Resource):
    """Service endpoint policy resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar service_endpoint_policy_definitions: A collection of service endpoint policy definitions
     of the service endpoint policy.
    :vartype service_endpoint_policy_definitions:
     list[~azure.mgmt.network.v2018_07_01.models.ServiceEndpointPolicyDefinition]
    :ivar resource_guid: The resource GUID property of the service endpoint policy resource.
    :vartype resource_guid: str
    :ivar provisioning_state: The provisioning state of the service endpoint policy. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "service_endpoint_policy_definitions": {
            "key": "properties.serviceEndpointPolicyDefinitions",
            "type": "[ServiceEndpointPolicyDefinition]",
        },
        "resource_guid": {"key": "properties.resourceGuid", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        service_endpoint_policy_definitions: Optional[List["_models.ServiceEndpointPolicyDefinition"]] = None,
        resource_guid: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword service_endpoint_policy_definitions: A collection of service endpoint policy
         definitions of the service endpoint policy.
        :paramtype service_endpoint_policy_definitions:
         list[~azure.mgmt.network.v2018_07_01.models.ServiceEndpointPolicyDefinition]
        :keyword resource_guid: The resource GUID property of the service endpoint policy resource.
        :paramtype resource_guid: str
        :keyword provisioning_state: The provisioning state of the service endpoint policy. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = etag
        self.service_endpoint_policy_definitions = service_endpoint_policy_definitions
        self.resource_guid = resource_guid
        self.provisioning_state = provisioning_state


class ServiceEndpointPolicyDefinition(SubResource):
    """Service Endpoint policy definitions.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar description: A description for this rule. Restricted to 140 chars.
    :vartype description: str
    :ivar service: The service endpoint name.
    :vartype service: str
    :ivar service_resources: A list of service resources.
    :vartype service_resources: list[str]
    :ivar provisioning_state: The provisioning state of the service endpoint policy definition.
     Possible values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "description": {"key": "properties.description", "type": "str"},
        "service": {"key": "properties.service", "type": "str"},
        "service_resources": {"key": "properties.serviceResources", "type": "[str]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        description: Optional[str] = None,
        service: Optional[str] = None,
        service_resources: Optional[List[str]] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: The name of the resource that is unique within a resource group. This name can
         be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword description: A description for this rule. Restricted to 140 chars.
        :paramtype description: str
        :keyword service: The service endpoint name.
        :paramtype service: str
        :keyword service_resources: A list of service resources.
        :paramtype service_resources: list[str]
        :keyword provisioning_state: The provisioning state of the service endpoint policy
         definition. Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.description = description
        self.service = service
        self.service_resources = service_resources
        self.provisioning_state = provisioning_state


class ServiceEndpointPolicyDefinitionListResult(_serialization.Model):
    """Response for ListServiceEndpointPolicyDefinition API service call. Retrieves all service
    endpoint policy definitions that belong to a service endpoint policy.

    :ivar value: The service endpoint policy definition in a service endpoint policy.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ServiceEndpointPolicyDefinition]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[ServiceEndpointPolicyDefinition]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.ServiceEndpointPolicyDefinition"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: The service endpoint policy definition in a service endpoint policy.
        :paramtype value:
         list[~azure.mgmt.network.v2018_07_01.models.ServiceEndpointPolicyDefinition]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = next_link


class ServiceEndpointPolicyListResult(_serialization.Model):
    """Response for ListServiceEndpointPolicies API service call.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: A list of ServiceEndpointPolicy resources.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.ServiceEndpointPolicy]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _validation = {
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[ServiceEndpointPolicy]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.ServiceEndpointPolicy"]] = None, **kwargs):
        """
        :keyword value: A list of ServiceEndpointPolicy resources.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.ServiceEndpointPolicy]
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = None


class ServiceEndpointPropertiesFormat(_serialization.Model):
    """The service endpoint properties.

    :ivar service: The type of the endpoint service.
    :vartype service: str
    :ivar locations: A list of locations.
    :vartype locations: list[str]
    :ivar provisioning_state: The provisioning state of the resource.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "service": {"key": "service", "type": "str"},
        "locations": {"key": "locations", "type": "[str]"},
        "provisioning_state": {"key": "provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        service: Optional[str] = None,
        locations: Optional[List[str]] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword service: The type of the endpoint service.
        :paramtype service: str
        :keyword locations: A list of locations.
        :paramtype locations: list[str]
        :keyword provisioning_state: The provisioning state of the resource.
        :paramtype provisioning_state: str
        """
        super().__init__(**kwargs)
        self.service = service
        self.locations = locations
        self.provisioning_state = provisioning_state


class Subnet(SubResource):  # pylint: disable=too-many-instance-attributes
    """Subnet in a virtual network resource.
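
    A minimal construction sketch (the name and address prefix below are illustrative
    placeholders, not defaults)::

        subnet = Subnet(name="frontend", address_prefix="10.0.0.0/24")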
    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar address_prefix: The address prefix for the subnet.
    :vartype address_prefix: str
    :ivar network_security_group: The reference of the NetworkSecurityGroup resource.
    :vartype network_security_group: ~azure.mgmt.network.v2018_07_01.models.NetworkSecurityGroup
    :ivar route_table: The reference of the RouteTable resource.
    :vartype route_table: ~azure.mgmt.network.v2018_07_01.models.RouteTable
    :ivar service_endpoints: An array of service endpoints.
    :vartype service_endpoints:
     list[~azure.mgmt.network.v2018_07_01.models.ServiceEndpointPropertiesFormat]
    :ivar service_endpoint_policies: An array of service endpoint policies.
    :vartype service_endpoint_policies:
     list[~azure.mgmt.network.v2018_07_01.models.ServiceEndpointPolicy]
    :ivar ip_configurations: Gets an array of references to the network interface IP
     configurations using this subnet.
    :vartype ip_configurations: list[~azure.mgmt.network.v2018_07_01.models.IPConfiguration]
    :ivar resource_navigation_links: Gets an array of references to the external resources using
     this subnet.
    :vartype resource_navigation_links:
     list[~azure.mgmt.network.v2018_07_01.models.ResourceNavigationLink]
    :ivar provisioning_state: The provisioning state of the resource.
    :vartype provisioning_state: str
    """

    _validation = {
        "ip_configurations": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "address_prefix": {"key": "properties.addressPrefix", "type": "str"},
        "network_security_group": {"key": "properties.networkSecurityGroup", "type": "NetworkSecurityGroup"},
        "route_table": {"key": "properties.routeTable", "type": "RouteTable"},
        "service_endpoints": {"key": "properties.serviceEndpoints", "type": "[ServiceEndpointPropertiesFormat]"},
        "service_endpoint_policies": {"key": "properties.serviceEndpointPolicies", "type": "[ServiceEndpointPolicy]"},
        "ip_configurations": {"key": "properties.ipConfigurations", "type": "[IPConfiguration]"},
        "resource_navigation_links": {"key": "properties.resourceNavigationLinks", "type": "[ResourceNavigationLink]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        address_prefix: Optional[str] = None,
        network_security_group: Optional["_models.NetworkSecurityGroup"] = None,
        route_table: Optional["_models.RouteTable"] = None,
        service_endpoints: Optional[List["_models.ServiceEndpointPropertiesFormat"]] = None,
        service_endpoint_policies: Optional[List["_models.ServiceEndpointPolicy"]] = None,
        resource_navigation_links: Optional[List["_models.ResourceNavigationLink"]] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: The name of the resource that is unique within a resource group. This name can
         be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword address_prefix: The address prefix for the subnet.
        :paramtype address_prefix: str
        :keyword network_security_group: The reference of the NetworkSecurityGroup resource.
        :paramtype network_security_group:
         ~azure.mgmt.network.v2018_07_01.models.NetworkSecurityGroup
        :keyword route_table: The reference of the RouteTable resource.
        :paramtype route_table: ~azure.mgmt.network.v2018_07_01.models.RouteTable
        :keyword service_endpoints: An array of service endpoints.
        :paramtype service_endpoints:
         list[~azure.mgmt.network.v2018_07_01.models.ServiceEndpointPropertiesFormat]
        :keyword service_endpoint_policies: An array of service endpoint policies.
        :paramtype service_endpoint_policies:
         list[~azure.mgmt.network.v2018_07_01.models.ServiceEndpointPolicy]
        :keyword resource_navigation_links: Gets an array of references to the external resources
         using this subnet.
        :paramtype resource_navigation_links:
         list[~azure.mgmt.network.v2018_07_01.models.ResourceNavigationLink]
        :keyword provisioning_state: The provisioning state of the resource.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.address_prefix = address_prefix
        self.network_security_group = network_security_group
        self.route_table = route_table
        self.service_endpoints = service_endpoints
        self.service_endpoint_policies = service_endpoint_policies
        self.ip_configurations = None
        self.resource_navigation_links = resource_navigation_links
        self.provisioning_state = provisioning_state


class SubnetAssociation(_serialization.Model):
    """Subnet and its custom security rules.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Subnet ID.
    :vartype id: str
    :ivar security_rules: Collection of custom security rules.
    :vartype security_rules: list[~azure.mgmt.network.v2018_07_01.models.SecurityRule]
    """

    _validation = {
        "id": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "security_rules": {"key": "securityRules", "type": "[SecurityRule]"},
    }

    def __init__(self, *, security_rules: Optional[List["_models.SecurityRule"]] = None, **kwargs):
        """
        :keyword security_rules: Collection of custom security rules.
        :paramtype security_rules: list[~azure.mgmt.network.v2018_07_01.models.SecurityRule]
        """
        super().__init__(**kwargs)
        self.id = None
        self.security_rules = security_rules


class SubnetListResult(_serialization.Model):
    """Response for ListSubnets API service call. Retrieves all subnets that belong to a virtual
    network.

    :ivar value: The subnets in a virtual network.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.Subnet]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[Subnet]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.Subnet"]] = None, next_link: Optional[str] = None, **kwargs):
        """
        :keyword value: The subnets in a virtual network.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.Subnet]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = next_link


class TagsObject(_serialization.Model):
    """Tags object for patch operations.

    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    """

    _attribute_map = {
        "tags": {"key": "tags", "type": "{str}"},
    }

    def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs):
        """
        :keyword tags: Resource tags.
:paramtype tags: dict[str, str] """ super().__init__(**kwargs) self.tags = tags class Topology(_serialization.Model): """Topology of the specified resource group. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: GUID representing the operation id. :vartype id: str :ivar created_date_time: The datetime when the topology was initially created for the resource group. :vartype created_date_time: ~datetime.datetime :ivar last_modified: The datetime when the topology was last modified. :vartype last_modified: ~datetime.datetime :ivar resources: :vartype resources: list[~azure.mgmt.network.v2018_07_01.models.TopologyResource] """ _validation = { "id": {"readonly": True}, "created_date_time": {"readonly": True}, "last_modified": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "created_date_time": {"key": "createdDateTime", "type": "iso-8601"}, "last_modified": {"key": "lastModified", "type": "iso-8601"}, "resources": {"key": "resources", "type": "[TopologyResource]"}, } def __init__(self, *, resources: Optional[List["_models.TopologyResource"]] = None, **kwargs): """ :keyword resources: :paramtype resources: list[~azure.mgmt.network.v2018_07_01.models.TopologyResource] """ super().__init__(**kwargs) self.id = None self.created_date_time = None self.last_modified = None self.resources = resources class TopologyAssociation(_serialization.Model): """Resources that have an association with the parent resource. :ivar name: The name of the resource that is associated with the parent resource. :vartype name: str :ivar resource_id: The ID of the resource that is associated with the parent resource. :vartype resource_id: str :ivar association_type: The association type of the child resource to the parent resource. Known values are: "Associated" and "Contains". :vartype association_type: str or ~azure.mgmt.network.v2018_07_01.models.AssociationType """ _attribute_map = { "name": {"key": "name", "type": "str"}, "resource_id": {"key": "resourceId", "type": "str"}, "association_type": {"key": "associationType", "type": "str"}, } def __init__( self, *, name: Optional[str] = None, resource_id: Optional[str] = None, association_type: Optional[Union[str, "_models.AssociationType"]] = None, **kwargs ): """ :keyword name: The name of the resource that is associated with the parent resource. :paramtype name: str :keyword resource_id: The ID of the resource that is associated with the parent resource. :paramtype resource_id: str :keyword association_type: The association type of the child resource to the parent resource. Known values are: "Associated" and "Contains". :paramtype association_type: str or ~azure.mgmt.network.v2018_07_01.models.AssociationType """ super().__init__(**kwargs) self.name = name self.resource_id = resource_id self.association_type = association_type class TopologyParameters(_serialization.Model): """Parameters that define the representation of topology. :ivar target_resource_group_name: The name of the target resource group to perform topology on. :vartype target_resource_group_name: str :ivar target_virtual_network: The reference of the Virtual Network resource. :vartype target_virtual_network: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar target_subnet: The reference of the Subnet resource. 
:vartype target_subnet: ~azure.mgmt.network.v2018_07_01.models.SubResource """ _attribute_map = { "target_resource_group_name": {"key": "targetResourceGroupName", "type": "str"}, "target_virtual_network": {"key": "targetVirtualNetwork", "type": "SubResource"}, "target_subnet": {"key": "targetSubnet", "type": "SubResource"}, } def __init__( self, *, target_resource_group_name: Optional[str] = None, target_virtual_network: Optional["_models.SubResource"] = None, target_subnet: Optional["_models.SubResource"] = None, **kwargs ): """ :keyword target_resource_group_name: The name of the target resource group to perform topology on. :paramtype target_resource_group_name: str :keyword target_virtual_network: The reference of the Virtual Network resource. :paramtype target_virtual_network: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword target_subnet: The reference of the Subnet resource. :paramtype target_subnet: ~azure.mgmt.network.v2018_07_01.models.SubResource """ super().__init__(**kwargs) self.target_resource_group_name = target_resource_group_name self.target_virtual_network = target_virtual_network self.target_subnet = target_subnet class TopologyResource(_serialization.Model): """The network resource topology information for the given resource group. :ivar name: Name of the resource. :vartype name: str :ivar id: ID of the resource. :vartype id: str :ivar location: Resource location. :vartype location: str :ivar associations: Holds the associations the resource has with other resources in the resource group. :vartype associations: list[~azure.mgmt.network.v2018_07_01.models.TopologyAssociation] """ _attribute_map = { "name": {"key": "name", "type": "str"}, "id": {"key": "id", "type": "str"}, "location": {"key": "location", "type": "str"}, "associations": {"key": "associations", "type": "[TopologyAssociation]"}, } def __init__( self, *, name: Optional[str] = None, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, associations: Optional[List["_models.TopologyAssociation"]] = None, **kwargs ): """ :keyword name: Name of the resource. :paramtype name: str :keyword id: ID of the resource. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword associations: Holds the associations the resource has with other resources in the resource group. :paramtype associations: list[~azure.mgmt.network.v2018_07_01.models.TopologyAssociation] """ super().__init__(**kwargs) self.name = name self.id = id self.location = location self.associations = associations class TrafficAnalyticsConfigurationProperties(_serialization.Model): """Parameters that define the configuration of traffic analytics. All required parameters must be populated in order to send to Azure. :ivar enabled: Flag to enable/disable traffic analytics. Required. :vartype enabled: bool :ivar workspace_id: The resource guid of the attached workspace. Required. :vartype workspace_id: str :ivar workspace_region: The location of the attached workspace. Required. :vartype workspace_region: str :ivar workspace_resource_id: Resource Id of the attached workspace. Required. 
:vartype workspace_resource_id: str """ _validation = { "enabled": {"required": True}, "workspace_id": {"required": True}, "workspace_region": {"required": True}, "workspace_resource_id": {"required": True}, } _attribute_map = { "enabled": {"key": "enabled", "type": "bool"}, "workspace_id": {"key": "workspaceId", "type": "str"}, "workspace_region": {"key": "workspaceRegion", "type": "str"}, "workspace_resource_id": {"key": "workspaceResourceId", "type": "str"}, } def __init__( self, *, enabled: bool, workspace_id: str, workspace_region: str, workspace_resource_id: str, **kwargs ): """ :keyword enabled: Flag to enable/disable traffic analytics. Required. :paramtype enabled: bool :keyword workspace_id: The resource guid of the attached workspace. Required. :paramtype workspace_id: str :keyword workspace_region: The location of the attached workspace. Required. :paramtype workspace_region: str :keyword workspace_resource_id: Resource Id of the attached workspace. Required. :paramtype workspace_resource_id: str """ super().__init__(**kwargs) self.enabled = enabled self.workspace_id = workspace_id self.workspace_region = workspace_region self.workspace_resource_id = workspace_resource_id class TrafficAnalyticsProperties(_serialization.Model): """Parameters that define the configuration of traffic analytics. All required parameters must be populated in order to send to Azure. :ivar network_watcher_flow_analytics_configuration: Parameters that define the configuration of traffic analytics. Required. :vartype network_watcher_flow_analytics_configuration: ~azure.mgmt.network.v2018_07_01.models.TrafficAnalyticsConfigurationProperties """ _validation = { "network_watcher_flow_analytics_configuration": {"required": True}, } _attribute_map = { "network_watcher_flow_analytics_configuration": { "key": "networkWatcherFlowAnalyticsConfiguration", "type": "TrafficAnalyticsConfigurationProperties", }, } def __init__( self, *, network_watcher_flow_analytics_configuration: "_models.TrafficAnalyticsConfigurationProperties", **kwargs ): """ :keyword network_watcher_flow_analytics_configuration: Parameters that define the configuration of traffic analytics. Required. :paramtype network_watcher_flow_analytics_configuration: ~azure.mgmt.network.v2018_07_01.models.TrafficAnalyticsConfigurationProperties """ super().__init__(**kwargs) self.network_watcher_flow_analytics_configuration = network_watcher_flow_analytics_configuration class TrafficQuery(_serialization.Model): """Parameters to compare with network configuration. All required parameters must be populated in order to send to Azure. :ivar direction: The direction of the traffic. Accepted values are 'Inbound' and 'Outbound'. Required. Known values are: "Inbound" and "Outbound". :vartype direction: str or ~azure.mgmt.network.v2018_07_01.models.Direction :ivar protocol: Protocol to be verified on. Accepted values are '*', TCP, UDP. Required. :vartype protocol: str :ivar source: Traffic source. Accepted values are '*', IP Address/CIDR, Service Tag. Required. :vartype source: str :ivar destination: Traffic destination. Accepted values are: '*', IP Address/CIDR, Service Tag. Required. :vartype destination: str :ivar destination_port: Traffic destination port. Accepted values are '*', port (for example, 3389) and port range (for example, 80-100). Required. 
:vartype destination_port: str """ _validation = { "direction": {"required": True}, "protocol": {"required": True}, "source": {"required": True}, "destination": {"required": True}, "destination_port": {"required": True}, } _attribute_map = { "direction": {"key": "direction", "type": "str"}, "protocol": {"key": "protocol", "type": "str"}, "source": {"key": "source", "type": "str"}, "destination": {"key": "destination", "type": "str"}, "destination_port": {"key": "destinationPort", "type": "str"}, } def __init__( self, *, direction: Union[str, "_models.Direction"], protocol: str, source: str, destination: str, destination_port: str, **kwargs ): """ :keyword direction: The direction of the traffic. Accepted values are 'Inbound' and 'Outbound'. Required. Known values are: "Inbound" and "Outbound". :paramtype direction: str or ~azure.mgmt.network.v2018_07_01.models.Direction :keyword protocol: Protocol to be verified on. Accepted values are '*', TCP, UDP. Required. :paramtype protocol: str :keyword source: Traffic source. Accepted values are '*', IP Address/CIDR, Service Tag. Required. :paramtype source: str :keyword destination: Traffic destination. Accepted values are: '*', IP Address/CIDR, Service Tag. Required. :paramtype destination: str :keyword destination_port: Traffic destination port. Accepted values are '*', port (for example, 3389) and port range (for example, 80-100). Required. :paramtype destination_port: str """ super().__init__(**kwargs) self.direction = direction self.protocol = protocol self.source = source self.destination = destination self.destination_port = destination_port class TroubleshootingDetails(_serialization.Model): """Information gained from troubleshooting of specified resource. :ivar id: The id of the get troubleshoot operation. :vartype id: str :ivar reason_type: Reason type of failure. :vartype reason_type: str :ivar summary: A summary of troubleshooting. :vartype summary: str :ivar detail: Details on troubleshooting results. :vartype detail: str :ivar recommended_actions: List of recommended actions. :vartype recommended_actions: list[~azure.mgmt.network.v2018_07_01.models.TroubleshootingRecommendedActions] """ _attribute_map = { "id": {"key": "id", "type": "str"}, "reason_type": {"key": "reasonType", "type": "str"}, "summary": {"key": "summary", "type": "str"}, "detail": {"key": "detail", "type": "str"}, "recommended_actions": {"key": "recommendedActions", "type": "[TroubleshootingRecommendedActions]"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin reason_type: Optional[str] = None, summary: Optional[str] = None, detail: Optional[str] = None, recommended_actions: Optional[List["_models.TroubleshootingRecommendedActions"]] = None, **kwargs ): """ :keyword id: The id of the get troubleshoot operation. :paramtype id: str :keyword reason_type: Reason type of failure. :paramtype reason_type: str :keyword summary: A summary of troubleshooting. :paramtype summary: str :keyword detail: Details on troubleshooting results. :paramtype detail: str :keyword recommended_actions: List of recommended actions. :paramtype recommended_actions: list[~azure.mgmt.network.v2018_07_01.models.TroubleshootingRecommendedActions] """ super().__init__(**kwargs) self.id = id self.reason_type = reason_type self.summary = summary self.detail = detail self.recommended_actions = recommended_actions class TroubleshootingParameters(_serialization.Model): """Parameters that define the resource to troubleshoot. 
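
    A construction sketch with placeholder values (the resource IDs and blob URL below are
    hypothetical, shown only to illustrate the required fields)::

        params = TroubleshootingParameters(
            target_resource_id="<target resource ID>",
            storage_id="<storage account resource ID>",
            storage_path="https://<account>.blob.core.windows.net/troubleshooting",
        )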
    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: The target resource to troubleshoot. Required.
    :vartype target_resource_id: str
    :ivar storage_id: The ID for the storage account to save the troubleshoot result. Required.
    :vartype storage_id: str
    :ivar storage_path: The path to the blob to save the troubleshoot result in. Required.
    :vartype storage_path: str
    """

    _validation = {
        "target_resource_id": {"required": True},
        "storage_id": {"required": True},
        "storage_path": {"required": True},
    }

    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
        "storage_id": {"key": "properties.storageId", "type": "str"},
        "storage_path": {"key": "properties.storagePath", "type": "str"},
    }

    def __init__(self, *, target_resource_id: str, storage_id: str, storage_path: str, **kwargs):
        """
        :keyword target_resource_id: The target resource to troubleshoot. Required.
        :paramtype target_resource_id: str
        :keyword storage_id: The ID for the storage account to save the troubleshoot result.
         Required.
        :paramtype storage_id: str
        :keyword storage_path: The path to the blob to save the troubleshoot result in. Required.
        :paramtype storage_path: str
        """
        super().__init__(**kwargs)
        self.target_resource_id = target_resource_id
        self.storage_id = storage_id
        self.storage_path = storage_path


class TroubleshootingRecommendedActions(_serialization.Model):
    """Recommended actions based on discovered issues.

    :ivar action_id: ID of the recommended action.
    :vartype action_id: str
    :ivar action_text: Description of recommended actions.
    :vartype action_text: str
    :ivar action_uri: The URI linking to documentation for the recommended troubleshooting
     actions.
    :vartype action_uri: str
    :ivar action_uri_text: The information from the URI for the recommended troubleshooting
     actions.
    :vartype action_uri_text: str
    """

    _attribute_map = {
        "action_id": {"key": "actionId", "type": "str"},
        "action_text": {"key": "actionText", "type": "str"},
        "action_uri": {"key": "actionUri", "type": "str"},
        "action_uri_text": {"key": "actionUriText", "type": "str"},
    }

    def __init__(
        self,
        *,
        action_id: Optional[str] = None,
        action_text: Optional[str] = None,
        action_uri: Optional[str] = None,
        action_uri_text: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword action_id: ID of the recommended action.
        :paramtype action_id: str
        :keyword action_text: Description of recommended actions.
        :paramtype action_text: str
        :keyword action_uri: The URI linking to documentation for the recommended troubleshooting
         actions.
        :paramtype action_uri: str
        :keyword action_uri_text: The information from the URI for the recommended troubleshooting
         actions.
        :paramtype action_uri_text: str
        """
        super().__init__(**kwargs)
        self.action_id = action_id
        self.action_text = action_text
        self.action_uri = action_uri
        self.action_uri_text = action_uri_text


class TroubleshootingResult(_serialization.Model):
    """Troubleshooting information gained from specified resource.

    :ivar start_time: The start time of the troubleshooting.
    :vartype start_time: ~datetime.datetime
    :ivar end_time: The end time of the troubleshooting.
    :vartype end_time: ~datetime.datetime
    :ivar code: The result code of the troubleshooting.
    :vartype code: str
    :ivar results: Information from troubleshooting.
    :vartype results: list[~azure.mgmt.network.v2018_07_01.models.TroubleshootingDetails]
    """

    _attribute_map = {
        "start_time": {"key": "startTime", "type": "iso-8601"},
        "end_time": {"key": "endTime", "type": "iso-8601"},
        "code": {"key": "code", "type": "str"},
        "results": {"key": "results", "type": "[TroubleshootingDetails]"},
    }

    def __init__(
        self,
        *,
        start_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        code: Optional[str] = None,
        results: Optional[List["_models.TroubleshootingDetails"]] = None,
        **kwargs
    ):
        """
        :keyword start_time: The start time of the troubleshooting.
        :paramtype start_time: ~datetime.datetime
        :keyword end_time: The end time of the troubleshooting.
        :paramtype end_time: ~datetime.datetime
        :keyword code: The result code of the troubleshooting.
        :paramtype code: str
        :keyword results: Information from troubleshooting.
        :paramtype results: list[~azure.mgmt.network.v2018_07_01.models.TroubleshootingDetails]
        """
        super().__init__(**kwargs)
        self.start_time = start_time
        self.end_time = end_time
        self.code = code
        self.results = results


class TunnelConnectionHealth(_serialization.Model):
    """Health of a single tunnel of a VirtualNetworkGatewayConnection.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar tunnel: Tunnel name.
    :vartype tunnel: str
    :ivar connection_status: Virtual network Gateway connection status. Known values are:
     "Unknown", "Connecting", "Connected", and "NotConnected".
    :vartype connection_status: str or
     ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayConnectionStatus
    :ivar ingress_bytes_transferred: The Ingress Bytes Transferred in this connection.
    :vartype ingress_bytes_transferred: int
    :ivar egress_bytes_transferred: The Egress Bytes Transferred in this connection.
    :vartype egress_bytes_transferred: int
    :ivar last_connection_established_utc_time: The time at which the connection was established,
     in UTC format.
    :vartype last_connection_established_utc_time: str
    """

    _validation = {
        "tunnel": {"readonly": True},
        "connection_status": {"readonly": True},
        "ingress_bytes_transferred": {"readonly": True},
        "egress_bytes_transferred": {"readonly": True},
        "last_connection_established_utc_time": {"readonly": True},
    }

    _attribute_map = {
        "tunnel": {"key": "tunnel", "type": "str"},
        "connection_status": {"key": "connectionStatus", "type": "str"},
        "ingress_bytes_transferred": {"key": "ingressBytesTransferred", "type": "int"},
        "egress_bytes_transferred": {"key": "egressBytesTransferred", "type": "int"},
        "last_connection_established_utc_time": {"key": "lastConnectionEstablishedUtcTime", "type": "str"},
    }

    def __init__(self, **kwargs):
        """ """
        super().__init__(**kwargs)
        self.tunnel = None
        self.connection_status = None
        self.ingress_bytes_transferred = None
        self.egress_bytes_transferred = None
        self.last_connection_established_utc_time = None


class Usage(_serialization.Model):
    """Describes network resource usage.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource identifier.
    :vartype id: str
    :ivar unit: An enum describing the unit of measurement. Required. "Count"
    :vartype unit: str or ~azure.mgmt.network.v2018_07_01.models.UsageUnit
    :ivar current_value: The current value of the usage. Required.
    :vartype current_value: int
    :ivar limit: The limit of usage. Required.
    :vartype limit: int
    :ivar name: The name of the type of usage. Required.
    :vartype name: ~azure.mgmt.network.v2018_07_01.models.UsageName
    """

    _validation = {
        "id": {"readonly": True},
        "unit": {"required": True},
        "current_value": {"required": True},
        "limit": {"required": True},
        "name": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "unit": {"key": "unit", "type": "str"},
        "current_value": {"key": "currentValue", "type": "int"},
        "limit": {"key": "limit", "type": "int"},
        "name": {"key": "name", "type": "UsageName"},
    }

    def __init__(
        self,
        *,
        unit: Union[str, "_models.UsageUnit"],
        current_value: int,
        limit: int,
        name: "_models.UsageName",
        **kwargs
    ):
        """
        :keyword unit: An enum describing the unit of measurement. Required. "Count"
        :paramtype unit: str or ~azure.mgmt.network.v2018_07_01.models.UsageUnit
        :keyword current_value: The current value of the usage. Required.
        :paramtype current_value: int
        :keyword limit: The limit of usage. Required.
        :paramtype limit: int
        :keyword name: The name of the type of usage. Required.
        :paramtype name: ~azure.mgmt.network.v2018_07_01.models.UsageName
        """
        super().__init__(**kwargs)
        self.id = None
        self.unit = unit
        self.current_value = current_value
        self.limit = limit
        self.name = name


class UsageName(_serialization.Model):
    """The usage names.

    :ivar value: A string describing the resource name.
    :vartype value: str
    :ivar localized_value: A localized string describing the resource name.
    :vartype localized_value: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "str"},
        "localized_value": {"key": "localizedValue", "type": "str"},
    }

    def __init__(self, *, value: Optional[str] = None, localized_value: Optional[str] = None, **kwargs):
        """
        :keyword value: A string describing the resource name.
        :paramtype value: str
        :keyword localized_value: A localized string describing the resource name.
        :paramtype localized_value: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.localized_value = localized_value


class UsagesListResult(_serialization.Model):
    """The list usages operation response.

    :ivar value: The list network resource usages.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.Usage]
    :ivar next_link: URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[Usage]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.Usage"]] = None, next_link: Optional[str] = None, **kwargs):
        """
        :keyword value: The list network resource usages.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.Usage]
        :keyword next_link: URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = next_link


class VerificationIPFlowParameters(_serialization.Model):
    """Parameters that define the IP flow to be verified.

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: The ID of the target resource to perform the IP flow verification
     on. Required.
    :vartype target_resource_id: str
    :ivar direction: The direction of the packet represented as a 5-tuple. Required. Known values
     are: "Inbound" and "Outbound".
    :vartype direction: str or ~azure.mgmt.network.v2018_07_01.models.Direction
    :ivar protocol: Protocol to be verified on. Required. Known values are: "TCP" and "UDP".
    :vartype protocol: str or ~azure.mgmt.network.v2018_07_01.models.IpFlowProtocol
    :ivar local_port: The local port. Acceptable values are a single integer in the range
     (0-65535). Support for '*' for the source port depends on the direction. Required.
    :vartype local_port: str
    :ivar remote_port: The remote port. Acceptable values are a single integer in the range
     (0-65535). Support for '*' for the source port depends on the direction. Required.
    :vartype remote_port: str
    :ivar local_ip_address: The local IP address. Acceptable values are valid IPv4 addresses.
     Required.
    :vartype local_ip_address: str
    :ivar remote_ip_address: The remote IP address. Acceptable values are valid IPv4 addresses.
     Required.
    :vartype remote_ip_address: str
    :ivar target_nic_resource_id: The NIC ID. (If VM has multiple NICs and IP forwarding is
     enabled on any of them, then this parameter must be specified. Otherwise optional).
    :vartype target_nic_resource_id: str
    """

    _validation = {
        "target_resource_id": {"required": True},
        "direction": {"required": True},
        "protocol": {"required": True},
        "local_port": {"required": True},
        "remote_port": {"required": True},
        "local_ip_address": {"required": True},
        "remote_ip_address": {"required": True},
    }

    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
        "direction": {"key": "direction", "type": "str"},
        "protocol": {"key": "protocol", "type": "str"},
        "local_port": {"key": "localPort", "type": "str"},
        "remote_port": {"key": "remotePort", "type": "str"},
        "local_ip_address": {"key": "localIPAddress", "type": "str"},
        "remote_ip_address": {"key": "remoteIPAddress", "type": "str"},
        "target_nic_resource_id": {"key": "targetNicResourceId", "type": "str"},
    }

    def __init__(
        self,
        *,
        target_resource_id: str,
        direction: Union[str, "_models.Direction"],
        protocol: Union[str, "_models.IpFlowProtocol"],
        local_port: str,
        remote_port: str,
        local_ip_address: str,
        remote_ip_address: str,
        target_nic_resource_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword target_resource_id: The ID of the target resource to perform the IP flow
         verification on. Required.
        :paramtype target_resource_id: str
        :keyword direction: The direction of the packet represented as a 5-tuple. Required. Known
         values are: "Inbound" and "Outbound".
        :paramtype direction: str or ~azure.mgmt.network.v2018_07_01.models.Direction
        :keyword protocol: Protocol to be verified on. Required. Known values are: "TCP" and "UDP".
        :paramtype protocol: str or ~azure.mgmt.network.v2018_07_01.models.IpFlowProtocol
        :keyword local_port: The local port. Acceptable values are a single integer in the range
         (0-65535). Support for '*' for the source port depends on the direction. Required.
        :paramtype local_port: str
        :keyword remote_port: The remote port. Acceptable values are a single integer in the range
         (0-65535). Support for '*' for the source port depends on the direction. Required.
        :paramtype remote_port: str
        :keyword local_ip_address: The local IP address. Acceptable values are valid IPv4 addresses.
         Required.
        :paramtype local_ip_address: str
        :keyword remote_ip_address: The remote IP address. Acceptable values are valid IPv4
         addresses. Required.
        :paramtype remote_ip_address: str
        :keyword target_nic_resource_id: The NIC ID. (If VM has multiple NICs and IP forwarding is
         enabled on any of them, then this parameter must be specified. Otherwise optional).
:paramtype target_nic_resource_id: str """ super().__init__(**kwargs) self.target_resource_id = target_resource_id self.direction = direction self.protocol = protocol self.local_port = local_port self.remote_port = remote_port self.local_ip_address = local_ip_address self.remote_ip_address = remote_ip_address self.target_nic_resource_id = target_nic_resource_id class VerificationIPFlowResult(_serialization.Model): """Results of IP flow verification on the target resource. :ivar access: Indicates whether the traffic is allowed or denied. Known values are: "Allow" and "Deny". :vartype access: str or ~azure.mgmt.network.v2018_07_01.models.Access :ivar rule_name: Name of the rule. If input is not matched against any security rule, it is not displayed. :vartype rule_name: str """ _attribute_map = { "access": {"key": "access", "type": "str"}, "rule_name": {"key": "ruleName", "type": "str"}, } def __init__( self, *, access: Optional[Union[str, "_models.Access"]] = None, rule_name: Optional[str] = None, **kwargs ): """ :keyword access: Indicates whether the traffic is allowed or denied. Known values are: "Allow" and "Deny". :paramtype access: str or ~azure.mgmt.network.v2018_07_01.models.Access :keyword rule_name: Name of the rule. If input is not matched against any security rule, it is not displayed. :paramtype rule_name: str """ super().__init__(**kwargs) self.access = access self.rule_name = rule_name class VirtualHub(Resource): """VirtualHub Resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar virtual_wan: The VirtualWAN to which the VirtualHub belongs. :vartype virtual_wan: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar hub_virtual_network_connections: list of all vnet connections with this VirtualHub. :vartype hub_virtual_network_connections: list[~azure.mgmt.network.v2018_07_01.models.HubVirtualNetworkConnection] :ivar address_prefix: Address-prefix for this VirtualHub. :vartype address_prefix: str :ivar provisioning_state: The provisioning state of the resource. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". 
:vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "etag": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "virtual_wan": {"key": "properties.virtualWan", "type": "SubResource"}, "hub_virtual_network_connections": { "key": "properties.hubVirtualNetworkConnections", "type": "[HubVirtualNetworkConnection]", }, "address_prefix": {"key": "properties.addressPrefix", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, virtual_wan: Optional["_models.SubResource"] = None, hub_virtual_network_connections: Optional[List["_models.HubVirtualNetworkConnection"]] = None, address_prefix: Optional[str] = None, provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword virtual_wan: The VirtualWAN to which the VirtualHub belongs. :paramtype virtual_wan: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword hub_virtual_network_connections: list of all vnet connections with this VirtualHub. :paramtype hub_virtual_network_connections: list[~azure.mgmt.network.v2018_07_01.models.HubVirtualNetworkConnection] :keyword address_prefix: Address-prefix for this VirtualHub. :paramtype address_prefix: str :keyword provisioning_state: The provisioning state of the resource. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". :paramtype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.etag = None self.virtual_wan = virtual_wan self.hub_virtual_network_connections = hub_virtual_network_connections self.address_prefix = address_prefix self.provisioning_state = provisioning_state class VirtualNetwork(Resource): # pylint: disable=too-many-instance-attributes """Virtual Network resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar address_space: The AddressSpace that contains an array of IP address ranges that can be used by subnets. :vartype address_space: ~azure.mgmt.network.v2018_07_01.models.AddressSpace :ivar dhcp_options: The dhcpOptions that contains an array of DNS servers available to VMs deployed in the virtual network. :vartype dhcp_options: ~azure.mgmt.network.v2018_07_01.models.DhcpOptions :ivar subnets: A list of subnets in a Virtual Network. :vartype subnets: list[~azure.mgmt.network.v2018_07_01.models.Subnet] :ivar virtual_network_peerings: A list of peerings in a Virtual Network. 
    :vartype virtual_network_peerings:
     list[~azure.mgmt.network.v2018_07_01.models.VirtualNetworkPeering]
    :ivar resource_guid: The resourceGuid property of the Virtual Network resource.
    :vartype resource_guid: str
    :ivar provisioning_state: The provisioning state of the virtual network resource. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    :ivar enable_ddos_protection: Indicates if DDoS protection is enabled for all the protected
     resources in the virtual network. It requires a DDoS protection plan associated with the
     resource.
    :vartype enable_ddos_protection: bool
    :ivar enable_vm_protection: Indicates if VM protection is enabled for all the subnets in the
     virtual network.
    :vartype enable_vm_protection: bool
    :ivar ddos_protection_plan: The DDoS protection plan associated with the virtual network.
    :vartype ddos_protection_plan: ~azure.mgmt.network.v2018_07_01.models.SubResource
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "address_space": {"key": "properties.addressSpace", "type": "AddressSpace"},
        "dhcp_options": {"key": "properties.dhcpOptions", "type": "DhcpOptions"},
        "subnets": {"key": "properties.subnets", "type": "[Subnet]"},
        "virtual_network_peerings": {"key": "properties.virtualNetworkPeerings", "type": "[VirtualNetworkPeering]"},
        "resource_guid": {"key": "properties.resourceGuid", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
        "enable_ddos_protection": {"key": "properties.enableDdosProtection", "type": "bool"},
        "enable_vm_protection": {"key": "properties.enableVmProtection", "type": "bool"},
        "ddos_protection_plan": {"key": "properties.ddosProtectionPlan", "type": "SubResource"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        address_space: Optional["_models.AddressSpace"] = None,
        dhcp_options: Optional["_models.DhcpOptions"] = None,
        subnets: Optional[List["_models.Subnet"]] = None,
        virtual_network_peerings: Optional[List["_models.VirtualNetworkPeering"]] = None,
        resource_guid: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        enable_ddos_protection: bool = False,
        enable_vm_protection: bool = False,
        ddos_protection_plan: Optional["_models.SubResource"] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: Gets a unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword address_space: The AddressSpace that contains an array of IP address ranges that can
         be used by subnets.
        :paramtype address_space: ~azure.mgmt.network.v2018_07_01.models.AddressSpace
        :keyword dhcp_options: The dhcpOptions that contains an array of DNS servers available to VMs
         deployed in the virtual network.
        :paramtype dhcp_options: ~azure.mgmt.network.v2018_07_01.models.DhcpOptions
        :keyword subnets: A list of subnets in a Virtual Network.
        :paramtype subnets: list[~azure.mgmt.network.v2018_07_01.models.Subnet]
        :keyword virtual_network_peerings: A list of peerings in a Virtual Network.
        :paramtype virtual_network_peerings:
         list[~azure.mgmt.network.v2018_07_01.models.VirtualNetworkPeering]
        :keyword resource_guid: The resourceGuid property of the Virtual Network resource.
        :paramtype resource_guid: str
        :keyword provisioning_state: The provisioning state of the virtual network resource.
         Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        :keyword enable_ddos_protection: Indicates if DDoS protection is enabled for all the
         protected resources in the virtual network. It requires a DDoS protection plan associated
         with the resource.
        :paramtype enable_ddos_protection: bool
        :keyword enable_vm_protection: Indicates if VM protection is enabled for all the subnets in
         the virtual network.
        :paramtype enable_vm_protection: bool
        :keyword ddos_protection_plan: The DDoS protection plan associated with the virtual network.
        :paramtype ddos_protection_plan: ~azure.mgmt.network.v2018_07_01.models.SubResource
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = etag
        self.address_space = address_space
        self.dhcp_options = dhcp_options
        self.subnets = subnets
        self.virtual_network_peerings = virtual_network_peerings
        self.resource_guid = resource_guid
        self.provisioning_state = provisioning_state
        self.enable_ddos_protection = enable_ddos_protection
        self.enable_vm_protection = enable_vm_protection
        self.ddos_protection_plan = ddos_protection_plan


class VirtualNetworkConnectionGatewayReference(_serialization.Model):
    """A reference to VirtualNetworkGateway or LocalNetworkGateway resource.

    All required parameters must be populated in order to send to Azure.

    :ivar id: The ID of VirtualNetworkGateway or LocalNetworkGateway resource. Required.
    :vartype id: str
    """

    _validation = {
        "id": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
    }

    def __init__(self, *, id: str, **kwargs):  # pylint: disable=redefined-builtin
        """
        :keyword id: The ID of VirtualNetworkGateway or LocalNetworkGateway resource. Required.
        :paramtype id: str
        """
        super().__init__(**kwargs)
        self.id = id


class VirtualNetworkGateway(Resource):  # pylint: disable=too-many-instance-attributes
    """A common class for general resource information.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar ip_configurations: IP configurations for virtual network gateway.
    :vartype ip_configurations:
     list[~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayIPConfiguration]
    :ivar gateway_type: The type of this virtual network gateway. Possible values are: 'Vpn' and
     'ExpressRoute'. Known values are: "Vpn" and "ExpressRoute".
    :vartype gateway_type: str or ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayType
    :ivar vpn_type: The type of this virtual network gateway. Possible values are: 'PolicyBased'
     and 'RouteBased'. Known values are: "PolicyBased" and "RouteBased".
    :vartype vpn_type: str or ~azure.mgmt.network.v2018_07_01.models.VpnType
    :ivar enable_bgp: Whether BGP is enabled for this virtual network gateway or not.
    :vartype enable_bgp: bool
    :ivar active: ActiveActive flag.
:vartype active: bool :ivar gateway_default_site: The reference of the LocalNetworkGateway resource which represents local network site having default routes. Assign Null value in case of removing existing default site setting. :vartype gateway_default_site: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar sku: The reference of the VirtualNetworkGatewaySku resource which represents the SKU selected for Virtual network gateway. :vartype sku: ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewaySku :ivar vpn_client_configuration: The reference of the VpnClientConfiguration resource which represents the P2S VpnClient configurations. :vartype vpn_client_configuration: ~azure.mgmt.network.v2018_07_01.models.VpnClientConfiguration :ivar bgp_settings: Virtual network gateway's BGP speaker settings. :vartype bgp_settings: ~azure.mgmt.network.v2018_07_01.models.BgpSettings :ivar resource_guid: The resource GUID property of the VirtualNetworkGateway resource. :vartype resource_guid: str :ivar provisioning_state: The provisioning state of the VirtualNetworkGateway resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "provisioning_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "ip_configurations": {"key": "properties.ipConfigurations", "type": "[VirtualNetworkGatewayIPConfiguration]"}, "gateway_type": {"key": "properties.gatewayType", "type": "str"}, "vpn_type": {"key": "properties.vpnType", "type": "str"}, "enable_bgp": {"key": "properties.enableBgp", "type": "bool"}, "active": {"key": "properties.activeActive", "type": "bool"}, "gateway_default_site": {"key": "properties.gatewayDefaultSite", "type": "SubResource"}, "sku": {"key": "properties.sku", "type": "VirtualNetworkGatewaySku"}, "vpn_client_configuration": {"key": "properties.vpnClientConfiguration", "type": "VpnClientConfiguration"}, "bgp_settings": {"key": "properties.bgpSettings", "type": "BgpSettings"}, "resource_guid": {"key": "properties.resourceGuid", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, etag: Optional[str] = None, ip_configurations: Optional[List["_models.VirtualNetworkGatewayIPConfiguration"]] = None, gateway_type: Optional[Union[str, "_models.VirtualNetworkGatewayType"]] = None, vpn_type: Optional[Union[str, "_models.VpnType"]] = None, enable_bgp: Optional[bool] = None, active: Optional[bool] = None, gateway_default_site: Optional["_models.SubResource"] = None, sku: Optional["_models.VirtualNetworkGatewaySku"] = None, vpn_client_configuration: Optional["_models.VpnClientConfiguration"] = None, bgp_settings: Optional["_models.BgpSettings"] = None, resource_guid: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword etag: Gets a unique read-only string that changes whenever the resource is updated. 
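
        A construction sketch for illustration only (``ip_cfg`` stands for a
        ``VirtualNetworkGatewayIPConfiguration`` built separately; all values are hypothetical)::

            gw = VirtualNetworkGateway(
                location="westus",
                gateway_type="Vpn",
                vpn_type="RouteBased",
                sku=VirtualNetworkGatewaySku(name="VpnGw1", tier="VpnGw1"),
                ip_configurations=[ip_cfg],
            )
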
:paramtype etag: str :keyword ip_configurations: IP configurations for virtual network gateway. :paramtype ip_configurations: list[~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayIPConfiguration] :keyword gateway_type: The type of this virtual network gateway. Possible values are: 'Vpn' and 'ExpressRoute'. Known values are: "Vpn" and "ExpressRoute". :paramtype gateway_type: str or ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayType :keyword vpn_type: The type of this virtual network gateway. Possible values are: 'PolicyBased' and 'RouteBased'. Known values are: "PolicyBased" and "RouteBased". :paramtype vpn_type: str or ~azure.mgmt.network.v2018_07_01.models.VpnType :keyword enable_bgp: Whether BGP is enabled for this virtual network gateway or not. :paramtype enable_bgp: bool :keyword active: ActiveActive flag. :paramtype active: bool :keyword gateway_default_site: The reference of the LocalNetworkGateway resource which represents local network site having default routes. Assign Null value in case of removing existing default site setting. :paramtype gateway_default_site: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword sku: The reference of the VirtualNetworkGatewaySku resource which represents the SKU selected for Virtual network gateway. :paramtype sku: ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewaySku :keyword vpn_client_configuration: The reference of the VpnClientConfiguration resource which represents the P2S VpnClient configurations. :paramtype vpn_client_configuration: ~azure.mgmt.network.v2018_07_01.models.VpnClientConfiguration :keyword bgp_settings: Virtual network gateway's BGP speaker settings. :paramtype bgp_settings: ~azure.mgmt.network.v2018_07_01.models.BgpSettings :keyword resource_guid: The resource GUID property of the VirtualNetworkGateway resource. :paramtype resource_guid: str """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.etag = etag self.ip_configurations = ip_configurations self.gateway_type = gateway_type self.vpn_type = vpn_type self.enable_bgp = enable_bgp self.active = active self.gateway_default_site = gateway_default_site self.sku = sku self.vpn_client_configuration = vpn_client_configuration self.bgp_settings = bgp_settings self.resource_guid = resource_guid self.provisioning_state = None class VirtualNetworkGatewayConnection(Resource): # pylint: disable=too-many-instance-attributes """A common class for general resource information. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar authorization_key: The authorizationKey. :vartype authorization_key: str :ivar virtual_network_gateway1: The reference to virtual network gateway resource. Required. :vartype virtual_network_gateway1: ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGateway :ivar virtual_network_gateway2: The reference to virtual network gateway resource. :vartype virtual_network_gateway2: ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGateway :ivar local_network_gateway2: The reference to local network gateway resource. 
:vartype local_network_gateway2: ~azure.mgmt.network.v2018_07_01.models.LocalNetworkGateway
    :ivar connection_type: Gateway connection type. Possible values are:
     'IPsec', 'Vnet2Vnet', 'ExpressRoute', and 'VPNClient'. Required. Known values are: "IPsec",
     "Vnet2Vnet", "ExpressRoute", and "VPNClient".
    :vartype connection_type: str or
     ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayConnectionType
    :ivar routing_weight: The routing weight.
    :vartype routing_weight: int
    :ivar shared_key: The IPSec shared key.
    :vartype shared_key: str
    :ivar connection_status: Virtual network Gateway connection status. Possible values are
     'Unknown', 'Connecting', 'Connected' and 'NotConnected'. Known values are: "Unknown",
     "Connecting", "Connected", and "NotConnected".
    :vartype connection_status: str or
     ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayConnectionStatus
    :ivar tunnel_connection_status: Collection of all tunnels' connection health status.
    :vartype tunnel_connection_status:
     list[~azure.mgmt.network.v2018_07_01.models.TunnelConnectionHealth]
    :ivar egress_bytes_transferred: The egress bytes transferred in this connection.
    :vartype egress_bytes_transferred: int
    :ivar ingress_bytes_transferred: The ingress bytes transferred in this connection.
    :vartype ingress_bytes_transferred: int
    :ivar peer: The reference to peerings resource.
    :vartype peer: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar enable_bgp: EnableBgp flag.
    :vartype enable_bgp: bool
    :ivar use_policy_based_traffic_selectors: Enable policy-based traffic selectors.
    :vartype use_policy_based_traffic_selectors: bool
    :ivar ipsec_policies: The IPSec Policies to be considered by this connection.
    :vartype ipsec_policies: list[~azure.mgmt.network.v2018_07_01.models.IpsecPolicy]
    :ivar resource_guid: The resource GUID property of the VirtualNetworkGatewayConnection
     resource.
    :vartype resource_guid: str
    :ivar provisioning_state: The provisioning state of the VirtualNetworkGatewayConnection
     resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    :ivar express_route_gateway_bypass: Bypass ExpressRoute Gateway for data forwarding.
:vartype express_route_gateway_bypass: bool """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "virtual_network_gateway1": {"required": True}, "connection_type": {"required": True}, "connection_status": {"readonly": True}, "tunnel_connection_status": {"readonly": True}, "egress_bytes_transferred": {"readonly": True}, "ingress_bytes_transferred": {"readonly": True}, "provisioning_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "authorization_key": {"key": "properties.authorizationKey", "type": "str"}, "virtual_network_gateway1": {"key": "properties.virtualNetworkGateway1", "type": "VirtualNetworkGateway"}, "virtual_network_gateway2": {"key": "properties.virtualNetworkGateway2", "type": "VirtualNetworkGateway"}, "local_network_gateway2": {"key": "properties.localNetworkGateway2", "type": "LocalNetworkGateway"}, "connection_type": {"key": "properties.connectionType", "type": "str"}, "routing_weight": {"key": "properties.routingWeight", "type": "int"}, "shared_key": {"key": "properties.sharedKey", "type": "str"}, "connection_status": {"key": "properties.connectionStatus", "type": "str"}, "tunnel_connection_status": {"key": "properties.tunnelConnectionStatus", "type": "[TunnelConnectionHealth]"}, "egress_bytes_transferred": {"key": "properties.egressBytesTransferred", "type": "int"}, "ingress_bytes_transferred": {"key": "properties.ingressBytesTransferred", "type": "int"}, "peer": {"key": "properties.peer", "type": "SubResource"}, "enable_bgp": {"key": "properties.enableBgp", "type": "bool"}, "use_policy_based_traffic_selectors": {"key": "properties.usePolicyBasedTrafficSelectors", "type": "bool"}, "ipsec_policies": {"key": "properties.ipsecPolicies", "type": "[IpsecPolicy]"}, "resource_guid": {"key": "properties.resourceGuid", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "express_route_gateway_bypass": {"key": "properties.expressRouteGatewayBypass", "type": "bool"}, } def __init__( # pylint: disable=too-many-locals self, *, virtual_network_gateway1: "_models.VirtualNetworkGateway", connection_type: Union[str, "_models.VirtualNetworkGatewayConnectionType"], id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, etag: Optional[str] = None, authorization_key: Optional[str] = None, virtual_network_gateway2: Optional["_models.VirtualNetworkGateway"] = None, local_network_gateway2: Optional["_models.LocalNetworkGateway"] = None, routing_weight: Optional[int] = None, shared_key: Optional[str] = None, peer: Optional["_models.SubResource"] = None, enable_bgp: Optional[bool] = None, use_policy_based_traffic_selectors: Optional[bool] = None, ipsec_policies: Optional[List["_models.IpsecPolicy"]] = None, resource_guid: Optional[str] = None, express_route_gateway_bypass: Optional[bool] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword etag: Gets a unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword authorization_key: The authorizationKey. 
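
        A minimal VNet-to-VNet sketch for illustration only (``gw1`` and ``gw2`` stand for
        ``VirtualNetworkGateway`` instances built separately; all values are hypothetical)::

            conn = VirtualNetworkGatewayConnection(
                location="westus",
                virtual_network_gateway1=gw1,
                virtual_network_gateway2=gw2,
                connection_type="Vnet2Vnet",
                shared_key="placeholder-shared-key",
            )
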
:paramtype authorization_key: str
        :keyword virtual_network_gateway1: The reference to virtual network gateway resource.
         Required.
        :paramtype virtual_network_gateway1:
         ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGateway
        :keyword virtual_network_gateway2: The reference to virtual network gateway resource.
        :paramtype virtual_network_gateway2:
         ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGateway
        :keyword local_network_gateway2: The reference to local network gateway resource.
        :paramtype local_network_gateway2:
         ~azure.mgmt.network.v2018_07_01.models.LocalNetworkGateway
        :keyword connection_type: Gateway connection type. Possible values are:
         'IPsec', 'Vnet2Vnet', 'ExpressRoute', and 'VPNClient'. Required. Known values are: "IPsec",
         "Vnet2Vnet", "ExpressRoute", and "VPNClient".
        :paramtype connection_type: str or
         ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayConnectionType
        :keyword routing_weight: The routing weight.
        :paramtype routing_weight: int
        :keyword shared_key: The IPSec shared key.
        :paramtype shared_key: str
        :keyword peer: The reference to peerings resource.
        :paramtype peer: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword enable_bgp: EnableBgp flag.
        :paramtype enable_bgp: bool
        :keyword use_policy_based_traffic_selectors: Enable policy-based traffic selectors.
        :paramtype use_policy_based_traffic_selectors: bool
        :keyword ipsec_policies: The IPSec Policies to be considered by this connection.
        :paramtype ipsec_policies: list[~azure.mgmt.network.v2018_07_01.models.IpsecPolicy]
        :keyword resource_guid: The resource GUID property of the VirtualNetworkGatewayConnection
         resource.
        :paramtype resource_guid: str
        :keyword express_route_gateway_bypass: Bypass ExpressRoute Gateway for data forwarding.
        :paramtype express_route_gateway_bypass: bool
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = etag
        self.authorization_key = authorization_key
        self.virtual_network_gateway1 = virtual_network_gateway1
        self.virtual_network_gateway2 = virtual_network_gateway2
        self.local_network_gateway2 = local_network_gateway2
        self.connection_type = connection_type
        self.routing_weight = routing_weight
        self.shared_key = shared_key
        self.connection_status = None
        self.tunnel_connection_status = None
        self.egress_bytes_transferred = None
        self.ingress_bytes_transferred = None
        self.peer = peer
        self.enable_bgp = enable_bgp
        self.use_policy_based_traffic_selectors = use_policy_based_traffic_selectors
        self.ipsec_policies = ipsec_policies
        self.resource_guid = resource_guid
        self.provisioning_state = None
        self.express_route_gateway_bypass = express_route_gateway_bypass


class VirtualNetworkGatewayConnectionListEntity(Resource):  # pylint: disable=too-many-instance-attributes
    """A common class for general resource information.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar authorization_key: The authorizationKey.
    :vartype authorization_key: str
    :ivar virtual_network_gateway1: The reference to virtual network gateway resource. Required.
    :vartype virtual_network_gateway1:
     ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkConnectionGatewayReference
    :ivar virtual_network_gateway2: The reference to virtual network gateway resource.
    :vartype virtual_network_gateway2:
     ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkConnectionGatewayReference
    :ivar local_network_gateway2: The reference to local network gateway resource.
    :vartype local_network_gateway2:
     ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkConnectionGatewayReference
    :ivar connection_type: Gateway connection type. Possible values are:
     'IPsec', 'Vnet2Vnet', 'ExpressRoute', and 'VPNClient'. Required. Known values are: "IPsec",
     "Vnet2Vnet", "ExpressRoute", and "VPNClient".
    :vartype connection_type: str or
     ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayConnectionType
    :ivar routing_weight: The routing weight.
    :vartype routing_weight: int
    :ivar shared_key: The IPSec shared key.
    :vartype shared_key: str
    :ivar connection_status: Virtual network Gateway connection status. Possible values are
     'Unknown', 'Connecting', 'Connected' and 'NotConnected'. Known values are: "Unknown",
     "Connecting", "Connected", and "NotConnected".
    :vartype connection_status: str or
     ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayConnectionStatus
    :ivar tunnel_connection_status: Collection of all tunnels' connection health status.
    :vartype tunnel_connection_status:
     list[~azure.mgmt.network.v2018_07_01.models.TunnelConnectionHealth]
    :ivar egress_bytes_transferred: The egress bytes transferred in this connection.
    :vartype egress_bytes_transferred: int
    :ivar ingress_bytes_transferred: The ingress bytes transferred in this connection.
    :vartype ingress_bytes_transferred: int
    :ivar peer: The reference to peerings resource.
    :vartype peer: ~azure.mgmt.network.v2018_07_01.models.SubResource
    :ivar enable_bgp: EnableBgp flag.
    :vartype enable_bgp: bool
    :ivar use_policy_based_traffic_selectors: Enable policy-based traffic selectors.
    :vartype use_policy_based_traffic_selectors: bool
    :ivar ipsec_policies: The IPSec Policies to be considered by this connection.
    :vartype ipsec_policies: list[~azure.mgmt.network.v2018_07_01.models.IpsecPolicy]
    :ivar resource_guid: The resource GUID property of the VirtualNetworkGatewayConnection
     resource.
    :vartype resource_guid: str
    :ivar provisioning_state: The provisioning state of the VirtualNetworkGatewayConnection
     resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    :ivar express_route_gateway_bypass: Bypass ExpressRoute Gateway for data forwarding.
:vartype express_route_gateway_bypass: bool """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "virtual_network_gateway1": {"required": True}, "connection_type": {"required": True}, "connection_status": {"readonly": True}, "tunnel_connection_status": {"readonly": True}, "egress_bytes_transferred": {"readonly": True}, "ingress_bytes_transferred": {"readonly": True}, "provisioning_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "authorization_key": {"key": "properties.authorizationKey", "type": "str"}, "virtual_network_gateway1": { "key": "properties.virtualNetworkGateway1", "type": "VirtualNetworkConnectionGatewayReference", }, "virtual_network_gateway2": { "key": "properties.virtualNetworkGateway2", "type": "VirtualNetworkConnectionGatewayReference", }, "local_network_gateway2": { "key": "properties.localNetworkGateway2", "type": "VirtualNetworkConnectionGatewayReference", }, "connection_type": {"key": "properties.connectionType", "type": "str"}, "routing_weight": {"key": "properties.routingWeight", "type": "int"}, "shared_key": {"key": "properties.sharedKey", "type": "str"}, "connection_status": {"key": "properties.connectionStatus", "type": "str"}, "tunnel_connection_status": {"key": "properties.tunnelConnectionStatus", "type": "[TunnelConnectionHealth]"}, "egress_bytes_transferred": {"key": "properties.egressBytesTransferred", "type": "int"}, "ingress_bytes_transferred": {"key": "properties.ingressBytesTransferred", "type": "int"}, "peer": {"key": "properties.peer", "type": "SubResource"}, "enable_bgp": {"key": "properties.enableBgp", "type": "bool"}, "use_policy_based_traffic_selectors": {"key": "properties.usePolicyBasedTrafficSelectors", "type": "bool"}, "ipsec_policies": {"key": "properties.ipsecPolicies", "type": "[IpsecPolicy]"}, "resource_guid": {"key": "properties.resourceGuid", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "express_route_gateway_bypass": {"key": "properties.expressRouteGatewayBypass", "type": "bool"}, } def __init__( # pylint: disable=too-many-locals self, *, virtual_network_gateway1: "_models.VirtualNetworkConnectionGatewayReference", connection_type: Union[str, "_models.VirtualNetworkGatewayConnectionType"], id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, etag: Optional[str] = None, authorization_key: Optional[str] = None, virtual_network_gateway2: Optional["_models.VirtualNetworkConnectionGatewayReference"] = None, local_network_gateway2: Optional["_models.VirtualNetworkConnectionGatewayReference"] = None, routing_weight: Optional[int] = None, shared_key: Optional[str] = None, peer: Optional["_models.SubResource"] = None, enable_bgp: Optional[bool] = None, use_policy_based_traffic_selectors: Optional[bool] = None, ipsec_policies: Optional[List["_models.IpsecPolicy"]] = None, resource_guid: Optional[str] = None, express_route_gateway_bypass: Optional[bool] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword etag: Gets a unique read-only string that changes whenever the resource is updated. 
:paramtype etag: str
        :keyword authorization_key: The authorizationKey.
        :paramtype authorization_key: str
        :keyword virtual_network_gateway1: The reference to virtual network gateway resource.
         Required.
        :paramtype virtual_network_gateway1:
         ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkConnectionGatewayReference
        :keyword virtual_network_gateway2: The reference to virtual network gateway resource.
        :paramtype virtual_network_gateway2:
         ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkConnectionGatewayReference
        :keyword local_network_gateway2: The reference to local network gateway resource.
        :paramtype local_network_gateway2:
         ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkConnectionGatewayReference
        :keyword connection_type: Gateway connection type. Possible values are:
         'IPsec', 'Vnet2Vnet', 'ExpressRoute', and 'VPNClient'. Required. Known values are: "IPsec",
         "Vnet2Vnet", "ExpressRoute", and "VPNClient".
        :paramtype connection_type: str or
         ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayConnectionType
        :keyword routing_weight: The routing weight.
        :paramtype routing_weight: int
        :keyword shared_key: The IPSec shared key.
        :paramtype shared_key: str
        :keyword peer: The reference to peerings resource.
        :paramtype peer: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword enable_bgp: EnableBgp flag.
        :paramtype enable_bgp: bool
        :keyword use_policy_based_traffic_selectors: Enable policy-based traffic selectors.
        :paramtype use_policy_based_traffic_selectors: bool
        :keyword ipsec_policies: The IPSec Policies to be considered by this connection.
        :paramtype ipsec_policies: list[~azure.mgmt.network.v2018_07_01.models.IpsecPolicy]
        :keyword resource_guid: The resource GUID property of the VirtualNetworkGatewayConnection
         resource.
        :paramtype resource_guid: str
        :keyword express_route_gateway_bypass: Bypass ExpressRoute Gateway for data forwarding.
        :paramtype express_route_gateway_bypass: bool
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = etag
        self.authorization_key = authorization_key
        self.virtual_network_gateway1 = virtual_network_gateway1
        self.virtual_network_gateway2 = virtual_network_gateway2
        self.local_network_gateway2 = local_network_gateway2
        self.connection_type = connection_type
        self.routing_weight = routing_weight
        self.shared_key = shared_key
        self.connection_status = None
        self.tunnel_connection_status = None
        self.egress_bytes_transferred = None
        self.ingress_bytes_transferred = None
        self.peer = peer
        self.enable_bgp = enable_bgp
        self.use_policy_based_traffic_selectors = use_policy_based_traffic_selectors
        self.ipsec_policies = ipsec_policies
        self.resource_guid = resource_guid
        self.provisioning_state = None
        self.express_route_gateway_bypass = express_route_gateway_bypass


class VirtualNetworkGatewayConnectionListResult(_serialization.Model):
    """Response for the ListVirtualNetworkGatewayConnections API service call.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: Gets a list of VirtualNetworkGatewayConnection resources that exists in a
     resource group.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayConnection]
    :ivar next_link: The URL to get the next set of results.
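
    Instances are normally produced by the service rather than constructed directly; a consumption
    sketch, assuming a management client named ``client`` (illustrative only)::

        for conn in client.virtual_network_gateway_connections.list("my-resource-group"):
            print(conn.name, conn.connection_status)
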
:vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[VirtualNetworkGatewayConnection]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.VirtualNetworkGatewayConnection"]] = None, **kwargs): """ :keyword value: Gets a list of VirtualNetworkGatewayConnection resources that exists in a resource group. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayConnection] """ super().__init__(**kwargs) self.value = value self.next_link = None class VirtualNetworkGatewayIPConfiguration(SubResource): """IP configuration for virtual network gateway. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar private_ip_allocation_method: The private IP allocation method. Possible values are: 'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic". :vartype private_ip_allocation_method: str or ~azure.mgmt.network.v2018_07_01.models.IPAllocationMethod :ivar subnet: The reference of the subnet resource. :vartype subnet: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar public_ip_address: The reference of the public IP resource. :vartype public_ip_address: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar provisioning_state: The provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _validation = { "provisioning_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "private_ip_allocation_method": {"key": "properties.privateIPAllocationMethod", "type": "str"}, "subnet": {"key": "properties.subnet", "type": "SubResource"}, "public_ip_address": {"key": "properties.publicIPAddress", "type": "SubResource"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, private_ip_allocation_method: Optional[Union[str, "_models.IPAllocationMethod"]] = None, subnet: Optional["_models.SubResource"] = None, public_ip_address: Optional["_models.SubResource"] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword private_ip_allocation_method: The private IP allocation method. Possible values are: 'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic". :paramtype private_ip_allocation_method: str or ~azure.mgmt.network.v2018_07_01.models.IPAllocationMethod :keyword subnet: The reference of the subnet resource. :paramtype subnet: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword public_ip_address: The reference of the public IP resource. 
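
        A construction sketch for illustration only (``gateway_subnet_id`` and ``public_ip_id``
        are hypothetical resource IDs; the subnet is expected to be the gateway subnet)::

            ip_cfg = VirtualNetworkGatewayIPConfiguration(
                name="gwipconfig1",
                private_ip_allocation_method="Dynamic",
                subnet=SubResource(id=gateway_subnet_id),
                public_ip_address=SubResource(id=public_ip_id),
            )
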
:paramtype public_ip_address: ~azure.mgmt.network.v2018_07_01.models.SubResource """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.private_ip_allocation_method = private_ip_allocation_method self.subnet = subnet self.public_ip_address = public_ip_address self.provisioning_state = None class VirtualNetworkGatewayListConnectionsResult(_serialization.Model): """Response for the VirtualNetworkGatewayListConnections API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: Gets a list of VirtualNetworkGatewayConnection resources that exists in a resource group. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayConnectionListEntity] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[VirtualNetworkGatewayConnectionListEntity]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.VirtualNetworkGatewayConnectionListEntity"]] = None, **kwargs): """ :keyword value: Gets a list of VirtualNetworkGatewayConnection resources that exists in a resource group. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewayConnectionListEntity] """ super().__init__(**kwargs) self.value = value self.next_link = None class VirtualNetworkGatewayListResult(_serialization.Model): """Response for the ListVirtualNetworkGateways API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: Gets a list of VirtualNetworkGateway resources that exists in a resource group. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGateway] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "next_link": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[VirtualNetworkGateway]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, value: Optional[List["_models.VirtualNetworkGateway"]] = None, **kwargs): """ :keyword value: Gets a list of VirtualNetworkGateway resources that exists in a resource group. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGateway] """ super().__init__(**kwargs) self.value = value self.next_link = None class VirtualNetworkGatewaySku(_serialization.Model): """VirtualNetworkGatewaySku details. :ivar name: Gateway SKU name. Known values are: "Basic", "HighPerformance", "Standard", "UltraPerformance", "VpnGw1", "VpnGw2", "VpnGw3", "VpnGw1AZ", "VpnGw2AZ", "VpnGw3AZ", "ErGw1AZ", "ErGw2AZ", and "ErGw3AZ". :vartype name: str or ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewaySkuName :ivar tier: Gateway SKU tier. Known values are: "Basic", "HighPerformance", "Standard", "UltraPerformance", "VpnGw1", "VpnGw2", "VpnGw3", "VpnGw1AZ", "VpnGw2AZ", "VpnGw3AZ", "ErGw1AZ", "ErGw2AZ", and "ErGw3AZ". :vartype tier: str or ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewaySkuTier :ivar capacity: The capacity. 
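
    A construction sketch for illustration only (values are hypothetical; for gateway SKUs the
    ``name`` and ``tier`` values usually match)::

        sku = VirtualNetworkGatewaySku(name="VpnGw1", tier="VpnGw1")
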
:vartype capacity: int """ _attribute_map = { "name": {"key": "name", "type": "str"}, "tier": {"key": "tier", "type": "str"}, "capacity": {"key": "capacity", "type": "int"}, } def __init__( self, *, name: Optional[Union[str, "_models.VirtualNetworkGatewaySkuName"]] = None, tier: Optional[Union[str, "_models.VirtualNetworkGatewaySkuTier"]] = None, capacity: Optional[int] = None, **kwargs ): """ :keyword name: Gateway SKU name. Known values are: "Basic", "HighPerformance", "Standard", "UltraPerformance", "VpnGw1", "VpnGw2", "VpnGw3", "VpnGw1AZ", "VpnGw2AZ", "VpnGw3AZ", "ErGw1AZ", "ErGw2AZ", and "ErGw3AZ". :paramtype name: str or ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewaySkuName :keyword tier: Gateway SKU tier. Known values are: "Basic", "HighPerformance", "Standard", "UltraPerformance", "VpnGw1", "VpnGw2", "VpnGw3", "VpnGw1AZ", "VpnGw2AZ", "VpnGw3AZ", "ErGw1AZ", "ErGw2AZ", and "ErGw3AZ". :paramtype tier: str or ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkGatewaySkuTier :keyword capacity: The capacity. :paramtype capacity: int """ super().__init__(**kwargs) self.name = name self.tier = tier self.capacity = capacity class VirtualNetworkListResult(_serialization.Model): """Response for the ListVirtualNetworks API service call. :ivar value: Gets a list of VirtualNetwork resources in a resource group. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualNetwork] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _attribute_map = { "value": {"key": "value", "type": "[VirtualNetwork]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, value: Optional[List["_models.VirtualNetwork"]] = None, next_link: Optional[str] = None, **kwargs ): """ :keyword value: Gets a list of VirtualNetwork resources in a resource group. :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualNetwork] :keyword next_link: The URL to get the next set of results. :paramtype next_link: str """ super().__init__(**kwargs) self.value = value self.next_link = next_link class VirtualNetworkListUsageResult(_serialization.Model): """Response for the virtual networks GetUsage API service call. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: VirtualNetwork usage stats. :vartype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualNetworkUsage] :ivar next_link: The URL to get the next set of results. :vartype next_link: str """ _validation = { "value": {"readonly": True}, } _attribute_map = { "value": {"key": "value", "type": "[VirtualNetworkUsage]"}, "next_link": {"key": "nextLink", "type": "str"}, } def __init__(self, *, next_link: Optional[str] = None, **kwargs): """ :keyword next_link: The URL to get the next set of results. :paramtype next_link: str """ super().__init__(**kwargs) self.value = None self.next_link = next_link class VirtualNetworkPeering(SubResource): # pylint: disable=too-many-instance-attributes """Peerings in a virtual network resource. :ivar id: Resource ID. :vartype id: str :ivar name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar allow_virtual_network_access: Whether the VMs in the linked virtual network space would be able to access all the VMs in local Virtual network space. 
:vartype allow_virtual_network_access: bool :ivar allow_forwarded_traffic: Whether the forwarded traffic from the VMs in the remote virtual network will be allowed/disallowed. :vartype allow_forwarded_traffic: bool :ivar allow_gateway_transit: If gateway links can be used in remote virtual networking to link to this virtual network. :vartype allow_gateway_transit: bool :ivar use_remote_gateways: If remote gateways can be used on this virtual network. If the flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network will use gateways of remote virtual network for transit. Only one peering can have this flag set to true. This flag cannot be set if virtual network already has a gateway. :vartype use_remote_gateways: bool :ivar remote_virtual_network: The reference of the remote virtual network. The remote virtual network can be in the same or different region (preview). See here to register for the preview and learn more (https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-create-peering). :vartype remote_virtual_network: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar remote_address_space: The reference of the remote virtual network address space. :vartype remote_address_space: ~azure.mgmt.network.v2018_07_01.models.AddressSpace :ivar peering_state: The status of the virtual network peering. Possible values are 'Initiated', 'Connected', and 'Disconnected'. Known values are: "Initiated", "Connected", and "Disconnected". :vartype peering_state: str or ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkPeeringState :ivar provisioning_state: The provisioning state of the resource. :vartype provisioning_state: str """ _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "allow_virtual_network_access": {"key": "properties.allowVirtualNetworkAccess", "type": "bool"}, "allow_forwarded_traffic": {"key": "properties.allowForwardedTraffic", "type": "bool"}, "allow_gateway_transit": {"key": "properties.allowGatewayTransit", "type": "bool"}, "use_remote_gateways": {"key": "properties.useRemoteGateways", "type": "bool"}, "remote_virtual_network": {"key": "properties.remoteVirtualNetwork", "type": "SubResource"}, "remote_address_space": {"key": "properties.remoteAddressSpace", "type": "AddressSpace"}, "peering_state": {"key": "properties.peeringState", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, allow_virtual_network_access: Optional[bool] = None, allow_forwarded_traffic: Optional[bool] = None, allow_gateway_transit: Optional[bool] = None, use_remote_gateways: Optional[bool] = None, remote_virtual_network: Optional["_models.SubResource"] = None, remote_address_space: Optional["_models.AddressSpace"] = None, peering_state: Optional[Union[str, "_models.VirtualNetworkPeeringState"]] = None, provisioning_state: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. 
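
        A construction sketch for illustration only (``remote_vnet_id`` is a hypothetical resource
        ID of the peer virtual network)::

            peering = VirtualNetworkPeering(
                name="peer-to-hub",
                allow_virtual_network_access=True,
                remote_virtual_network=SubResource(id=remote_vnet_id),
            )
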
:paramtype etag: str
        :keyword allow_virtual_network_access: Whether the VMs in the linked virtual network space
         would be able to access all the VMs in local Virtual network space.
        :paramtype allow_virtual_network_access: bool
        :keyword allow_forwarded_traffic: Whether the forwarded traffic from the VMs in the remote
         virtual network will be allowed/disallowed.
        :paramtype allow_forwarded_traffic: bool
        :keyword allow_gateway_transit: If gateway links can be used in remote virtual networking to
         link to this virtual network.
        :paramtype allow_gateway_transit: bool
        :keyword use_remote_gateways: If remote gateways can be used on this virtual network. If the
         flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network
         will use gateways of remote virtual network for transit. Only one peering can have this flag
         set to true. This flag cannot be set if virtual network already has a gateway.
        :paramtype use_remote_gateways: bool
        :keyword remote_virtual_network: The reference of the remote virtual network. The remote
         virtual network can be in the same or different region (preview). See here to register for
         the preview and learn more
         (https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-create-peering).
        :paramtype remote_virtual_network: ~azure.mgmt.network.v2018_07_01.models.SubResource
        :keyword remote_address_space: The reference of the remote virtual network address space.
        :paramtype remote_address_space: ~azure.mgmt.network.v2018_07_01.models.AddressSpace
        :keyword peering_state: The status of the virtual network peering. Possible values are
         'Initiated', 'Connected', and 'Disconnected'. Known values are: "Initiated", "Connected",
         and "Disconnected".
        :paramtype peering_state: str or
         ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkPeeringState
        :keyword provisioning_state: The provisioning state of the resource.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.allow_virtual_network_access = allow_virtual_network_access
        self.allow_forwarded_traffic = allow_forwarded_traffic
        self.allow_gateway_transit = allow_gateway_transit
        self.use_remote_gateways = use_remote_gateways
        self.remote_virtual_network = remote_virtual_network
        self.remote_address_space = remote_address_space
        self.peering_state = peering_state
        self.provisioning_state = provisioning_state


class VirtualNetworkPeeringListResult(_serialization.Model):
    """Response for the ListVirtualNetworkPeerings API service call. Retrieves all peerings that
    belong to a virtual network.

    :ivar value: The peerings in a virtual network.
    :vartype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualNetworkPeering]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[VirtualNetworkPeering]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.VirtualNetworkPeering"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: The peerings in a virtual network.
        :paramtype value: list[~azure.mgmt.network.v2018_07_01.models.VirtualNetworkPeering]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = next_link


class VirtualNetworkUsage(_serialization.Model):
    """Usage details for a subnet.

    Variables are only populated by the server, and will be ignored when sending a request.
:ivar current_value: Indicates number of IPs used from the Subnet. :vartype current_value: float :ivar id: Subnet identifier. :vartype id: str :ivar limit: Indicates the size of the subnet. :vartype limit: float :ivar name: The name containing common and localized value for usage. :vartype name: ~azure.mgmt.network.v2018_07_01.models.VirtualNetworkUsageName :ivar unit: Usage units. Returns 'Count'. :vartype unit: str """ _validation = { "current_value": {"readonly": True}, "id": {"readonly": True}, "limit": {"readonly": True}, "name": {"readonly": True}, "unit": {"readonly": True}, } _attribute_map = { "current_value": {"key": "currentValue", "type": "float"}, "id": {"key": "id", "type": "str"}, "limit": {"key": "limit", "type": "float"}, "name": {"key": "name", "type": "VirtualNetworkUsageName"}, "unit": {"key": "unit", "type": "str"}, } def __init__(self, **kwargs): """ """ super().__init__(**kwargs) self.current_value = None self.id = None self.limit = None self.name = None self.unit = None class VirtualNetworkUsageName(_serialization.Model): """Usage strings container. Variables are only populated by the server, and will be ignored when sending a request. :ivar localized_value: Localized subnet size and usage string. :vartype localized_value: str :ivar value: Subnet size and usage string. :vartype value: str """ _validation = { "localized_value": {"readonly": True}, "value": {"readonly": True}, } _attribute_map = { "localized_value": {"key": "localizedValue", "type": "str"}, "value": {"key": "value", "type": "str"}, } def __init__(self, **kwargs): """ """ super().__init__(**kwargs) self.localized_value = None self.value = None class VirtualWAN(Resource): """VirtualWAN Resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar disable_vpn_encryption: Vpn encryption to be disabled or not. :vartype disable_vpn_encryption: bool :ivar virtual_hubs: List of VirtualHubs in the VirtualWAN. :vartype virtual_hubs: list[~azure.mgmt.network.v2018_07_01.models.SubResource] :ivar vpn_sites: :vartype vpn_sites: list[~azure.mgmt.network.v2018_07_01.models.SubResource] :ivar provisioning_state: The provisioning state of the resource. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". 
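
    A construction sketch for illustration only (values are hypothetical; server-populated fields
    such as ``virtual_hubs`` and ``vpn_sites`` are read-only)::

        wan = VirtualWAN(location="westus", disable_vpn_encryption=False)
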
:vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "etag": {"readonly": True}, "virtual_hubs": {"readonly": True}, "vpn_sites": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "disable_vpn_encryption": {"key": "properties.disableVpnEncryption", "type": "bool"}, "virtual_hubs": {"key": "properties.virtualHubs", "type": "[SubResource]"}, "vpn_sites": {"key": "properties.vpnSites", "type": "[SubResource]"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, disable_vpn_encryption: Optional[bool] = None, provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword disable_vpn_encryption: Vpn encryption to be disabled or not. :paramtype disable_vpn_encryption: bool :keyword provisioning_state: The provisioning state of the resource. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". :paramtype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.etag = None self.disable_vpn_encryption = disable_vpn_encryption self.virtual_hubs = None self.vpn_sites = None self.provisioning_state = provisioning_state class VpnClientConfiguration(_serialization.Model): """VpnClientConfiguration for P2S client. :ivar vpn_client_address_pool: The reference of the address space resource which represents Address space for P2S VpnClient. :vartype vpn_client_address_pool: ~azure.mgmt.network.v2018_07_01.models.AddressSpace :ivar vpn_client_root_certificates: VpnClientRootCertificate for virtual network gateway. :vartype vpn_client_root_certificates: list[~azure.mgmt.network.v2018_07_01.models.VpnClientRootCertificate] :ivar vpn_client_revoked_certificates: VpnClientRevokedCertificate for Virtual network gateway. :vartype vpn_client_revoked_certificates: list[~azure.mgmt.network.v2018_07_01.models.VpnClientRevokedCertificate] :ivar vpn_client_protocols: VpnClientProtocols for Virtual network gateway. :vartype vpn_client_protocols: list[str or ~azure.mgmt.network.v2018_07_01.models.VpnClientProtocol] :ivar vpn_client_ipsec_policies: VpnClientIpsecPolicies for virtual network gateway P2S client. :vartype vpn_client_ipsec_policies: list[~azure.mgmt.network.v2018_07_01.models.IpsecPolicy] :ivar radius_server_address: The radius server address property of the VirtualNetworkGateway resource for vpn client connection. :vartype radius_server_address: str :ivar radius_server_secret: The radius secret property of the VirtualNetworkGateway resource for vpn client connection. 
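
    A P2S configuration sketch for illustration only (``root_cert`` stands for a
    ``VpnClientRootCertificate`` built separately; the protocol and prefix values are
    hypothetical)::

        p2s = VpnClientConfiguration(
            vpn_client_address_pool=AddressSpace(address_prefixes=["172.16.201.0/24"]),
            vpn_client_protocols=["IkeV2"],
            vpn_client_root_certificates=[root_cert],
        )
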
:vartype radius_server_secret: str
    """

    _attribute_map = {
        "vpn_client_address_pool": {"key": "vpnClientAddressPool", "type": "AddressSpace"},
        "vpn_client_root_certificates": {"key": "vpnClientRootCertificates", "type": "[VpnClientRootCertificate]"},
        "vpn_client_revoked_certificates": {
            "key": "vpnClientRevokedCertificates",
            "type": "[VpnClientRevokedCertificate]",
        },
        "vpn_client_protocols": {"key": "vpnClientProtocols", "type": "[str]"},
        "vpn_client_ipsec_policies": {"key": "vpnClientIpsecPolicies", "type": "[IpsecPolicy]"},
        "radius_server_address": {"key": "radiusServerAddress", "type": "str"},
        "radius_server_secret": {"key": "radiusServerSecret", "type": "str"},
    }

    def __init__(
        self,
        *,
        vpn_client_address_pool: Optional["_models.AddressSpace"] = None,
        vpn_client_root_certificates: Optional[List["_models.VpnClientRootCertificate"]] = None,
        vpn_client_revoked_certificates: Optional[List["_models.VpnClientRevokedCertificate"]] = None,
        vpn_client_protocols: Optional[List[Union[str, "_models.VpnClientProtocol"]]] = None,
        vpn_client_ipsec_policies: Optional[List["_models.IpsecPolicy"]] = None,
        radius_server_address: Optional[str] = None,
        radius_server_secret: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword vpn_client_address_pool: The reference of the address space resource which
         represents Address space for P2S VpnClient.
        :paramtype vpn_client_address_pool: ~azure.mgmt.network.v2018_07_01.models.AddressSpace
        :keyword vpn_client_root_certificates: VpnClientRootCertificate for virtual network gateway.
        :paramtype vpn_client_root_certificates:
         list[~azure.mgmt.network.v2018_07_01.models.VpnClientRootCertificate]
        :keyword vpn_client_revoked_certificates: VpnClientRevokedCertificate for Virtual network
         gateway.
        :paramtype vpn_client_revoked_certificates:
         list[~azure.mgmt.network.v2018_07_01.models.VpnClientRevokedCertificate]
        :keyword vpn_client_protocols: VpnClientProtocols for Virtual network gateway.
        :paramtype vpn_client_protocols: list[str or
         ~azure.mgmt.network.v2018_07_01.models.VpnClientProtocol]
        :keyword vpn_client_ipsec_policies: VpnClientIpsecPolicies for virtual network gateway P2S
         client.
        :paramtype vpn_client_ipsec_policies: list[~azure.mgmt.network.v2018_07_01.models.IpsecPolicy]
        :keyword radius_server_address: The radius server address property of the
         VirtualNetworkGateway resource for vpn client connection.
        :paramtype radius_server_address: str
        :keyword radius_server_secret: The radius secret property of the VirtualNetworkGateway
         resource for vpn client connection.
        :paramtype radius_server_secret: str
        """
        super().__init__(**kwargs)
        self.vpn_client_address_pool = vpn_client_address_pool
        self.vpn_client_root_certificates = vpn_client_root_certificates
        self.vpn_client_revoked_certificates = vpn_client_revoked_certificates
        self.vpn_client_protocols = vpn_client_protocols
        self.vpn_client_ipsec_policies = vpn_client_ipsec_policies
        self.radius_server_address = radius_server_address
        self.radius_server_secret = radius_server_secret


class VpnClientIPsecParameters(_serialization.Model):
    """IPSec parameters for a virtual network gateway P2S connection.

    All required parameters must be populated in order to send to Azure.

    :ivar sa_life_time_seconds: The IPSec Security Association (also called Quick Mode or Phase 2
     SA) lifetime in seconds for P2S client. Required.
    :vartype sa_life_time_seconds: int
    :ivar sa_data_size_kilobytes: The IPSec Security Association (also called Quick Mode or Phase
     2 SA) payload size in KB for P2S client. Required.
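
    All eight fields are required; a construction sketch for illustration only (the values are
    hypothetical but drawn from the known value lists below)::

        params = VpnClientIPsecParameters(
            sa_life_time_seconds=86400,
            sa_data_size_kilobytes=429497,
            ipsec_encryption="GCMAES256",
            ipsec_integrity="GCMAES256",
            ike_encryption="AES256",
            ike_integrity="SHA384",
            dh_group="DHGroup24",
            pfs_group="PFS24",
        )
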
:vartype sa_data_size_kilobytes: int
    :ivar ipsec_encryption: The IPSec encryption algorithm (IKE phase 1). Required. Known values
     are: "None", "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES128", "GCMAES192", and
     "GCMAES256".
    :vartype ipsec_encryption: str or ~azure.mgmt.network.v2018_07_01.models.IpsecEncryption
    :ivar ipsec_integrity: The IPSec integrity algorithm (IKE phase 1). Required. Known values
     are: "MD5", "SHA1", "SHA256", "GCMAES128", "GCMAES192", and "GCMAES256".
    :vartype ipsec_integrity: str or ~azure.mgmt.network.v2018_07_01.models.IpsecIntegrity
    :ivar ike_encryption: The IKE encryption algorithm (IKE phase 2). Required. Known values are:
     "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES256", and "GCMAES128".
    :vartype ike_encryption: str or ~azure.mgmt.network.v2018_07_01.models.IkeEncryption
    :ivar ike_integrity: The IKE integrity algorithm (IKE phase 2). Required. Known values are:
     "MD5", "SHA1", "SHA256", "SHA384", "GCMAES256", and "GCMAES128".
    :vartype ike_integrity: str or ~azure.mgmt.network.v2018_07_01.models.IkeIntegrity
    :ivar dh_group: The DH Groups used in IKE Phase 1 for initial SA. Required. Known values are:
     "None", "DHGroup1", "DHGroup2", "DHGroup14", "DHGroup2048", "ECP256", "ECP384", and
     "DHGroup24".
    :vartype dh_group: str or ~azure.mgmt.network.v2018_07_01.models.DhGroup
    :ivar pfs_group: The Pfs Groups used in IKE Phase 2 for new child SA. Required. Known values
     are: "None", "PFS1", "PFS2", "PFS2048", "ECP256", "ECP384", "PFS24", "PFS14", and "PFSMM".
    :vartype pfs_group: str or ~azure.mgmt.network.v2018_07_01.models.PfsGroup
    """

    _validation = {
        "sa_life_time_seconds": {"required": True},
        "sa_data_size_kilobytes": {"required": True},
        "ipsec_encryption": {"required": True},
        "ipsec_integrity": {"required": True},
        "ike_encryption": {"required": True},
        "ike_integrity": {"required": True},
        "dh_group": {"required": True},
        "pfs_group": {"required": True},
    }

    _attribute_map = {
        "sa_life_time_seconds": {"key": "saLifeTimeSeconds", "type": "int"},
        "sa_data_size_kilobytes": {"key": "saDataSizeKilobytes", "type": "int"},
        "ipsec_encryption": {"key": "ipsecEncryption", "type": "str"},
        "ipsec_integrity": {"key": "ipsecIntegrity", "type": "str"},
        "ike_encryption": {"key": "ikeEncryption", "type": "str"},
        "ike_integrity": {"key": "ikeIntegrity", "type": "str"},
        "dh_group": {"key": "dhGroup", "type": "str"},
        "pfs_group": {"key": "pfsGroup", "type": "str"},
    }

    def __init__(
        self,
        *,
        sa_life_time_seconds: int,
        sa_data_size_kilobytes: int,
        ipsec_encryption: Union[str, "_models.IpsecEncryption"],
        ipsec_integrity: Union[str, "_models.IpsecIntegrity"],
        ike_encryption: Union[str, "_models.IkeEncryption"],
        ike_integrity: Union[str, "_models.IkeIntegrity"],
        dh_group: Union[str, "_models.DhGroup"],
        pfs_group: Union[str, "_models.PfsGroup"],
        **kwargs
    ):
        """
        :keyword sa_life_time_seconds: The IPSec Security Association (also called Quick Mode or
         Phase 2 SA) lifetime in seconds for P2S client. Required.
        :paramtype sa_life_time_seconds: int
        :keyword sa_data_size_kilobytes: The IPSec Security Association (also called Quick Mode or
         Phase 2 SA) payload size in KB for P2S client. Required.
        :paramtype sa_data_size_kilobytes: int
        :keyword ipsec_encryption: The IPSec encryption algorithm (IKE phase 1). Required. Known
         values are: "None", "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES128", "GCMAES192",
         and "GCMAES256".
        :paramtype ipsec_encryption: str or ~azure.mgmt.network.v2018_07_01.models.IpsecEncryption
        :keyword ipsec_integrity: The IPSec integrity algorithm (IKE phase 1). Required.
Known values are: "MD5", "SHA1", "SHA256", "GCMAES128", "GCMAES192", and "GCMAES256". :paramtype ipsec_integrity: str or ~azure.mgmt.network.v2018_07_01.models.IpsecIntegrity :keyword ike_encryption: The IKE encryption algorithm (IKE phase 2). Required. Known values are: "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES256", and "GCMAES128". :paramtype ike_encryption: str or ~azure.mgmt.network.v2018_07_01.models.IkeEncryption :keyword ike_integrity: The IKE integrity algorithm (IKE phase 2). Required. Known values are: "MD5", "SHA1", "SHA256", "SHA384", "GCMAES256", and "GCMAES128". :paramtype ike_integrity: str or ~azure.mgmt.network.v2018_07_01.models.IkeIntegrity :keyword dh_group: The DH Groups used in IKE Phase 1 for initial SA. Required. Known values are: "None", "DHGroup1", "DHGroup2", "DHGroup14", "DHGroup2048", "ECP256", "ECP384", and "DHGroup24". :paramtype dh_group: str or ~azure.mgmt.network.v2018_07_01.models.DhGroup :keyword pfs_group: The Pfs Groups used in IKE Phase 2 for new child SA. Required. Known values are: "None", "PFS1", "PFS2", "PFS2048", "ECP256", "ECP384", "PFS24", "PFS14", and "PFSMM". :paramtype pfs_group: str or ~azure.mgmt.network.v2018_07_01.models.PfsGroup """ super().__init__(**kwargs) self.sa_life_time_seconds = sa_life_time_seconds self.sa_data_size_kilobytes = sa_data_size_kilobytes self.ipsec_encryption = ipsec_encryption self.ipsec_integrity = ipsec_integrity self.ike_encryption = ike_encryption self.ike_integrity = ike_integrity self.dh_group = dh_group self.pfs_group = pfs_group class VpnClientParameters(_serialization.Model): """Vpn Client Parameters for package generation. :ivar processor_architecture: VPN client Processor Architecture. Possible values are: 'AMD64' and 'X86'. Known values are: "Amd64" and "X86". :vartype processor_architecture: str or ~azure.mgmt.network.v2018_07_01.models.ProcessorArchitecture :ivar authentication_method: VPN client Authentication Method. Possible values are: 'EAPTLS' and 'EAPMSCHAPv2'. Known values are: "EAPTLS" and "EAPMSCHAPv2". :vartype authentication_method: str or ~azure.mgmt.network.v2018_07_01.models.AuthenticationMethod :ivar radius_server_auth_certificate: The public certificate data for the radius server authentication certificate as a Base-64 encoded string. Required only if external radius authentication has been configured with EAPTLS authentication. :vartype radius_server_auth_certificate: str :ivar client_root_certificates: A list of client root certificates public certificate data encoded as Base-64 strings. Optional parameter for external radius based authentication with EAPTLS. :vartype client_root_certificates: list[str] """ _attribute_map = { "processor_architecture": {"key": "processorArchitecture", "type": "str"}, "authentication_method": {"key": "authenticationMethod", "type": "str"}, "radius_server_auth_certificate": {"key": "radiusServerAuthCertificate", "type": "str"}, "client_root_certificates": {"key": "clientRootCertificates", "type": "[str]"}, } def __init__( self, *, processor_architecture: Optional[Union[str, "_models.ProcessorArchitecture"]] = None, authentication_method: Optional[Union[str, "_models.AuthenticationMethod"]] = None, radius_server_auth_certificate: Optional[str] = None, client_root_certificates: Optional[List[str]] = None, **kwargs ): """ :keyword processor_architecture: VPN client Processor Architecture. Possible values are: 'AMD64' and 'X86'. Known values are: "Amd64" and "X86". 
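
        A construction sketch for illustration only (values are hypothetical)::

            params = VpnClientParameters(
                processor_architecture="Amd64",
                authentication_method="EAPTLS",
            )
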
:paramtype processor_architecture: str or ~azure.mgmt.network.v2018_07_01.models.ProcessorArchitecture :keyword authentication_method: VPN client Authentication Method. Possible values are: 'EAPTLS' and 'EAPMSCHAPv2'. Known values are: "EAPTLS" and "EAPMSCHAPv2". :paramtype authentication_method: str or ~azure.mgmt.network.v2018_07_01.models.AuthenticationMethod :keyword radius_server_auth_certificate: The public certificate data for the radius server authentication certificate as a Base-64 encoded string. Required only if external radius authentication has been configured with EAPTLS authentication. :paramtype radius_server_auth_certificate: str :keyword client_root_certificates: A list of client root certificates public certificate data encoded as Base-64 strings. Optional parameter for external radius based authentication with EAPTLS. :paramtype client_root_certificates: list[str] """ super().__init__(**kwargs) self.processor_architecture = processor_architecture self.authentication_method = authentication_method self.radius_server_auth_certificate = radius_server_auth_certificate self.client_root_certificates = client_root_certificates class VpnClientRevokedCertificate(SubResource): """VPN client revoked certificate of virtual network gateway. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar thumbprint: The revoked VPN client certificate thumbprint. :vartype thumbprint: str :ivar provisioning_state: The provisioning state of the VPN client revoked certificate resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _validation = { "provisioning_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "thumbprint": {"key": "properties.thumbprint", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, thumbprint: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword thumbprint: The revoked VPN client certificate thumbprint. :paramtype thumbprint: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.thumbprint = thumbprint self.provisioning_state = None class VpnClientRootCertificate(SubResource): """VPN client root certificate of virtual network gateway. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar id: Resource ID. :vartype id: str :ivar name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. 
:vartype etag: str :ivar public_cert_data: The certificate public data. Required. :vartype public_cert_data: str :ivar provisioning_state: The provisioning state of the VPN client root certificate resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :vartype provisioning_state: str """ _validation = { "public_cert_data": {"required": True}, "provisioning_state": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "public_cert_data": {"key": "properties.publicCertData", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, public_cert_data: str, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, etag: Optional[str] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword etag: A unique read-only string that changes whenever the resource is updated. :paramtype etag: str :keyword public_cert_data: The certificate public data. Required. :paramtype public_cert_data: str """ super().__init__(id=id, **kwargs) self.name = name self.etag = etag self.public_cert_data = public_cert_data self.provisioning_state = None class VpnConnection(SubResource): # pylint: disable=too-many-instance-attributes """VpnConnection Resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :vartype name: str :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar remote_vpn_site: Id of the connected vpn site. :vartype remote_vpn_site: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar routing_weight: routing weight for vpn connection. :vartype routing_weight: int :ivar connection_status: The connection status. Known values are: "Unknown", "Connecting", "Connected", and "NotConnected". :vartype connection_status: str or ~azure.mgmt.network.v2018_07_01.models.VpnConnectionStatus :ivar ingress_bytes_transferred: Ingress bytes transferred. :vartype ingress_bytes_transferred: int :ivar egress_bytes_transferred: Egress bytes transferred. :vartype egress_bytes_transferred: int :ivar connection_bandwidth_in_mbps: Expected bandwidth in MBPS. :vartype connection_bandwidth_in_mbps: int :ivar shared_key: SharedKey for the vpn connection. :vartype shared_key: str :ivar enable_bgp: EnableBgp flag. :vartype enable_bgp: bool :ivar ipsec_policies: The IPSec Policies to be considered by this connection. :vartype ipsec_policies: list[~azure.mgmt.network.v2018_07_01.models.IpsecPolicy] :ivar provisioning_state: The provisioning state of the resource. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". 
:vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState """ _validation = { "etag": {"readonly": True}, "connection_status": {"readonly": True}, "ingress_bytes_transferred": {"readonly": True}, "egress_bytes_transferred": {"readonly": True}, "connection_bandwidth_in_mbps": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "remote_vpn_site": {"key": "properties.remoteVpnSite", "type": "SubResource"}, "routing_weight": {"key": "properties.routingWeight", "type": "int"}, "connection_status": {"key": "properties.connectionStatus", "type": "str"}, "ingress_bytes_transferred": {"key": "properties.ingressBytesTransferred", "type": "int"}, "egress_bytes_transferred": {"key": "properties.egressBytesTransferred", "type": "int"}, "connection_bandwidth_in_mbps": {"key": "properties.connectionBandwidthInMbps", "type": "int"}, "shared_key": {"key": "properties.sharedKey", "type": "str"}, "enable_bgp": {"key": "properties.enableBgp", "type": "bool"}, "ipsec_policies": {"key": "properties.ipsecPolicies", "type": "[IpsecPolicy]"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin name: Optional[str] = None, remote_vpn_site: Optional["_models.SubResource"] = None, routing_weight: Optional[int] = None, shared_key: Optional[str] = None, enable_bgp: Optional[bool] = None, ipsec_policies: Optional[List["_models.IpsecPolicy"]] = None, provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :paramtype name: str :keyword remote_vpn_site: Id of the connected vpn site. :paramtype remote_vpn_site: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword routing_weight: routing weight for vpn connection. :paramtype routing_weight: int :keyword shared_key: SharedKey for the vpn connection. :paramtype shared_key: str :keyword enable_bgp: EnableBgp flag. :paramtype enable_bgp: bool :keyword ipsec_policies: The IPSec Policies to be considered by this connection. :paramtype ipsec_policies: list[~azure.mgmt.network.v2018_07_01.models.IpsecPolicy] :keyword provisioning_state: The provisioning state of the resource. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". :paramtype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState """ super().__init__(id=id, **kwargs) self.name = name self.etag = None self.remote_vpn_site = remote_vpn_site self.routing_weight = routing_weight self.connection_status = None self.ingress_bytes_transferred = None self.egress_bytes_transferred = None self.connection_bandwidth_in_mbps = None self.shared_key = shared_key self.enable_bgp = enable_bgp self.ipsec_policies = ipsec_policies self.provisioning_state = provisioning_state class VpnDeviceScriptParameters(_serialization.Model): """Vpn device configuration script generation parameters. :ivar vendor: The vendor for the vpn device. :vartype vendor: str :ivar device_family: The device family for the vpn device. :vartype device_family: str :ivar firmware_version: The firmware version for the vpn device. 
:vartype firmware_version: str """ _attribute_map = { "vendor": {"key": "vendor", "type": "str"}, "device_family": {"key": "deviceFamily", "type": "str"}, "firmware_version": {"key": "firmwareVersion", "type": "str"}, } def __init__( self, *, vendor: Optional[str] = None, device_family: Optional[str] = None, firmware_version: Optional[str] = None, **kwargs ): """ :keyword vendor: The vendor for the vpn device. :paramtype vendor: str :keyword device_family: The device family for the vpn device. :paramtype device_family: str :keyword firmware_version: The firmware version for the vpn device. :paramtype firmware_version: str """ super().__init__(**kwargs) self.vendor = vendor self.device_family = device_family self.firmware_version = firmware_version class VpnGateway(Resource): # pylint: disable=too-many-instance-attributes """VpnGateway Resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar virtual_hub: The VirtualHub to which the gateway belongs. :vartype virtual_hub: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar connections: list of all vpn connections to the gateway. :vartype connections: list[~azure.mgmt.network.v2018_07_01.models.VpnConnection] :ivar bgp_settings: Local network gateway's BGP speaker settings. :vartype bgp_settings: ~azure.mgmt.network.v2018_07_01.models.BgpSettings :ivar provisioning_state: The provisioning state of the resource. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". :vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState :ivar policies: The policies applied to this vpn gateway. :vartype policies: ~azure.mgmt.network.v2018_07_01.models.Policies """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "etag": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "virtual_hub": {"key": "properties.virtualHub", "type": "SubResource"}, "connections": {"key": "properties.connections", "type": "[VpnConnection]"}, "bgp_settings": {"key": "properties.bgpSettings", "type": "BgpSettings"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "policies": {"key": "properties.policies", "type": "Policies"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, virtual_hub: Optional["_models.SubResource"] = None, connections: Optional[List["_models.VpnConnection"]] = None, bgp_settings: Optional["_models.BgpSettings"] = None, provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None, policies: Optional["_models.Policies"] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword virtual_hub: The VirtualHub to which the gateway belongs. 
:paramtype virtual_hub: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword connections: list of all vpn connections to the gateway. :paramtype connections: list[~azure.mgmt.network.v2018_07_01.models.VpnConnection] :keyword bgp_settings: Local network gateway's BGP speaker settings. :paramtype bgp_settings: ~azure.mgmt.network.v2018_07_01.models.BgpSettings :keyword provisioning_state: The provisioning state of the resource. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". :paramtype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState :keyword policies: The policies applied to this vpn gateway. :paramtype policies: ~azure.mgmt.network.v2018_07_01.models.Policies """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.etag = None self.virtual_hub = virtual_hub self.connections = connections self.bgp_settings = bgp_settings self.provisioning_state = provisioning_state self.policies = policies class VpnSite(Resource): # pylint: disable=too-many-instance-attributes """VpnSite Resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar location: Resource location. :vartype location: str :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar virtual_wan: The VirtualWAN to which the vpnSite belongs. :vartype virtual_wan: ~azure.mgmt.network.v2018_07_01.models.SubResource :ivar device_properties: The device properties. :vartype device_properties: ~azure.mgmt.network.v2018_07_01.models.DeviceProperties :ivar ip_address: The ip-address for the vpn-site. :vartype ip_address: str :ivar site_key: The key for vpn-site that can be used for connections. :vartype site_key: str :ivar address_space: The AddressSpace that contains an array of IP address ranges. :vartype address_space: ~azure.mgmt.network.v2018_07_01.models.AddressSpace :ivar bgp_properties: The set of bgp properties. :vartype bgp_properties: ~azure.mgmt.network.v2018_07_01.models.BgpSettings :ivar provisioning_state: The provisioning state of the resource. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". 
:vartype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState """ _validation = { "name": {"readonly": True}, "type": {"readonly": True}, "etag": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "location": {"key": "location", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "etag": {"key": "etag", "type": "str"}, "virtual_wan": {"key": "properties.virtualWAN", "type": "SubResource"}, "device_properties": {"key": "properties.deviceProperties", "type": "DeviceProperties"}, "ip_address": {"key": "properties.ipAddress", "type": "str"}, "site_key": {"key": "properties.siteKey", "type": "str"}, "address_space": {"key": "properties.addressSpace", "type": "AddressSpace"}, "bgp_properties": {"key": "properties.bgpProperties", "type": "BgpSettings"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, virtual_wan: Optional["_models.SubResource"] = None, device_properties: Optional["_models.DeviceProperties"] = None, ip_address: Optional[str] = None, site_key: Optional[str] = None, address_space: Optional["_models.AddressSpace"] = None, bgp_properties: Optional["_models.BgpSettings"] = None, provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None, **kwargs ): """ :keyword id: Resource ID. :paramtype id: str :keyword location: Resource location. :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword virtual_wan: The VirtualWAN to which the vpnSite belongs. :paramtype virtual_wan: ~azure.mgmt.network.v2018_07_01.models.SubResource :keyword device_properties: The device properties. :paramtype device_properties: ~azure.mgmt.network.v2018_07_01.models.DeviceProperties :keyword ip_address: The ip-address for the vpn-site. :paramtype ip_address: str :keyword site_key: The key for vpn-site that can be used for connections. :paramtype site_key: str :keyword address_space: The AddressSpace that contains an array of IP address ranges. :paramtype address_space: ~azure.mgmt.network.v2018_07_01.models.AddressSpace :keyword bgp_properties: The set of bgp properties. :paramtype bgp_properties: ~azure.mgmt.network.v2018_07_01.models.BgpSettings :keyword provisioning_state: The provisioning state of the resource. Known values are: "Succeeded", "Updating", "Deleting", and "Failed". :paramtype provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState """ super().__init__(id=id, location=location, tags=tags, **kwargs) self.etag = None self.virtual_wan = virtual_wan self.device_properties = device_properties self.ip_address = ip_address self.site_key = site_key self.address_space = address_space self.bgp_properties = bgp_properties self.provisioning_state = provisioning_state class VpnSiteId(_serialization.Model): """VpnSite Resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar vpn_site: The resource-uri of the vpn-site for which config is to be fetched. :vartype vpn_site: str """ _validation = { "vpn_site": {"readonly": True}, } _attribute_map = { "vpn_site": {"key": "vpnSite", "type": "str"}, } def __init__(self, **kwargs): """ """ super().__init__(**kwargs) self.vpn_site = None
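

# --- Usage sketch (illustrative; not part of the generated SDK file) ---
# A hedged example of constructing one of the models defined above, using only
# the keyword arguments its __init__ declares. All values are placeholders,
# not real Azure resources.
def _example_vpn_site():
    return VpnSite(
        location="westeurope",
        tags={"env": "demo"},
        ip_address="203.0.113.10",  # documentation range, not a real endpoint
        site_key="example-shared-key",
    )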
{ "content_hash": "da94ab8ef25438719e9554222ddc7b53", "timestamp": "", "source": "github", "line_count": 16010, "max_line_length": 540, "avg_line_length": 45.1892567145534, "alnum_prop": 0.6541272737325151, "repo_name": "Azure/azure-sdk-for-python", "id": "2c64277facbc30bb4a26dea7c6e2dfeffdb754fe", "size": "723981", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_07_01/models/_models_py3.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1224" }, { "name": "Bicep", "bytes": "24196" }, { "name": "CSS", "bytes": "6089" }, { "name": "Dockerfile", "bytes": "4892" }, { "name": "HTML", "bytes": "12058" }, { "name": "JavaScript", "bytes": "8137" }, { "name": "Jinja", "bytes": "10377" }, { "name": "Jupyter Notebook", "bytes": "272022" }, { "name": "PowerShell", "bytes": "518535" }, { "name": "Python", "bytes": "715484989" }, { "name": "Shell", "bytes": "3631" } ], "symlink_target": "" }
from elasticsearch import Elasticsearch from random import randint from collections import defaultdict import scipy.stats as stats import requests import time import re import subprocess import config import gzip import argparse import os import json #globals timeout=300 es = Elasticsearch( [{'host': config.elastic_host,'port': config.elastic_port}], ) #total number of publications #curl -XGET 'localhost:9200/semmeddb/_search?pretty' -H "Content-Type: application/json" -d '{"size":0, "aggs" : {"type_count":{"cardinality" :{ "field" : "PMID" }}}}' globalPub=17734131 #time python scripts/run.py -m compare -a 'CR1,CCDC6,KAT8' -b 'Alzheimers_disease' #globals predIgnore = ['PART_OF','ISA','LOCATION_OF','PROCESS_OF','ADMINISTERED_TO','METHOD_OF','USES','compared_with'] termIgnore=['Patients','Disease','Genes','Proteins'] typeFilterList = ["aapp","enzy","gngm","chem","clnd","horm","hops","inch","lipd","neop","orch"] def run_query(filterData,index,size=100000): #print(index) start=time.time() res=es.search( request_timeout=timeout, index=index, body={ "size":size, #{}"profile": True, "query": { "bool" : { "filter" : filterData } } }) end = time.time() t=round((end - start), 4) #print "Time taken:",t, "seconds" #print res['hits']['total'] return t,res['hits']['total'],res['hits']['hits'] def get_semmed_data(index='semmeddb_triple_freqs',query=[]): filterData={"terms":{"SUB_PRED_OBJ":query}} start=time.time() res=es.search( request_timeout=timeout, index=index, body={ "size":1000000, "query": { "bool" : { "filter" : filterData } } } ) end = time.time() t=round((end - start), 4) #print "Time taken:",t, "seconds" return res['hits']['hits'] def create_es_filter(pmidList): #https://github.com/MRCIEU/melodi/blob/master/data/SRDEF.txt #typeFilterList = [ # "aapp","amas","anab","bacs","biof","bpoc","celf","chem","comd","dsyn","emod","enzy","genf","gngm","hcpp","hops","horm","imft","inch", # "moft","mosq","neop","nnon","nsba","orch","orgf","ortf","patf","rcpt","sbst","socb","tisu","topp","virs","vita"] #typeFilterList = [ # "aapp","amas","bacs","celf","enzy","gngm","horm","orch"] #typeFilterList = ["aapp","enzy","gngm"] #typeFilterList = ["aapp","enzy","gngm","chem","clnd","dsyn","genf","horm","hops","inch","lipd","neop","orch"] filterOptions = [ {"terms":{"PMID":pmidList}}, {"terms":{"OBJECT_SEMTYPE":typeFilterList}}, {"terms":{"SUBJECT_SEMTYPE":typeFilterList}}, ] return filterOptions def es_query(filterData,index,predCounts,resDic,pubDic): #print(filterData) t,resCount,res=run_query(filterData,index) print(resCount) if resCount>0: #print(filterData) #print t,resCount for r in res: PMID=r['_source']['PMID'] #PREDICATION_ID=r['_source']['PREDICATION_ID'] PREDICATE=r['_source']['PREDICATE'] OBJECT_NAME=r['_source']['OBJECT_NAME'] OBJECT_TYPE=r['_source']['OBJECT_SEMTYPE'] OBJECT_CUI=r['_source']['OBJECT_CUI'] SUBJECT_NAME=r['_source']['SUBJECT_NAME'] SUBJECT_TYPE=r['_source']['SUBJECT_SEMTYPE'] SUBJECT_CUI=r['_source']['SUBJECT_CUI'] PREDICATION_ID=SUBJECT_NAME+':'+PREDICATE+':'+OBJECT_NAME #filter on predicate if PREDICATE not in predIgnore and OBJECT_NAME not in termIgnore and SUBJECT_NAME not in termIgnore: resDic[PREDICATION_ID]={'sub':SUBJECT_NAME,'subType':SUBJECT_TYPE,'subCUI':SUBJECT_CUI,'pred':PREDICATE,'obj':OBJECT_NAME,'objType':OBJECT_TYPE,'objCUI':OBJECT_CUI} if PREDICATION_ID in pubDic: pubDic[PREDICATION_ID].add(PMID) else: pubDic[PREDICATION_ID] = {PMID} #print PMID,PREDICATION_ID if PREDICATION_ID in predCounts: predCounts[PREDICATION_ID]+=1 else: predCounts[PREDICATION_ID]=1 return 
t,resCount,resDic,predCounts,pubDic

def fet(localSem,localPub,globalSem,globalPub):
    #print(localSem,localPub,globalSem,globalPub)
    oddsratio, pvalue = stats.fisher_exact([[localSem, localPub], [globalSem, globalPub]])
    #print(oddsratio, pvalue)
    return oddsratio,pvalue

def pub_sem(query,sem_trip_dic):
    start=time.time()
    print("\n### Getting ids for "+query+" ###")
    url="http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?"
    params = {'db': 'pubmed', 'term': query,'retmax':'1000000000','rettype':'uilist'}
    #r = requests.post(url) # GET with params in URL
    r = requests.get(url, params=params)

    #create random file name
    n = 10
    ran=''.join(["%s" % randint(0, 9) for num in range(0, n)])
    ranFile = '/tmp/'+ran+'.txt'
    out = open(ranFile, 'w')
    out.write(r.text)
    out.close()
    r.status_code
    end=time.time()
    print("Time taken:",round((end-start)/60,3),"minutes")

    #count the number of pmids
    cmd = "grep -c '<Id>' "+ranFile
    pCount=0
    #print cmd
    #check for empty searches
    try:
        pCount = int(subprocess.check_output(cmd, shell=True))
    except subprocess.CalledProcessError:
        # grep exits non-zero when there are no matches
        print("No results")
    print("Total pmids: "+str(pCount))
    maxA=1000000
    counter=0
    pmidList=[]
    totalRes=0
    predCounts={}
    resDic={}
    pubDic={}
    chunkSize=10000
    updateSize=10000
    filterOptions = create_es_filter(pmidList)
    if 0<pCount<maxA:
        print("\n### Parsing ids ###")
        start = time.time()
        f = open('/tmp/'+ran+'.txt', 'r')
        for line in f:
            l = re.search(r'.*?<Id>(.*?)</Id>', line)
            if l:
                pmid = l.group(1)
                pmidList.append(pmid)
                counter+=1
                if counter % updateSize == 0:
                    pc = round((float(counter)/float(pCount))*100)
                    print(str(pc)+' % : '+str(counter)+' '+str(len(predCounts)))
                if counter % chunkSize == 0:
                    #print('Querying ES...')
                    # rebuild the filter so it references the current pmid chunk
                    # (pmidList is rebound below, so the old filter would go stale)
                    filterOptions = create_es_filter(pmidList)
                    t,resCount,resDic,predCounts,pubDic=es_query(filterData=filterOptions,index='semmeddb',predCounts=predCounts,resDic=resDic,pubDic=pubDic)
                    totalRes+=resCount
                    pmidList=[]
        #print(filterOptions)
        # final (possibly partial) chunk
        filterOptions = create_es_filter(pmidList)
        t,resCount,resDic,predCounts,pubDic=es_query(filterData=filterOptions,index='semmeddb',predCounts=predCounts,resDic=resDic,pubDic=pubDic)
        totalRes+=resCount
        pc = round((float(counter)/float(pCount))*100)
        print(str(pc)+' % : '+str(counter)+' '+str(len(predCounts)))
        end = time.time()
        print("\tTime taken:", round((end - start) / 60, 3), "minutes")
        print('Total results:',totalRes)

        outFile=query.replace(' ','_')+'.gz'
        o = gzip.open('data/'+outFile,'w')
        #print(predCounts)

        #get global number of publications
        globalSem=es.count('semmeddb')['count']
        #globalSem=25000000

        #get triple freqs
        tripleFreqs = {}
        print('Getting freqs...',len(predCounts))
        #print(predCounts.keys())
        freq_res = get_semmed_data(query=list(predCounts.keys()))
        #print(freq_res)
        for i in freq_res:
            tripleFreqs[i['_source']['SUB_PRED_OBJ']]=i['_source']['frequency']

        #get gene data from semmeddb-genes - don't need this, CUI for gngm are or contain entrez IDs!!!
# print('Getting gene info...') # cuiSet=set() # for r in resDic: # subCUI=resDic[r]['subCUI'] # objCUI=resDic[r]['objCUI'] # cuiSet.add(subCUI) # cuiSet.add(objCUI) # filterData = [ # {"terms":{"CUI":list(cuiSet)}} # ] # genet,geneResCount,geneRes=run_query(filterData,'semmeddb-genes') # cuiToEntrez={} # for g in geneRes: # cuiToEntrez[g['_source']['CUI']]=g['_source']['GENE_ID'] # print(cuiToEntrez) print('Doing enrichment...') start = time.time() counter=0 for k in sorted(predCounts, key=lambda k: predCounts[k], reverse=True): counter+=1 if counter % chunkSize == 0: pc = round((float(counter)/float(len(predCounts)))*100) print(str(pc)+' % : '+str(counter)) if predCounts[k]>1: if freq_res: odds,pval=fet(predCounts[k],totalRes,tripleFreqs[k],globalSem) #if resDic[k]['subCUI'] in cuiToEntrez: # subEntrez=cuiToEntrez[resDic[k]['subCUI']] #else: # subEntrez = 'NA' # print(resDic[k]['subCUI'],'missing') #if resDic[k]['objCUI'] in cuiToEntrez: # objEntrez=cuiToEntrez[resDic[k]['objCUI']] #else: # objEntrez = 'NA' # print(resDic[k]['objCUI'],'missing') t = k+'\t'+resDic[k]['subCUI']+'\t'+resDic[k]['sub']+'\t'+resDic[k]['subType']+'\t'+resDic[k]['pred']+'\t'+resDic[k]['objCUI']+'\t'+resDic[k]['obj']+'\t'+resDic[k]['objType']+'\t'+str(predCounts[k])+'\t'+str(totalRes)+'\t'+str(tripleFreqs[k])+'\t'+str(globalPub)+'\t'+str(odds)+'\t'+str(pval)+'\t'+";".join(pubDic[k])+'\n' o.write(t.encode('utf-8')) else: continue #print(k,'has no freq') o.close() if len(predCounts)>1: pc = round((float(counter)/float(len(predCounts)))*100) else: pc=100 print(str(pc)+' % : '+str(counter)) end = time.time() print("\tTime taken:", round((end - start) / 60, 3), "minutes") else: print('0 or too many articles') def read_sem_triples(): print('getting background freqs...') sem_trip_dic={} start = time.time() with gzip.open('data/semmeddb_triple_freqs.txt.gz') as f: for line in f: s,f = line.rstrip().split('\t') sem_trip_dic[s]=f print(len(sem_trip_dic)) end = time.time() print("\tTime taken:", round((end - start) / 60, 3), "minutes") return sem_trip_dic def compare(aList,bList,name): pValCut=1e-1 #predIgnore = ['PART_OF','ISA','LOCATION_OF','PROCESS_OF','ADMINISTERED_TO','METHOD_OF','USES','COEXISTS_WITH','ASSOCIATED_WITH','compared_with'] #predIgnore = ['PART_OF','ISA','LOCATION_OF','PROCESS_OF','ADMINISTERED_TO','METHOD_OF','USES','compared_with'] aDic=defaultdict(dict) for a in aList.split(','): print(a) fPath=os.path.join('data',a+'.gz') if os.path.isfile(fPath): with gzip.open(fPath,'rb') as f: for line in f: s,subCUI,sub,subType,pred,objCUI,obj,objType,f1,f2,f3,f4,o,p,pubs = line.rstrip().split('\t') if float(p)<pValCut: if pred not in predIgnore: aDic[a][s]={'subCUI':subCUI,'sub':sub,'subType':subType,'objCUI':objCUI,'obj':obj,'objType':objType,'pred':pred,'localCounts':f1,'localTotal':f2,'globalCounts':f3,'globalTotal':f4,'odds':o,'pval':p,'pubs':pubs.split(';')} else: print(fPath,'does not exist') bDic=defaultdict(dict) for b in bList.split(','): print(b) fPath=os.path.join('data',b+'.gz') if os.path.isfile(fPath): with gzip.open(fPath,'rb') as f: for line in f: s,subCUI,sub,subType,pred,objCUI,obj,objType,f1,f2,f3,f4,o,p,pubs = line.rstrip().split('\t') if float(p)<pValCut: #ignore less useful predicates if pred not in predIgnore: bDic[b][s]={'subCUI':subCUI,'sub':sub,'subType':subType,'objCUI':objCUI,'obj':obj,'objType':objType,'pred':pred,'localCounts':f1,'localTotal':f2,'globalCounts':f3,'globalTotal':f4,'odds':o,'pval':p,'pubs':pubs.split(';')} else: print(fPath,'does not exist') print(len(aDic)) 
print(len(bDic)) #compare two sets of data aComDic=defaultdict(dict) bComDic=defaultdict(dict) joinDic={} predDic={} joinCount=0 for a in aDic: print(a) counter=0 for s1 in aDic[a]: counter+=1 pc = round((float(counter)/float(len(aDic[a])))*100,1) #print(counter,pc,pc%10) if pc % 10 == 0: print(pc,'%') aSub,aPred,aObj = aDic[a][s1]['sub'],aDic[a][s1]['pred'],aDic[a][s1]['obj'] if aSub in termIgnore or aObj in termIgnore: continue for b in bDic: #print(b) for s2 in bDic[b]: #print(s1,s2) bSub,bPred,bObj = bDic[b][s2]['sub'],bDic[b][s2]['pred'],bDic[b][s2]['obj'] #print(aObj,bSub) #testing removal of words if bSub in termIgnore or bObj in termIgnore: continue if aObj == bSub: if aPred in predDic: predDic[aPred]+=1 else: predDic[aPred]=1 if bPred in predDic: predDic[bPred]+=1 else: predDic[bPred]=1 #print a,s1,aDic[a][s1],b,s2,bDic[b][s2] aComDic[a][s1]=aDic[a][s1] bComDic[b][s2]=bDic[b][s2] joinCount+=1 joinDic[joinCount]={'s1':s1,'aSubCUI':aDic[a][s1]['subCUI'],'aSub':aSub,'aSubType':aDic[a][s1]['subType'],'aPred':aPred,'aObjCUI':aDic[a][s1]['objCUI'],'aObj':aObj,'aObjType':aDic[a][s1]['objType'],'s2':s2,'bSubCUI':bDic[b][s2]['subCUI'],'bSub':bSub,'bSubType':bDic[b][s2]['subType'],'bPred':bPred,'bObjCUI':bDic[b][s2]['objCUI'],'bObj':bObj,'bObjType':bDic[b][s2]['subType'],'overlap':aObj,'d1':a,'d2':b} #get some summaries print(predDic) for c in aComDic: print(c,len(aComDic[c])) if not os.path.exists(os.path.join('data','compare',name)): os.mkdir(os.path.join('data','compare',name)) print("Directory " , name , " created ") else: print("Directory " , name , " already exists") with open(os.path.join('data','compare',name,'a_nodes.json'),'w') as outfile: #outfile={'source':a:'sem':s1:aDic[a][s1]} json.dump(aComDic,outfile) with open(os.path.join('data','compare',name,'b_nodes.json'),'w') as outfile: #outfile={'source':a:'sem':s1:aDic[a][s1]} json.dump(bComDic,outfile) o = open(os.path.join('data','compare',name,'rels.tsv'),'w') for i in joinDic: #outfile={'source':a:'sem':s1:aDic[a][s1]} o.write(str(i)+'\t'+joinDic[i]['s1']+'\t'+joinDic[i]['aSub']+'\t'+joinDic[i]['aSubType']+'\t'+joinDic[i]['aPred']+'\t'+joinDic[i]['aObj']+'\t'+joinDic[i]['s2']+'\t'+joinDic[i]['bSub']+'\t'+joinDic[i]['bPred']+'\t'+joinDic[i]['bObj']+'\t'+joinDic[i]['overlap']+'\t'+joinDic[i]['d1']+'\t'+joinDic[i]['d2']+'\n') o.close() #full summary outFile o = open(os.path.join('data','compare',name,'summary.tsv'),'w') o.write('Gene\tPubMedIDs1\tSubject1\tSubject1_Type\tCUI\tPredicate1\tObject1/Subject2\tObject1/Subject2_Type\tCUI\tPredicate2\tObject2\tObject2_Type\tCUI\tPubMedIDs2\tDisease\n') for i in joinDic: a = joinDic[i]['d1'] b = joinDic[i]['d2'] sem1 = joinDic[i]['s1'] sem2 = joinDic[i]['s2'] o.write(joinDic[i]['d1']+'\t'+";".join(aDic[a][sem1]['pubs'])+'\t'+joinDic[i]['aSub']+'\t'+joinDic[i]['aSubType']+'\t'+joinDic[i]['aSubCUI']+'\t'+joinDic[i]['aPred']+'\t'+joinDic[i]['aObj']+'\t'+joinDic[i]['aObjType']+'\t'+joinDic[i]['aObjCUI']+'\t'+joinDic[i]['bPred']+'\t'+joinDic[i]['bObj']+'\t'+joinDic[i]['bObjType']+'\t'+joinDic[i]['bObjCUI']+'\t'+";".join(bDic[b][sem2]['pubs'])+'\t'+joinDic[i]['d2']+'\n') o.close() if __name__ == '__main__': parser = argparse.ArgumentParser(description='SemMedDB enrichment search') #parser.add_argument('integers', metavar='N', type=int, nargs='+', # help='an integer for the accumulator') parser.add_argument('-m,--method', dest='method', help='(get_data, compare)') parser.add_argument('-q,--query', dest='query', help='the pubmed query') parser.add_argument('-a,--query_a', dest='query_a', 
help='list of enriched data sets') parser.add_argument('-b,--query_b', dest='query_b', help='list of enriched data sets') parser.add_argument('-n,--name', dest='name', help='a name for the comparison') args = parser.parse_args() print(args) if args.method == None: print("Please provide a method (-m): [get_data, compare]") else: if args.method == 'get_data': if args.query == None: print('Please provide a query (-q) [e.g. pcsk9]') else: #sem_trip_dic=read_sem_triples() sem_trip_dic={} print('creating enriched article set') queries=args.query.rstrip().split(',') for q in queries: pub_sem(q,sem_trip_dic) elif args.method == 'compare': if args.query_a == None or args.query_b == None or args.name == None: print('Please provide two lists of data sets to compare (-a and -b) and a name (-n)') else: print('Comparing data...') compare(args.query_a,args.query_b, args.name) #delete_index(args.index_name) else: print("Not a good method") #pub_sem('pcsk9') #pub_sem('oropharyngeal cancer') #pub_sem('prostate cancer') #pub_sem('breast cancer') #get_semmed_data('semmeddb_triple_freqs',filterData={"terms":{"SUB_PRED_OBJ":['Encounter due to counseling:PROCESS_OF:Family']}})
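
# --- Worked example (illustrative; the counts below are made up) ---
# The enrichment step reduces to a 2x2 Fisher's exact test: the triple's count
# within the query-specific article set against its frequency in SemMedDB as a
# whole. A standalone sanity check using the fet() helper defined above:
def _fet_example():
    # e.g. a triple seen 12 times among 3000 local predications, versus
    # 40 times among ~25M predications globally
    odds, pval = fet(12, 3000, 40, 25000000)
    return odds, pval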
{ "content_hash": "68abb97782354ec855aaef1bc26f4b77", "timestamp": "", "source": "github", "line_count": 443, "max_line_length": 415, "avg_line_length": 35.12415349887133, "alnum_prop": 0.6422879177377891, "repo_name": "MRCIEU/melodi", "id": "ae0069de8637dec4732950ca143539e0ec1a5907", "size": "15560", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "semmeddb-index/scripts/run.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "44467" }, { "name": "CoffeeScript", "bytes": "59265" }, { "name": "HTML", "bytes": "253995" }, { "name": "JavaScript", "bytes": "78844" }, { "name": "Python", "bytes": "250291" }, { "name": "Shell", "bytes": "6079" } ], "symlink_target": "" }
"""Service to cache specific Titan files in appserver memory. Documentation: http://code.google.com/p/titan-files/wiki/MemoryFilesService """ import os from titan.common import datastructures from titan.common import hooks from titan.files import files SERVICE_NAME = 'memory-files' # The default size of the MRU dictionary. This is how many File objects will be # allowed in the global cache at a time. # TODO(user): Allow this to be customized by a service config setting. DEFAULT_MRU_SIZE = 300 _ENVIRON_FILES_STORE_NAME = 'titan-memory-files-store' # The "RegisterService" method is required for all Titan service plugins. def RegisterService(): hooks.RegisterHook(SERVICE_NAME, 'file-get', hook_class=HookForGet) hooks.RegisterHook(SERVICE_NAME, 'file-exists', hook_class=HookForExists) hooks.RegisterHook(SERVICE_NAME, 'file-write', hook_class=HookForWrites) hooks.RegisterHook(SERVICE_NAME, 'file-touch', hook_class=HookForWrites) hooks.RegisterHook(SERVICE_NAME, 'file-delete', hook_class=HookForWrites) hooks.RegisterHook(SERVICE_NAME, 'file-copy', hook_class=HookForCopy) class HookForGet(hooks.Hook): """A hook for files.Get().""" def Pre(self, **kwargs): paths = files.ValidatePaths(kwargs['paths']) file_objs, is_final_result = _Get(paths) if is_final_result: return hooks.TitanMethodResult(file_objs) return {'paths': file_objs} class HookForExists(hooks.Hook): """A hook for files.Exists().""" def Pre(self, **kwargs): path = files.ValidatePaths(kwargs['path']) file_obj, is_final_result = _Get(path) if is_final_result: return hooks.TitanMethodResult(bool(file_obj)) return {'path': file_obj} class HookForWrites(hooks.Hook): """A hook for files.Write(), files.Delete(), and files.Touch().""" def Pre(self, **kwargs): paths = files.ValidatePaths(kwargs.get('path', kwargs.get('paths'))) _Clear(paths) class HookForCopy(hooks.Hook): """A hook for files.Copy().""" def Pre(self, **kwargs): paths = files.ValidatePaths(kwargs['destination_path']) _Clear(paths) def _GetRequestLocalFilesStore(): """Returns a request-local MRUDict mapping paths to File objects.""" # os.environ is replaced by the runtime environment with a request-local # object, allowing non-string types to be stored globally in the environment # and automatically cleaned up at the end of each request. if _ENVIRON_FILES_STORE_NAME not in os.environ: local_files_store = datastructures.MRUDict(max_size=DEFAULT_MRU_SIZE) os.environ[_ENVIRON_FILES_STORE_NAME] = local_files_store return os.environ[_ENVIRON_FILES_STORE_NAME] def _Get(paths): """Get File objects from paths, and populate the global cache.""" is_multiple = hasattr(paths, '__iter__') local_files_store = _GetRequestLocalFilesStore() paths_set = set(paths if is_multiple else [paths]) cached_file_keys = set(local_files_store.keys()) cached_paths = list(cached_file_keys & paths_set) uncached_paths = list(paths_set - cached_file_keys) if cached_paths and not uncached_paths: # All of the requested files are currently in the cache; return this subset. file_objs = dict( (k, local_files_store[k]) for k in cached_paths if local_files_store[k]) return __NormalizeResult(file_objs, is_multiple) # Merge file objects which existed in the global cache into the result. # Order is important here: these need to be added before the uncached_paths # are merged in. file_objs = {} for path in cached_paths: # This affects the MRUDict, so only grab the value once. value = local_files_store[path] if value: # The cached calue could be None, meaning the file doesn't exist. 
file_objs[path] = value new_file_objs = files.Get(uncached_paths, disabled_services=True) local_files_store.update(new_file_objs) file_objs.update(new_file_objs) # Also store Nones, so that non-existent files are not re-fetched. for path in uncached_paths: if path not in new_file_objs: local_files_store[path] = None return __NormalizeResult(file_objs, is_multiple) def _Clear(paths): """Remove paths from the global cache.""" is_multiple = hasattr(paths, '__iter__') local_files_store = _GetRequestLocalFilesStore() paths_list = paths if is_multiple else [paths] for path in paths_list: if path in local_files_store: del local_files_store[path] def __NormalizeResult(file_objs, is_multiple): """Handle all result cases including multiple paths and non-existent paths.""" # Return should be compatible with the path arg to files.Get(), or # should give is_final_result=True to signal wrapper to use TitanMethodResult. is_final_result = False if is_multiple: result = file_objs.values() else: result = file_objs.values()[0] if file_objs else None if not result: # Non-existent result in cache or otherwise. Signal a short-circuit return. is_final_result = True result = {} if is_multiple else None return result, is_final_result
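
# --- Illustrative sketch (not Titan's MRUDict implementation) ---
# The request-local store above depends on datastructures.MRUDict. A minimal,
# hypothetical stand-in built on collections.OrderedDict shows the bounded-size
# idea; unlike a true MRU dict it only evicts the oldest-inserted entry.
import collections

class _BoundedDictSketch(collections.OrderedDict):
  """Drops the oldest entry once max_size is exceeded."""

  def __init__(self, max_size, *args, **kwargs):
    self.max_size = max_size
    super(_BoundedDictSketch, self).__init__(*args, **kwargs)

  def __setitem__(self, key, value):
    super(_BoundedDictSketch, self).__setitem__(key, value)
    if len(self) > self.max_size:
      self.popitem(last=False)  # evict the oldest insertion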
{ "content_hash": "cd717d52c4f34ffe7184f6360b3da790", "timestamp": "", "source": "github", "line_count": 133, "max_line_length": 80, "avg_line_length": 37.714285714285715, "alnum_prop": 0.7175039872408293, "repo_name": "paulftw/titan-files", "id": "8aba4660d7d8ed1bb4fba65522458e88def80e45", "size": "5635", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "titan/services/memory_files.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Perl", "bytes": "11357" }, { "name": "Python", "bytes": "684850" } ], "symlink_target": "" }
import asyncio
import sys


def main():
    """
    Initialize the event loop and clean up when exiting.
    """
    try:
        from .panel import start

        loop = asyncio.get_event_loop()
        loop.run_until_complete(start())
    except KeyboardInterrupt:
        sys.exit(130)
    finally:
        pending = asyncio.Task.all_tasks()
        gathered = asyncio.gather(*pending)

        try:
            gathered.cancel()
            loop.run_until_complete(gathered)
            gathered.exception()
        except asyncio.CancelledError:
            pass
        finally:
            loop.close()

if __name__ == '__main__':
    main()
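
# --- Compatibility note (hedged sketch, not part of the package) ---
# asyncio.Task.all_tasks(), used above, was deprecated in Python 3.7 and
# removed in 3.9. On modern interpreters the whole run/cancel/close dance is
# usually replaced wholesale by asyncio.run(), which cancels pending tasks and
# closes the loop itself:
def _modern_main(coro):
    try:
        asyncio.run(coro)
    except KeyboardInterrupt:
        sys.exit(130)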
{ "content_hash": "f6a6d54ecc7b4f6789163f2b463fc975", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 57, "avg_line_length": 21, "alnum_prop": 0.554531490015361, "repo_name": "arianon/panel", "id": "333089cd788e4e3e95125ae51cf5288ed4039185", "size": "677", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "panel/__main__.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "11367" } ], "symlink_target": "" }
from google.appengine.ext import ndb from flask.ext import wtf import flask import wtforms import auth import config import model import util from main import app import random # Generate useful random data from faker import Factory ############################################################################### # Help Functions ############################################################################### def _return_state(state): """Returns either True or False depending on state. State can be "true", "false", or "random.""" if state == "true": return True elif state == "false": return False else: return True if round(random.random()) else False ############################################################################### # Population Page ############################################################################### @app.route('/admin/populate', methods=['GET']) @auth.admin_required def admin_populate(form=None): form_user = PopulateUserForm() form_col = PopulateCollectionForm() form_col_user = PopulateCollectionUserForm() form_tag = PopulateTagForm() form_icon = PopulateIconForm() form_waypoint = PopulateWayPointForm() return flask.render_template( 'admin/populate/populate.html', title='Populate', html_class='populate', form_user=form_user, form_col=form_col, form_col_user=form_col_user, form_tag=form_tag, form_icon=form_icon, form_waypoint=form_waypoint ) ## USER ---------------------- @app.route('/admin/populate/user', methods=['POST']) @auth.admin_required def admin_populate_user(): # Create a fake instance fake = Factory.create() form_user = PopulateUserForm() if form_user.validate_on_submit(): user_dbs = [] nr = form_user.number_of_users.data if nr > 1000: flask.flash('You cannot create more than 1000 new users at once. Try again!',\ category='danger') return flask.redirect(flask.url_for('admin_populate')) for i in range(nr): user = fake.profile(fields=['name','username','mail']) user_dbs.append(model.User(name=user['name'],\ username=user['username'], email=user['mail'], active=_return_state(form_user.active.data), admin=_return_state(form_user.admin.data), verified=_return_state(form_user.verified.data))) keys = ndb.put_multi(user_dbs) for key in keys: # run the new user function auth.new_user(key) flask.flash('Created {nr} new users'.\ format(nr=len(user_dbs)), category='success') return flask.redirect(flask.url_for('admin_populate')) ############################################################################### # Collection @app.route('/admin/populate/collection', methods=['POST']) @auth.admin_required def admin_populate_collection(): fake = Factory.create() form_col = PopulateCollectionForm() # Somehow `validate_on_submit()` is False even when submited, why? if form_col.validate_on_submit() or True: print "Collection Valid" col_dbs = [] nr = form_col.number_of_collections.data if nr > 200: flask.flash('You cannot create more than 200 new collections at once. Try again!',\ category='danger') return flask.redirect(flask.url_for('admin_populate')) creator_random = False if form_col.creator.data == "current": creator = auth.current_user_key() elif form_col.creator.data == "search": user_dbs, _ = model.User.get_dbs(email=form_col.user_email.data,limit=2) if user_dbs: creator = user_dbs[0].key else: flask.flash('User with email {} not found. 
Try again!'.\
          format(form_col.user_email.data),\
          category='danger')
        return flask.redirect(flask.url_for('admin_populate'))
    else:
      creator_random = True
      user_keys = model.User.query().fetch(limit=5000, keys_only=True)
    for i in range(nr):
      if creator_random:
        creator = random.choice(user_keys)
        print creator
      sentence_length = int(form_col.desc_min.data+\
        random.random()*(form_col.desc_max.data-form_col.desc_min.data))
      if sentence_length <= 5:
        desc = ""
      else:
        desc = fake.text(max_nb_chars=sentence_length)
      model.Collection.create(name=fake.word(),
          creator=creator,
          description=desc,
          active=_return_state(form_col.active.data),
          public=_return_state(form_col.public.data))
    flask.flash('Created {nr} new collections'.\
        format(nr=nr), category='success')
  else:
    print "Collection NOT Valid"
  return flask.redirect(flask.url_for('admin_populate'))


###############################################################################
# CollectionUser
@app.route('/admin/populate/collection_user', methods=['POST'])
@auth.admin_required
def admin_populate_collection_user():
  #TODO add this to a task!
  # it takes quite a long time
  #fake = Factory.create()
  form_col_user = PopulateCollectionUserForm()
  permission_list = ('none','read','write','admin','creator')
  # Somehow `validate_on_submit()` is False even when submitted, why?
  if form_col_user.validate_on_submit() or True:
    user_keys = model.User.query().fetch(limit=5000, keys_only=True)
    cnt = 0
    cnt_users = 0
    for key in model.Collection.qry(private=False,public=False)\
        .fetch(keys_only=True, limit=form_col_user.max_collections.data):
      user_nr = int(form_col_user.user_min.data+\
        random.random()*(form_col_user.user_max.data-form_col_user.user_min.data))
      cnt_users += user_nr
      users = random.sample(user_keys,user_nr)
      if form_col_user.permission.data == "random":
        users_perm = []
        for user in users:
          users_perm.append((user,random.choice(permission_list)))
        model.Collection.add_users(key,users_perm,permission=False)
      else:
        model.Collection.add_users(key,users,\
          permission=form_col_user.permission.data)
      cnt += 1
    flask.flash('Added a total of {usr_nr} users to {nr} collections'.\
        format(usr_nr=cnt_users, nr=cnt), category='success')
  return flask.redirect(flask.url_for('admin_populate'))

#
###############################################################################
# Tag
@app.route('/admin/populate/tag/', methods=['POST'])
@auth.admin_required
def admin_populate_tag():
  form_tag = PopulateTagForm()
  if form_tag.validate_on_submit() or True:
    if form_tag.random_tags.data:
      if form_tag.max_tags.data > 500:
        flask.flash('Not more than 500 random tags can be created at once. Try again!',\
          category='danger')
        return flask.redirect(flask.url_for('admin_populate'))
      fake = Factory.create()
      tags = fake.words(nb=form_tag.max_tags.data)
    else:
      tags = form_tag.tags.data.split(', ')
    # Are icons needed as well?
    if form_tag.icon.data:
      icon_keys, _ = model.Icon.get_dbs(keys_only=True,limit=2000,
          collection=model.Collection.top_key())
    else:
      icon_keys = None
    cnt = 0
    incr = True if form_tag.incr.data=='true' else False
    for tag in tags:
      icon_key = random.choice(icon_keys) if icon_keys else None
      model.Tag.add(tag,auto_incr=incr,icon_key=icon_key)
      cnt += 1
    flask.flash('Added {nr} tags'.\
        format(nr=cnt), category='success')
  return flask.redirect(flask.url_for('admin_populate'))

#
###############################################################################
# Icon
@app.route('/admin/populate/icon', methods=['POST'])
@auth.admin_required
def admin_populate_icon():
  form_icon = PopulateIconForm()
  if form_icon.validate_on_submit() or True:
    names = ""
    fs = flask.request.files.getlist("icon")
    cnt = 0
    for f in fs:
      icon = f.read()
      model.Icon.create(icon=icon, name=f.filename.split('.')[0])
      names += f.filename.split('.')[0]+" "
      cnt += 1
    flask.flash('Added {} icons: {}'.format(cnt, names), category='success')
  return flask.redirect(flask.url_for('admin_populate'))

###############################################################################
# WayPoint
@app.route('/admin/populate/waypoint', methods=['POST'])
@auth.admin_required
def admin_populate_waypoint():
  form_waypoint = PopulateWayPointForm()
  if form_waypoint.validate_on_submit() or True:
    # Create a fake instance
    fake = Factory.create()
    # create collection list
    if form_waypoint.collection.data == "random":
      if form_waypoint.collection_user.data:
        email = form_waypoint.collection_user.data
        col_usr_dbs = model.CollectionUser.qry(user_email=email).\
            fetch(limit=form_waypoint.max_collections.data)
        if not col_usr_dbs:
          flask.flash("No collection found for user {}."\
              .format(email), category='danger')
          return flask.redirect(flask.url_for('admin_populate'))
        col_keys=[]
        for db in col_usr_dbs:
          col_keys.append(db.collection)
      else:
        col_keys = model.Collection.qry().\
            fetch(limit=form_waypoint.max_collections.data,\
            keys_only=True)
    elif form_waypoint.collection.data == "search":
      col_keys = [ndb.Key(urlsafe=form_waypoint.collection_key.data)]
    else:
      # 'none' is not implemented/possible
      flask.flash("Key error, 'none' is not possible.", category='danger')
      return flask.redirect(flask.url_for('admin_populate'))
    # set up tag list
    if form_waypoint.tags.data == "list":
      tag_list = form_waypoint.tag_list.data.split(', ')
    elif form_waypoint.tags.data == "random":
      #tag_dbs = model.Tag.qry(collection=model.Collection.top_key()).fetch(limit=10000)
      tag_dbs, _ = model.Tag.get_dbs(collection=model.Collection.top_key(), limit=10000)
      tag_list = []
      for db in tag_dbs:
        tag_list.append(db.name)
    else:
      tag_list = None
    dbs = []
    cnt = 0
    # create waypoints
    for key in col_keys:
      for i in range(0,form_waypoint.max_waypoints.data):
        name = fake.word()
        desc = fake.sentence()
        # roughly switzerland
        lat = random.random()*3+45
        lng = random.random()*4 + 6
        geo = ndb.GeoPt(lat,lng)
        db = model.WayPoint(name=name,description=desc,collection=key,geo=geo)
        if tag_list:
          tag_nr = int(random.random()*form_waypoint.max_tags.data)
          while tag_nr > len(tag_list):
            tag_nr -=1
          db.add_tags(random.sample(tag_list,tag_nr))
        dbs.append(db)
        cnt += 1
    ndb.put_multi(dbs)
    flask.flash('Added {} waypoints'.format(cnt), category='success')
  return flask.redirect(flask.url_for('admin_populate'))

#
#
###############################################################################
# Population Forms
###############################################################################
# User
class PopulateUserForm(wtf.Form):
  number_of_users = wtforms.IntegerField(
      u'New Users (#)',
      [wtforms.validators.required()],
      default=100
  )
  verified = wtforms.RadioField(u'Verified',[wtforms.validators.required()],\
      choices=[("true", "thumbs-o-up"),\
      ("false","thumbs-o-down"),("random","random")],default="true")
  admin = wtforms.RadioField(u'Admin',[wtforms.validators.required()], \
      choices=[("true", "thumbs-o-up"),\
      ("false","thumbs-o-down"),("random","random")],default="random")
  active = wtforms.RadioField(u'Active',[wtforms.validators.required()],\
      choices=[("true", "thumbs-o-up"),\
      ("false","thumbs-o-down"),("random","random")],default="true")

  def __init__(self, *args, **kwds):
    super(PopulateUserForm, self).__init__(*args, **kwds)

###############################################################################
# Collection
class PopulateCollectionForm(wtf.Form):
  number_of_collections = wtforms.IntegerField(
      u'New Collections (#)',
      [wtforms.validators.required()],
      default=10
  )
  desc_min = wtforms.IntegerField(
      u'Min Description Length',
      [wtforms.validators.required()],
      default=5
  )
  desc_max = wtforms.IntegerField(
      u'Max Description Length',
      [wtforms.validators.required()],
      default=65
  )
  user_email = wtforms.StringField(
      u'Creator email',description="Only if search is active",
      validators=[wtforms.validators.Email()],
  )
  creator = wtforms.RadioField(u'Creator',[wtforms.validators.required()],\
      choices=[("current", "Current user"),\
      ("random","Random users"),("search","Search for a user")],default="current")
  public = wtforms.RadioField(u'Public',[wtforms.validators.required()], \
      choices=[("true", "thumbs-o-up"),\
      ("false","thumbs-o-down"),("random","random")],default="false")
  active = wtforms.RadioField(u'Active',[wtforms.validators.required()],\
      choices=[("true", "thumbs-o-up"),\
      ("false","thumbs-o-down"),("random","random")],default="true")

  def __init__(self, *args, **kwds):
    super(PopulateCollectionForm, self).__init__(*args, **kwds)

###############################################################################
# CollectionUser
class PopulateCollectionUserForm(wtf.Form):
  user_min = wtforms.IntegerField(
      u'Min user to collection',
      [wtforms.validators.required()],
      default=5
  )
  user_max = wtforms.IntegerField(
      u'Max user to collection',
      [wtforms.validators.required()],
      default=20
  )
  max_collections = wtforms.IntegerField(
      u'Max collections',
      [wtforms.validators.required()],
      default=30
  )
  permission = wtforms.RadioField(u'Permission',[wtforms.validators.required()],\
      choices=[\
      ("none","ban"),\
      ("read", "book"),\
      ("write","pencil"),\
      ("admin","user"),\
      ("creator","user-plus"),\
      ("random","random")],default="read")

  def __init__(self, *args, **kwds):
    super(PopulateCollectionUserForm, self).__init__(*args, **kwds)

###############################################################################
# Tag
class PopulateTagForm(wtf.Form):
  DEFAULT_TAGS="""hiking, skitour, ski, mountain, peak, hill,
T1, T2, T3, T4, T5, T6, -, +,
river, lake, forest, over 3000, over 4000,
bike, mountainbike, running, hangover,
accommodation, hut, hostel, hotel,
bus station, train station, public transport, station,
restaurant, food, supermarket, beer, break, view, danger,
ship, train, cable car, parking"""
  tags = wtforms.TextAreaField(
      u'Tag list',
      default=DEFAULT_TAGS,
      description="Separate the tags with a comma"
  )
  max_tags = wtforms.IntegerField(
      u'Max random tags',
      default=50
  )
  random_tags = wtforms.BooleanField(u'Random tags',default=False)
  incr = wtforms.RadioField(u'Increment',[wtforms.validators.required()], \
      choices=[("true", "thumbs-o-up"),\
("false","thumbs-o-down")],default="false") icon = wtforms.RadioField(u'Add Icon',[wtforms.validators.required()], \ choices=[("true", "thumbs-o-up"),\ ("false","thumbs-o-down")],default="false",\ description="Only gobal icon which already exist.") def __init__(self, *args, **kwds): super(PopulateTagForm, self).__init__(*args, **kwds) ############################################################################### # Icon class PopulateIconForm(wtf.Form): icon = wtforms.FileField(u'Icons') def __init__(self, *args, **kwds): super(PopulateIconForm, self).__init__(*args, **kwds) ############################################################################### # WayPoint class PopulateWayPointForm(wtf.Form): collection = wtforms.RadioField(u'Collection',[wtforms.validators.required()],\ description='How should collection be used (random, none (toplevel), search).', choices=[\ ("random", "random"),\ ("search","key")],default="random") max_waypoints = wtforms.IntegerField( u'Max waypoints per collection', [wtforms.validators.required()], description='How many waypoints are added to one collection', default=5 ) max_collections = wtforms.IntegerField( u'Max collections', [wtforms.validators.required()], description='How many collections are used to add waypoints, only if random', default=10 ) collection_user = wtforms.StringField( u'Collection user email.', description='Only take random collection with this user.' ) collection_key = wtforms.StringField( u'URL safe collection key.', [wtforms.validators.required()], description='Only if search.' ) tags = wtforms.RadioField(u'Random or from list',[wtforms.validators.required()],\ description='Add random tags from a list or from all possible tags, or don\'t add tags at all.', choices=[\ ("list","list"),\ ("random", "random"),\ ("none", "ban")\ ],default="list") max_tags = wtforms.IntegerField( u'Max tags per collection', [wtforms.validators.required()], description='Can also be less (random).', default=10 ) DEFAULT_TAGS="""hiking, skitour, ski, mountain, peak, hill, T1, T2, T3, T4, T5, T6, -, +, river, lake, forest, over 3000, over 4000, bike, moutainbike, running, hangover, accomodation, hut, hostel, hotel, bus station, train station, public transport, station, restaurant, food, supermarket, beer, break, view, danger, ship, train, cable car, parking""" tag_list = wtforms.TextAreaField( u'Tag list', default=DEFAULT_TAGS, description="Separate the tags with a comma" ) def __init__(self, *args, **kwds): super(PopulateWayPointForm, self).__init__(*args, **kwds)
{ "content_hash": "8789d64122dffa270939605f0e6a1a4b", "timestamp": "", "source": "github", "line_count": 497, "max_line_length": 354, "avg_line_length": 35.65191146881288, "alnum_prop": 0.5949545685422428, "repo_name": "wodore/wodore-gae", "id": "b01d5fa89347bbb243bd9d39c140ad1875fc3b03", "size": "17736", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "main/control/populate.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "5926" }, { "name": "CoffeeScript", "bytes": "28017" }, { "name": "HTML", "bytes": "116770" }, { "name": "JavaScript", "bytes": "65" }, { "name": "Python", "bytes": "290702" }, { "name": "Shell", "bytes": "81" } ], "symlink_target": "" }
from PyQt4 import QtGui, QtCore

from ..ramps import KeyFrameList
from CommonWidgets import MyDoubleSpinBox
import rampage.format as fmt
import rampage.server as server
from DictEditor import DictEditor

all_hooks_dict = dict(server.Hooks.default_mesgs)


class QEditKeyFrameDialog(QtGui.QDialog):

    def __init__(self, key_name, kf, parent_list, disabled_list, parent):
        super(QEditKeyFrameDialog, self).__init__(parent)
        self.setWindowTitle('Edit keyframe')

        self.text_name = QtGui.QLineEdit(key_name, self)
        self.text_comment = QtGui.QLineEdit(kf['comment'], self)
        self.button_ok = QtGui.QPushButton('Ok', self)
        self.button_ok.clicked.connect(self.accept)
        self.button_cancel = QtGui.QPushButton('Cancel', self)
        self.button_cancel.clicked.connect(self.reject)

        self.combo_parent = QtGui.QComboBox(self)
        self.combo_parent.addItem('None')
        self.combo_parent.addItems(parent_list)
        if kf['parent'] is None:
            self.current_parent_index = 0  # assume no parent
        else:
            self.current_parent_index = parent_list.index(kf['parent']) + 1
        self.combo_parent.setCurrentIndex(self.current_parent_index)

        self.grid = QtGui.QGridLayout(self)
        self.grid.addWidget(QtGui.QLabel('Name'), 0, 0)
        self.grid.addWidget(self.text_name, 0, 1)
        self.grid.addWidget(QtGui.QLabel('Comment'), 1, 0)
        self.grid.addWidget(self.text_comment, 1, 1)
        parent_string = 'Parent (current: ' + str(kf['parent']) + ')'
        self.grid.addWidget(QtGui.QLabel(parent_string), 2, 0)
        self.grid.addWidget(self.combo_parent)
        self.grid.addWidget(self.button_ok, 3, 0)
        self.grid.addWidget(self.button_cancel, 3, 1)
        self.setLayout(self.grid)

        for i, is_disabled in enumerate(disabled_list):
            if is_disabled:
                j = self.combo_parent.model().index(i + 1, 0)
                self.combo_parent.model().setData(j, QtCore.QVariant(0),
                                                  QtCore.Qt.UserRole-1)
        self.kf = kf
        self.key_name = key_name
        self.parent_list = parent_list

    def exec_(self):
        execReturn = super(QEditKeyFrameDialog, self).exec_()
        name = str(self.text_name.text())
        comment = str(self.text_comment.text())
        parent_string = str(self.combo_parent.currentText())
        return execReturn, name, comment, parent_string


class QKeyFrame(QtGui.QWidget):
    """GUI for an individual keyframe.

    Displays and can edit the time, comment and parent of a keyframe."""

    edit_signal = QtCore.pyqtSignal(object)
    delete_signal = QtCore.pyqtSignal(object)
    insert_before_signal = QtCore.pyqtSignal(object)
    add_child_signal = QtCore.pyqtSignal(object)
    time_changed_signal = QtCore.pyqtSignal(object, object)
    edit_hooks = QtCore.pyqtSignal()

    def __init__(self, key_name, kf, settings):
        super(QKeyFrame, self).__init__()
        self.settings = settings
        self.vbox = QtGui.QVBoxLayout(self)
        self.time_spin_box = MyDoubleSpinBox(self)
        self.name_label = QtGui.QLabel(self)
        self.abs_time_label = QtGui.QLabel(self)
        self.setFocusProxy(self.time_spin_box)
        for w in [self.name_label, self.abs_time_label, self.time_spin_box]:
            w.setAlignment(QtCore.Qt.AlignRight)
            self.vbox.addWidget(w)
        self.setLayout(self.vbox)

        self.time_spin_box.setKeyboardTracking(False)
        self.time_spin_box.valueChanged.connect(self.handleTimeChanged)
        # TODO: see if these numbers need to be changed in settings
        self.time_spin_box.setDecimals(3)
        self.time_spin_box.setMaximum(1e5)
        self.time_spin_box.setMinimum(-1e5)

        self.update(key_name, kf)

        # create actions to edit the keyframe
        self.edit_action = QtGui.QAction('&Edit', self)
        self.insert_action = QtGui.QAction('&Insert Before', self)
        self.add_action = QtGui.QAction('&Add child', self)
        self.delete_action = QtGui.QAction('Delete', self)

        # connect actions to slots
        self.edit_action.triggered.connect(self.edit)
        self.insert_action.triggered.connect(self.insert)
        self.add_action.triggered.connect(self.add)
        self.delete_action.triggered.connect(self.delete)

        # create hook menu
        self.hook_menu = QtGui.QMenu('Hooks', self)
        self.add_hook_menu = QtGui.QMenu('Add', self)
        self.edit_hook_menu = QtGui.QMenu('Edit', self)
        self.del_hook_menu = QtGui.QMenu('Delete', self)
        self.hook_menu.addMenu(self.add_hook_menu)
        self.hook_menu.addMenu(self.edit_hook_menu)
        self.hook_menu.addMenu(self.del_hook_menu)

        # create context menu
        self.pop_menu = QtGui.QMenu(self)
        self.pop_menu.addAction(self.edit_action)
        self.pop_menu.addAction(self.insert_action)
        self.pop_menu.addAction(self.add_action)
        self.pop_menu.addSeparator()
        self.pop_menu.addMenu(self.hook_menu)
        self.pop_menu.addSeparator()
        self.pop_menu.addAction(self.delete_action)

        # right clicking on the keyframe will bring up the context menu
        self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        signal_str = 'customContextMenuRequested(const QPoint&)'
        self.connect(self, QtCore.SIGNAL(signal_str), self.onContextMenu)

    def update(self, key_name, kf):
        self.key_name = key_name
        self.kf = kf
        self.updateUI()

    def updateUI(self):
        # disconnect the slot before updating, reconnect at the end
        self.time_spin_box.valueChanged.disconnect(self.handleTimeChanged)
        self.time_spin_box.setValue(self.kf['time'])
        self.abs_time_label.setText(str(self.kf['__abs_time__']))
        self.generateToolTip()
        if 'hooks' in self.kf:
            add_hook_symbol = len(self.kf['hooks']) > 0
        else:
            add_hook_symbol = False
        if add_hook_symbol:
            self.name_label.setText(fmt.b(fmt.red(self.key_name + ' !')))
        else:
            self.name_label.setText(fmt.b(fmt.red(self.key_name)))
        self.time_spin_box.valueChanged.connect(self.handleTimeChanged)

    def generateToolTip(self):
        tt = 'Name: ' + fmt.b(fmt.red(self.key_name)) + '<br>\n'
        tt += 'Comment: ' + fmt.i(self.kf['comment']) + '<br>\n'
        if self.kf['parent'] is not None:
            tt += 'Parent: ' + fmt.b(fmt.red(self.kf['parent']))
        else:
            tt += 'Parent: ' + fmt.b(fmt.red("None"))
        tt += '<br>Abs. Time: ' + str(self.kf['__abs_time__'])
        # get info on hooks
        tt += '<br> Hooks: '
        if 'hooks' not in self.kf:
            tt += 'None'
        else:
            l = list(self.kf['hooks'].iterkeys())
            if len(l) == 0:
                tt += 'None'
            else:
                tt += ', '.join(l)
        tt += '<br><i>right-click label to edit...</i>'
        self.setToolTip(tt)

    def onContextMenu(self, point):
        # rebuild the hook menus, then show the context menu
        self.add_hook_menu.clear()
        for hook_name in all_hooks_dict.iterkeys():
            action = self.add_hook_menu.addAction(hook_name)
            action.triggered[()].connect(lambda x=hook_name: self.addHook(x))
        self.edit_hook_menu.clear()
        self.del_hook_menu.clear()
        if 'hooks' in self.kf:
            for hook_name in self.kf['hooks'].iterkeys():
                # editing reuses addHook, which opens the stored hook dict
                action = self.edit_hook_menu.addAction(hook_name)
                action.triggered[()].connect(
                    lambda x=hook_name: self.addHook(x))
                action = self.del_hook_menu.addAction(hook_name)
                action.triggered[()].connect(
                    lambda x=hook_name: self.delHook(x))
        self.pop_menu.exec_(self.mapToGlobal(point))

    def addHook(self, hook_name):
        hook_name = str(hook_name)
        # if the hook is already attached, edit it instead of adding a copy
        if 'hooks' in self.kf and hook_name in self.kf['hooks']:
            data_dict = dict(self.kf['hooks'][hook_name])
        else:
            data_dict = dict(all_hooks_dict[hook_name])
        dct_editor = DictEditor(data_dict, hook_name)
        if dct_editor.exec_():
            if 'hooks' not in self.kf:
                self.kf['hooks'] = {}
            self.kf['hooks'][hook_name] = data_dict
            self.updateUI()
            self.edit_hooks.emit()

    def delHook(self, hook_name):
        # ask the user to confirm before removing the hook
        msg_str = "Really delete hook: " + hook_name + "?"
        reply = QtGui.QMessageBox.warning(self, 'Delete hook', msg_str,
                                          (QtGui.QMessageBox.Yes |
                                           QtGui.QMessageBox.No),
                                          QtGui.QMessageBox.No)
        if reply == QtGui.QMessageBox.Yes:
            self.kf['hooks'].pop(hook_name)
            self.updateUI()
            self.edit_hooks.emit()

    def edit(self):
        self.edit_signal.emit(self.key_name)

    def insert(self):
        self.insert_before_signal.emit(self.key_name)

    def add(self):
        self.add_child_signal.emit(self.key_name)

    def delete(self):
        self.delete_signal.emit(self.key_name)

    def handleTimeChanged(self, new_time):
        self.time_changed_signal.emit(self.key_name, float(new_time))


class QArrowWidget(QtGui.QLabel):
    """Responsible for drawing arrows which show parent-child relationships."""

    def __init__(self, arrow_list, grid, start_pos=(0, 0), parent=None):
        super(QArrowWidget, self).__init__(parent)
        self.grid = grid
        self.start_pos = start_pos
        self.grid.addWidget(self, self.start_pos[0], self.start_pos[1], 1, -1)
        self.figureOutHowToDrawArrows(arrow_list)
        self.setSizePolicy(QtGui.QSizePolicy.Ignored,
                           QtGui.QSizePolicy.Fixed)

    def sizeHint(self):
        return QtCore.QSize(1000, (self.max_height+1)*10+5)

    def figureOutHowToDrawArrows(self, arrow_list):
        n_cols = 0
        for arrow in arrow_list:
            n_cols = max(max(n_cols, arrow[0]), arrow[1])
        self.n_cols = n_cols + 1
        self.arrow_list = arrow_list

        availability = []
        self.height_list = []
        for arrow in arrow_list:
            left_index = min(arrow[0], arrow[1])
            right_index = max(arrow[0], arrow[1])
            used = [False]*(right_index - left_index)
            for i, avail in enumerate(availability):
                if all(avail[left_index:right_index]):
                    height = i
                    break
            else:
                height = len(availability)
                availability.append([True]*self.n_cols)
            availability[height][left_index:right_index] = used
            self.height_list.append(height)
        self.max_height = len(availability)

    def paintEvent(self, event):
        centers = []
        r = self.grid.cellRect(self.start_pos[0], self.start_pos[1])
        top_left = r.topLeft()
        for i in range(self.n_cols):
            r = self.grid.cellRect(self.start_pos[0], i+self.start_pos[1])
            c = r.bottomRight() - top_left
            centers.append(c)

        qp = QtGui.QPainter()
        qp.begin(self)
        for arrow, height in zip(self.arrow_list, self.height_list):
            center_left = centers[arrow[0]]
            center_right = centers[arrow[1]]
            hp = 10*height + 5

            def drawArrow(qp, x1, x2, height):
                length = 5
                qp.drawLine(x1, height, x2, height)
                qp.drawLine(x1, height - length, x1, height + length)
                brush = QtGui.QBrush(QtCore.Qt.SolidPattern)
                qp.setBrush(brush)
                if x1 > x2:
                    length = -5
                points = [QtCore.QPoint(x2, height),
                          QtCore.QPoint(x2-length, height-length),
                          QtCore.QPoint(x2-length, height+length)]
                arrowHead = QtGui.QPolygon(points)
                qp.drawPolygon(arrowHead)

            drawArrow(qp, center_left.x(), center_right.x(), hp)
        qp.end()


class QKeyFrameList(KeyFrameList):
    """GUI for editing key frames."""

    def __init__(self, dct, settings, grid, start_pos=(0, 0),
                 parent_widget=None, set_focus_on=None):
        super(QKeyFrameList, self).__init__(dct)
        self.settings = settings
        self.start_pos = start_pos
        self.grid = grid
        self.parent_widget = parent_widget
        self.set_focus_on = set_focus_on
        self.setupUi(self)

    def setupUi(self, widget):
        self.kf_list = []
        skl = self.sorted_key_list()
        for i, key in enumerate(skl):
            kf = self.createNewKeyFrameWidget(key, self.dct[key])
            self.grid.addWidget(kf, self.start_pos[0], self.start_pos[1] + i)
            self.kf_list.append(kf)
        arrow_start = self.start_pos[0] + 1, self.start_pos[1]
        self.arrow_widget = QArrowWidget(self.getArrowList(), self.grid,
                                         start_pos=arrow_start,
                                         parent=self.parent_widget)
        if self.set_focus_on is not None:
            index = skl.index(self.set_focus_on)
            self.kf_list[index].setFocus()

    def getArrowList(self):
        arrow_list = []
        skl = self.sorted_key_list()
        for i, key in enumerate(skl):
            right_index = i
            if self.dct[key]['parent'] is None:
                continue
            else:
                left_index = skl.index(self.dct[key]['parent'])
            arrow = (left_index, right_index)
            arrow_list.append(arrow)
        return arrow_list

    def createNewKeyFrameWidget(self, key_name, key_dict):
        kf = QKeyFrame(key_name, key_dict, self.settings)
        kf.edit_signal.connect(self.handleEdit)
        kf.delete_signal.connect(self.handleDelete)
        kf.insert_before_signal.connect(self.handleInsertBefore)
        kf.add_child_signal.connect(self.handleAddChild)
        kf.time_changed_signal.connect(self.handleTimeChanged)
        kf.edit_hooks.connect(self.handleEditHooks)
        return kf

    def disconnectKeyFrame(self, kf):
        # disconnect all signals
        kf.edit_signal.disconnect(self.handleEdit)
        kf.delete_signal.disconnect(self.handleDelete)
        kf.insert_before_signal.disconnect(self.handleInsertBefore)
        kf.add_child_signal.disconnect(self.handleAddChild)
        kf.time_changed_signal.disconnect(self.handleTimeChanged)

    def handleEditHooks(self):
        self.parent_widget.ramp_changed.emit()

    def handleTimeChanged(self, key_name, new_time):
        skl_old = self.sorted_key_list()
        self.set_time(key_name, new_time)
        skl_new = self.sorted_key_list()
        # check if the order of keyframes has changed
        order_changed = False
        for kold, knew in zip(skl_old, skl_new):
            if kold != knew:
                order_changed = True
                break
        if order_changed:
            for kf in self.kf_list:
                self.disconnectKeyFrame(kf)
            self.parent_widget.reDoUi(set_focus_on=key_name)
        else:
            self.parent_widget.ramp_changed.emit()
            self.updateAllKeys()

    def handleEdit(self, key_name):
        # find out all keys which are descendants of key_name
        disabled_list = [self.is_ancestor(k, key_name)
                         for k in self.sorted_key_list()]
        out_tuple = QEditKeyFrameDialog(key_name, self.dct[key_name],
                                        self.sorted_key_list(),
                                        disabled_list, None).exec_()
        exec_return, new_key_name, new_comment, new_parent = out_tuple
        if exec_return == QtGui.QDialog.Accepted:
            self.dct[key_name]['comment'] = new_comment
            if new_parent == 'None':
                new_parent = None
            self.set_parent(key_name, new_parent)
            if key_name != new_key_name:
                self.set_name(key_name, new_key_name)
                self.parent_widget.handleKeyNameChanged(key_name, new_key_name)
            for kf in self.kf_list:
                self.disconnectKeyFrame(kf)
            self.parent_widget.reDoUi()

    def handleInsertBefore(self, key_name):
        new_key_name, ok = QtGui.QInputDialog.getText(self.parent_widget,
                                                      'Insert before',
                                                      'Enter key name:')
        if ok:
            parent_key = self.dct[key_name]['parent']
            kf = {'time': 1.0, 'parent': parent_key, "comment": "comment"}
            self.dct[key_name]['parent'] = str(new_key_name)
            self.add_keyframe(str(new_key_name), kf)
            for kf in self.kf_list:
                self.disconnectKeyFrame(kf)
            self.parent_widget.reDoUi()

    def handleAddChild(self, key_name):
        new_key_name, ok = QtGui.QInputDialog.getText(self.parent_widget,
                                                      'Add child',
                                                      'Enter key name:')
        if ok:
            kf = {'time': 1.0, 'parent': key_name, "comment": "comment"}
            self.add_keyframe(str(new_key_name), kf)
            for kf in self.kf_list:
                self.disconnectKeyFrame(kf)
            self.parent_widget.reDoUi()

    def handleDelete(self, key_name):
        self.del_keyframe(key_name)
        for kf in self.kf_list:
            self.disconnectKeyFrame(kf)
        self.parent_widget.reDoUi()

    def updateAllKeys(self):
        """Update the displayed data for all keyframe widgets."""
        for kf, key in zip(self.kf_list, self.sorted_key_list()):
            kf.update(key, self.dct[key])
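

# --- Illustrative sketch (not part of the original module) -------------------
# QArrowWidget.figureOutHowToDrawArrows above assigns each parent-child arrow
# to a row so that arrows spanning overlapping column ranges never share a
# row.  `assign_arrow_rows` is a hypothetical, Qt-free restatement of that
# greedy algorithm, included only to make the layout logic easy to test.
def assign_arrow_rows(arrow_list):
    """Return a row index for every (parent_col, child_col) arrow."""
    n_cols = 0
    for arrow in arrow_list:
        n_cols = max(n_cols, arrow[0], arrow[1])
    n_cols += 1
    availability = []  # availability[row][col] is True while col is free
    rows = []
    for arrow in arrow_list:
        left, right = min(arrow), max(arrow)
        for row, avail in enumerate(availability):
            if all(avail[left:right]):
                break
        else:
            row = len(availability)
            availability.append([True] * n_cols)
        availability[row][left:right] = [False] * (right - left)
        rows.append(row)
    return rows

# Example: the first two arrows overlap on column 1 and get different rows,
# while the third does not overlap the first and can reuse row 0:
# assign_arrow_rows([(0, 2), (1, 3), (3, 4)]) == [0, 1, 0]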
{ "content_hash": "56346b7a7d9abd39a4cdb0137df2c405", "timestamp": "", "source": "github", "line_count": 473, "max_line_length": 82, "avg_line_length": 39.36363636363637, "alnum_prop": 0.5726408507438638, "repo_name": "shreyaspotnis/rampage", "id": "cd556f7a770726054e0e7d558ff0e9a6b451e173", "size": "18644", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rampage/widgets/KeyFrameWidgets.py", "mode": "33188", "license": "mit", "language": [ { "name": "PowerShell", "bytes": "85" }, { "name": "Python", "bytes": "196700" } ], "symlink_target": "" }
""" STARBURST ACC/FEANTA Server Runner Author: Lokbondo Kung Email: [email protected] """ import os import sys import time import signal import feanta_server import pdu_worker import brick_worker import bb_worker import cryostat_worker from daemon import runner def instantiate(pid_file, log_file): # Instantiate workers. pdu = pdu_worker.PDUWorker() brick = brick_worker.BrickWorker() bb = bb_worker.BBWorker() cryo = cryostat_worker.CryoWorker() # Instantiate server. server = feanta_server.ServerDaemon(pid_file) # Setup log file. server.set_log_file(log_file) # Link workers. server.link_worker(pdu) server.link_worker(brick) server.link_worker(bb) server.link_worker(cryo) return server, None def start(server): # Start server. server_runner = runner.DaemonRunner(server) server_runner.do_action() def __kill(pidfile): pid = 0 with open(pidfile, 'r') as pid_file: pid = int(pid_file.readline()) # Try killing the daemon process try: i = 0 while 1: os.kill(pid, signal.SIGTERM) time.sleep(0.1) i = i + 1 if i % 10 == 0: os.kill(pid, signal.SIGHUP) except OSError, err: err = str(err) if err.find("No such process") > 0: if os.path.exists(pidfile): os.remove(pidfile) else: print str(err) sys.exit(1) def stop(pid_file): __kill(pid_file)
{ "content_hash": "68e7d49b638836dcb612ebb844cd1885", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 49, "avg_line_length": 22.10144927536232, "alnum_prop": 0.6065573770491803, "repo_name": "lokokung/Starburst-Front-End-Control-System", "id": "1e9da2d796f0170b3b027af2b227f3e4b1c267b7", "size": "1525", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/server_runner.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "107710" } ], "symlink_target": "" }
"""Tests for tensorflow.ops.parsing_ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import copy import itertools import numpy as np from google.protobuf import json_format from tensorflow.core.example import example_pb2 from tensorflow.core.example import feature_pb2 from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors_impl from tensorflow.python.framework import ops from tensorflow.python.framework import sparse_tensor from tensorflow.python.framework import tensor_shape from tensorflow.python.framework import test_util from tensorflow.python.framework import tensor_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import parsing_ops from tensorflow.python.platform import test from tensorflow.python.platform import tf_logging # Helpers for creating Example objects example = example_pb2.Example feature = feature_pb2.Feature features = lambda d: feature_pb2.Features(feature=d) bytes_feature = lambda v: feature(bytes_list=feature_pb2.BytesList(value=v)) int64_feature = lambda v: feature(int64_list=feature_pb2.Int64List(value=v)) float_feature = lambda v: feature(float_list=feature_pb2.FloatList(value=v)) # Helpers for creating SequenceExample objects feature_list = lambda l: feature_pb2.FeatureList(feature=l) feature_lists = lambda d: feature_pb2.FeatureLists(feature_list=d) sequence_example = example_pb2.SequenceExample def flatten(list_of_lists): """Flatten one level of nesting.""" return itertools.chain.from_iterable(list_of_lists) def flatten_values_tensors_or_sparse(tensors_list): """Flatten each SparseTensor object into 3 Tensors for session.run().""" return list( flatten([[v.indices, v.values, v.dense_shape] if isinstance(v, sparse_tensor.SparseTensor) else [v] for v in tensors_list])) def _compare_output_to_expected(tester, dict_tensors, expected_tensors, flat_output): tester.assertEqual(set(dict_tensors.keys()), set(expected_tensors.keys())) i = 0 # Index into the flattened output of session.run() for k, v in dict_tensors.items(): expected_v = expected_tensors[k] tf_logging.info("Comparing key: %s", k) if isinstance(v, sparse_tensor.SparseTensor): # Three outputs for SparseTensor : indices, values, shape. tester.assertEqual([k, len(expected_v)], [k, 3]) tester.assertAllEqual(expected_v[0], flat_output[i]) tester.assertAllEqual(expected_v[1], flat_output[i + 1]) tester.assertAllEqual(expected_v[2], flat_output[i + 2]) i += 3 else: # One output for standard Tensor. tester.assertAllEqual(expected_v, flat_output[i]) i += 1 class ParseExampleTest(test.TestCase): def _test(self, kwargs, expected_values=None, expected_err=None): with self.cached_session() as sess: if expected_err: with self.assertRaisesWithPredicateMatch(expected_err[0], expected_err[1]): out = parsing_ops.parse_example(**kwargs) sess.run(flatten_values_tensors_or_sparse(out.values())) return else: # Returns dict w/ Tensors and SparseTensors. out = parsing_ops.parse_example(**kwargs) result = flatten_values_tensors_or_sparse(out.values()) # Check values. tf_result = self.evaluate(result) _compare_output_to_expected(self, out, expected_values, tf_result) # Check shapes; if serialized is a Tensor we need its size to # properly check. 
serialized = kwargs["serialized"] batch_size = ( self.evaluate(serialized).size if isinstance(serialized, ops.Tensor) else np.asarray(serialized).size) for k, f in kwargs["features"].items(): if isinstance(f, parsing_ops.FixedLenFeature) and f.shape is not None: self.assertEqual( tuple(out[k].get_shape().as_list()), (batch_size,) + f.shape) elif isinstance(f, parsing_ops.VarLenFeature): self.assertEqual( tuple(out[k].indices.get_shape().as_list()), (None, 2)) self.assertEqual(tuple(out[k].values.get_shape().as_list()), (None,)) self.assertEqual( tuple(out[k].dense_shape.get_shape().as_list()), (2,)) @test_util.run_deprecated_v1 def testEmptySerializedWithAllDefaults(self): sparse_name = "st_a" a_name = "a" b_name = "b" c_name = "c:has_a_tricky_name" a_default = [0, 42, 0] b_default = np.random.rand(3, 3).astype(bytes) c_default = np.random.rand(2).astype(np.float32) expected_st_a = ( # indices, values, shape np.empty((0, 2), dtype=np.int64), # indices np.empty((0,), dtype=np.int64), # sp_a is DT_INT64 np.array([2, 0], dtype=np.int64)) # batch == 2, max_elems = 0 expected_output = { sparse_name: expected_st_a, a_name: np.array(2 * [[a_default]]), b_name: np.array(2 * [b_default]), c_name: np.array(2 * [c_default]), } self._test({ "example_names": np.empty((0,), dtype=bytes), "serialized": ops.convert_to_tensor(["", ""]), "features": { sparse_name: parsing_ops.VarLenFeature(dtypes.int64), a_name: parsing_ops.FixedLenFeature( (1, 3), dtypes.int64, default_value=a_default), b_name: parsing_ops.FixedLenFeature( (3, 3), dtypes.string, default_value=b_default), c_name: parsing_ops.FixedLenFeature( (2,), dtypes.float32, default_value=c_default), } }, expected_output) def testEmptySerializedWithoutDefaultsShouldFail(self): input_features = { "st_a": parsing_ops.VarLenFeature(dtypes.int64), "a": parsing_ops.FixedLenFeature( (1, 3), dtypes.int64, default_value=[0, 42, 0]), "b": parsing_ops.FixedLenFeature( (3, 3), dtypes.string, default_value=np.random.rand(3, 3).astype(bytes)), # Feature "c" is missing a default, this gap will cause failure. "c": parsing_ops.FixedLenFeature((2,), dtype=dtypes.float32), } # Edge case where the key is there but the feature value is empty original = example(features=features({"c": feature()})) self._test( { "example_names": ["in1"], "serialized": [original.SerializeToString()], "features": input_features, }, expected_err=( errors_impl.OpError, "Name: in1, Feature: c \\(data type: float\\) is required")) # Standard case of missing key and value. self._test( { "example_names": ["in1", "in2"], "serialized": ["", ""], "features": input_features, }, expected_err=( errors_impl.OpError, "Name: in1, Feature: c \\(data type: float\\) is required")) def testDenseNotMatchingShapeShouldFail(self): original = [ example(features=features({ "a": float_feature([1, 1, 3]), })), example(features=features({ "a": float_feature([-1, -1]), })) ] names = ["passing", "failing"] serialized = [m.SerializeToString() for m in original] self._test( { "example_names": names, "serialized": ops.convert_to_tensor(serialized), "features": { "a": parsing_ops.FixedLenFeature((1, 3), dtypes.float32) } }, expected_err=(errors_impl.OpError, "Name: failing, Key: a, Index: 1. 
Number of float val"))

  def testDenseDefaultNoShapeShouldFail(self):
    original = [
        example(features=features({
            "a": float_feature([1, 1, 3]),
        })),
    ]

    serialized = [m.SerializeToString() for m in original]

    self._test(
        {
            "example_names": ["failing"],
            "serialized": ops.convert_to_tensor(serialized),
            "features": {
                "a": parsing_ops.FixedLenFeature(None, dtypes.float32)
            }
        },
        expected_err=(ValueError, "Missing shape for feature a"))

  @test_util.run_deprecated_v1
  def testSerializedContainingSparse(self):
    original = [
        example(features=features({
            "st_c": float_feature([3, 4])
        })),
        example(
            features=features({
                "st_c": float_feature([]),  # empty float list
            })),
        example(
            features=features({
                "st_d": feature(),  # feature with nothing in it
            })),
        example(
            features=features({
                "st_c": float_feature([1, 2, -1]),
                "st_d": bytes_feature([b"hi"])
            }))
    ]

    serialized = [m.SerializeToString() for m in original]

    expected_st_c = (  # indices, values, shape
        np.array([[0, 0], [0, 1], [3, 0], [3, 1], [3, 2]], dtype=np.int64),
        np.array([3.0, 4.0, 1.0, 2.0, -1.0], dtype=np.float32),
        np.array([4, 3], dtype=np.int64))  # batch == 4, max_elems = 3

    expected_st_d = (  # indices, values, shape
        np.array([[3, 0]], dtype=np.int64),
        np.array(["hi"], dtype=bytes),
        np.array([4, 1], dtype=np.int64))  # batch == 4, max_elems = 1

    expected_output = {
        "st_c": expected_st_c,
        "st_d": expected_st_d,
    }

    self._test({
        "serialized": ops.convert_to_tensor(serialized),
        "features": {
            "st_c": parsing_ops.VarLenFeature(dtypes.float32),
            "st_d": parsing_ops.VarLenFeature(dtypes.string)
        }
    }, expected_output)

  def testSerializedContainingSparseFeature(self):
    original = [
        example(
            features=features({
                "val": float_feature([3, 4]),
                "idx": int64_feature([5, 10])
            })),
        example(
            features=features({
                "val": float_feature([]),  # empty float list
                "idx": int64_feature([])
            })),
        example(
            features=features({
                "val": feature(),  # feature with nothing in it
                # missing idx feature
            })),
        example(
            features=features({
                "val": float_feature([1, 2, -1]),
                "idx": int64_feature([0, 9, 3])  # unsorted
            }))
    ]

    serialized = [m.SerializeToString() for m in original]

    expected_sp = (  # indices, values, shape
        np.array([[0, 5], [0, 10], [3, 0], [3, 3], [3, 9]], dtype=np.int64),
        np.array([3.0, 4.0, 1.0, -1.0, 2.0], dtype=np.float32),
        np.array([4, 13], dtype=np.int64))  # batch == 4, max_elems = 13

    expected_output = {
        "sp": expected_sp,
    }

    self._test({
        "serialized": ops.convert_to_tensor(serialized),
        "features": {
            "sp":
                parsing_ops.SparseFeature(["idx"], "val", dtypes.float32, [13])
        }
    }, expected_output)

  def testSerializedContainingSparseFeatureReuse(self):
    original = [
        example(
            features=features({
                "val1": float_feature([3, 4]),
                "val2": float_feature([5, 6]),
                "idx": int64_feature([5, 10])
            })),
        example(
            features=features({
                "val1": float_feature([]),  # empty float list
                "idx": int64_feature([])
            })),
    ]

    serialized = [m.SerializeToString() for m in original]

    expected_sp1 = (  # indices, values, shape
        np.array([[0, 5], [0, 10]], dtype=np.int64),
        np.array([3.0, 4.0], dtype=np.float32),
        np.array([2, 13], dtype=np.int64))  # batch == 2, max_elems = 13

    expected_sp2 = (  # indices, values, shape
        np.array([[0, 5], [0, 10]], dtype=np.int64),
        np.array([5.0, 6.0], dtype=np.float32),
        np.array([2, 7], dtype=np.int64))  # batch == 2, max_elems = 7

    expected_output = {
        "sp1": expected_sp1,
        "sp2": expected_sp2,
    }

    self._test({
        "serialized": ops.convert_to_tensor(serialized),
        "features": {
            "sp1":
                parsing_ops.SparseFeature("idx", "val1", dtypes.float32, 13),
            "sp2":
                parsing_ops.SparseFeature(
                    "idx", "val2", dtypes.float32, size=7, already_sorted=True)
        }
    }, expected_output)

  def testSerializedContaining3DSparseFeature(self):
    original = [
        example(
            features=features({
                "val": float_feature([3, 4]),
                "idx0": int64_feature([5, 10]),
                "idx1": int64_feature([0, 2]),
            })),
        example(
            features=features({
                "val": float_feature([]),  # empty float list
                "idx0": int64_feature([]),
                "idx1": int64_feature([]),
            })),
        example(
            features=features({
                "val": feature(),  # feature with nothing in it
                # missing idx feature
            })),
        example(
            features=features({
                "val": float_feature([1, 2, -1]),
                "idx0": int64_feature([0, 9, 3]),  # unsorted
                "idx1": int64_feature([1, 0, 2]),
            }))
    ]

    serialized = [m.SerializeToString() for m in original]

    expected_sp = (
        # indices
        np.array(
            [[0, 5, 0], [0, 10, 2], [3, 0, 1], [3, 3, 2], [3, 9, 0]],
            dtype=np.int64),
        # values
        np.array([3.0, 4.0, 1.0, -1.0, 2.0], dtype=np.float32),
        # shape batch == 4, max_elems = 13
        np.array([4, 13, 3], dtype=np.int64))

    expected_output = {
        "sp": expected_sp,
    }

    self._test({
        "serialized": ops.convert_to_tensor(serialized),
        "features": {
            "sp":
                parsing_ops.SparseFeature(["idx0", "idx1"], "val",
                                          dtypes.float32, [13, 3])
        }
    }, expected_output)

  def testSerializedContainingDense(self):
    aname = "a"
    bname = "b*has+a:tricky_name"
    original = [
        example(
            features=features({
                aname: float_feature([1, 1]),
                bname: bytes_feature([b"b0_str"]),
            })),
        example(
            features=features({
                aname: float_feature([-1, -1]),
                bname: bytes_feature([b""]),
            }))
    ]

    serialized = [m.SerializeToString() for m in original]

    expected_output = {
        aname:
            np.array([[1, 1], [-1, -1]], dtype=np.float32).reshape(2, 1, 2, 1),
        bname:
            np.array(["b0_str", ""], dtype=bytes).reshape(2, 1, 1, 1, 1),
    }

    # No defaults, values required
    self._test({
        "serialized": ops.convert_to_tensor(serialized),
        "features": {
            aname:
                parsing_ops.FixedLenFeature((1, 2, 1), dtype=dtypes.float32),
            bname:
                parsing_ops.FixedLenFeature((1, 1, 1, 1), dtype=dtypes.string),
        }
    }, expected_output)

  # This test is identical to the previous one except
  # for the creation of 'serialized'.
  def testSerializedContainingDenseWithConcat(self):
    aname = "a"
    bname = "b*has+a:tricky_name"
    # TODO(lew): Feature appearing twice should be an error in future.
    original = [
        (example(features=features({
            aname: float_feature([10, 10]),
        })),
         example(
             features=features({
                 aname: float_feature([1, 1]),
                 bname: bytes_feature([b"b0_str"]),
             }))),
        (
            example(features=features({
                bname: bytes_feature([b"b100"]),
            })),
            example(
                features=features({
                    aname: float_feature([-1, -1]),
                    bname: bytes_feature([b"b1"]),
                })),
        ),
    ]

    serialized = [
        m.SerializeToString() + n.SerializeToString() for (m, n) in original
    ]

    expected_output = {
        aname:
            np.array([[1, 1], [-1, -1]], dtype=np.float32).reshape(2, 1, 2, 1),
        bname:
            np.array(["b0_str", "b1"], dtype=bytes).reshape(2, 1, 1, 1, 1),
    }

    # No defaults, values required
    self._test({
        "serialized": ops.convert_to_tensor(serialized),
        "features": {
            aname:
                parsing_ops.FixedLenFeature((1, 2, 1), dtype=dtypes.float32),
            bname:
                parsing_ops.FixedLenFeature((1, 1, 1, 1), dtype=dtypes.string),
        }
    }, expected_output)

  def testSerializedContainingDenseScalar(self):
    original = [
        example(features=features({
            "a": float_feature([1]),
        })),
        example(features=features({}))
    ]

    serialized = [m.SerializeToString() for m in original]

    expected_output = {
        "a":
            np.array([[1], [-1]], dtype=np.float32)  # 2x1 (column vector)
    }

    self._test({
        "serialized": ops.convert_to_tensor(serialized),
        "features": {
            "a":
                parsing_ops.FixedLenFeature(
                    (1,), dtype=dtypes.float32, default_value=-1),
        }
    }, expected_output)

  def testSerializedContainingDenseWithDefaults(self):
    original = [
        example(features=features({
            "a": float_feature([1, 1]),
        })),
        example(features=features({
            "b": bytes_feature([b"b1"]),
        })),
        example(features=features({
            "b": feature()
        })),
    ]

    serialized = [m.SerializeToString() for m in original]

    expected_output = {
        "a":
            np.array([[1, 1], [3, -3], [3, -3]],
                     dtype=np.float32).reshape(3, 1, 2, 1),
        "b":
            np.array(["tmp_str", "b1", "tmp_str"],
                     dtype=bytes).reshape(3, 1, 1, 1, 1),
    }

    self._test({
        "serialized": ops.convert_to_tensor(serialized),
        "features": {
            "a":
                parsing_ops.FixedLenFeature(
                    (1, 2, 1), dtype=dtypes.float32, default_value=[3.0, -3.0]),
            "b":
                parsing_ops.FixedLenFeature(
                    (1, 1, 1, 1), dtype=dtypes.string, default_value="tmp_str"),
        }
    }, expected_output)

  @test_util.run_deprecated_v1
  def testSerializedContainingSparseAndSparseFeatureAndDenseWithNoDefault(self):
    expected_st_a = (  # indices, values, shape
        np.empty((0, 2), dtype=np.int64),  # indices
        np.empty((0,), dtype=np.int64),  # sp_a is DT_INT64
        np.array([2, 0], dtype=np.int64))  # batch == 2, max_elems = 0
    expected_sp = (  # indices, values, shape
        np.array([[0, 0], [0, 3], [1, 7]], dtype=np.int64),
        np.array(["a", "b", "c"], dtype="|S"),
        np.array([2, 13], dtype=np.int64))  # batch == 2, max_elems = 13

    original = [
        example(
            features=features({
                "c": float_feature([3, 4]),
                "val": bytes_feature([b"a", b"b"]),
                "idx": int64_feature([0, 3])
            })),
        example(
            features=features({
                "c": float_feature([1, 2]),
                "val": bytes_feature([b"c"]),
                "idx": int64_feature([7])
            }))
    ]

    names = ["in1", "in2"]
    serialized = [m.SerializeToString() for m in original]

    a_default = [1, 2, 3]
    b_default = np.random.rand(3, 3).astype(bytes)
    expected_output = {
        "st_a": expected_st_a,
        "sp": expected_sp,
        "a": np.array(2 * [[a_default]]),
        "b": np.array(2 * [b_default]),
        "c": np.array([[3, 4], [1, 2]], dtype=np.float32),
    }

    self._test(
        {
            "example_names": names,
            "serialized": ops.convert_to_tensor(serialized),
            "features": {
                "st_a":
                    parsing_ops.VarLenFeature(dtypes.int64),
                "sp":
                    parsing_ops.SparseFeature("idx", "val", dtypes.string, 13),
                "a":
                    parsing_ops.FixedLenFeature(
                        (1, 3), dtypes.int64, default_value=a_default),
                "b":
                    parsing_ops.FixedLenFeature(
                        (3, 3), dtypes.string, default_value=b_default),
                # Feature "c" must be provided, since it has no default_value.
                "c":
                    parsing_ops.FixedLenFeature((2,), dtypes.float32),
            }
        },
        expected_output)

  @test_util.run_deprecated_v1
  def testSerializedContainingSparseAndSparseFeatureWithReuse(self):
    expected_idx = (  # indices, values, shape
        np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.int64),
        np.array([0, 3, 7, 1]),
        np.array([2, 2], dtype=np.int64))  # batch == 2, max_elems = 2

    expected_sp = (  # indices, values, shape
        np.array([[0, 0], [0, 3], [1, 1], [1, 7]], dtype=np.int64),
        np.array(["a", "b", "d", "c"], dtype="|S"),
        np.array([2, 13], dtype=np.int64))  # batch == 2, max_elems = 13

    original = [
        example(
            features=features({
                "val": bytes_feature([b"a", b"b"]),
                "idx": int64_feature([0, 3])
            })),
        example(
            features=features({
                "val": bytes_feature([b"c", b"d"]),
                "idx": int64_feature([7, 1])
            }))
    ]

    names = ["in1", "in2"]
    serialized = [m.SerializeToString() for m in original]

    expected_output = {
        "idx": expected_idx,
        "sp": expected_sp,
    }

    self._test({
        "example_names": names,
        "serialized": ops.convert_to_tensor(serialized),
        "features": {
            "idx":
                parsing_ops.VarLenFeature(dtypes.int64),
            "sp":
                parsing_ops.SparseFeature(["idx"], "val", dtypes.string, [13]),
        }
    }, expected_output)

  def _testSerializedContainingVarLenDenseLargerBatch(self, batch_size):
    # During parsing, data read from the serialized proto is stored in buffers.
    # For small batch sizes, a buffer will contain one minibatch entry.
    # For larger batch sizes, a buffer may contain several minibatch
    # entries.  This test identified a bug where the code that copied
    # data out of the buffers and into the output tensors assumed each
    # buffer only contained one minibatch entry.  The bug has since been fixed.
    truth_int = [i for i in range(batch_size)]
    truth_str = [[("foo%d" % i).encode(), ("bar%d" % i).encode()]
                 for i in range(batch_size)]

    expected_str = copy.deepcopy(truth_str)

    # Delete some intermediate entries
    for i in range(batch_size):
      col = 1
      if np.random.rand() < 0.25:
        # w.p. 25%, drop out the second entry
        expected_str[i][col] = b"default"
        col -= 1
        truth_str[i].pop()
      if np.random.rand() < 0.25:
        # w.p. 25%, drop out the second entry (possibly again)
        expected_str[i][col] = b"default"
        truth_str[i].pop()

    expected_output = {
        # Batch size batch_size, 1 time step.
        "a": np.array(truth_int, dtype=np.int64).reshape(batch_size, 1),
        # Batch size batch_size, 2 time steps.
        
"b": np.array(expected_str, dtype="|S").reshape(batch_size, 2), } original = [ example( features=features({ "a": int64_feature([truth_int[i]]), "b": bytes_feature(truth_str[i]) })) for i in range(batch_size) ] serialized = [m.SerializeToString() for m in original] self._test({ "serialized": ops.convert_to_tensor(serialized, dtype=dtypes.string), "features": { "a": parsing_ops.FixedLenSequenceFeature( shape=(), dtype=dtypes.int64, allow_missing=True, default_value=-1), "b": parsing_ops.FixedLenSequenceFeature( shape=[], dtype=dtypes.string, allow_missing=True, default_value="default"), } }, expected_output) def testSerializedContainingVarLenDenseLargerBatch(self): np.random.seed(3456) for batch_size in (1, 10, 20, 100, 256): self._testSerializedContainingVarLenDenseLargerBatch(batch_size) @test_util.run_deprecated_v1 def testSerializedContainingVarLenDense(self): aname = "a" bname = "b" cname = "c" dname = "d" example_names = ["in1", "in2", "in3", "in4"] original = [ example(features=features({ cname: int64_feature([2]), })), example( features=features({ aname: float_feature([1, 1]), bname: bytes_feature([b"b0_str", b"b1_str"]), })), example( features=features({ aname: float_feature([-1, -1, 2, 2]), bname: bytes_feature([b"b1"]), })), example( features=features({ aname: float_feature([]), cname: int64_feature([3]), })), ] serialized = [m.SerializeToString() for m in original] expected_output = { aname: np.array( [ [0, 0, 0, 0], [1, 1, 0, 0], [-1, -1, 2, 2], [0, 0, 0, 0], ], dtype=np.float32).reshape(4, 2, 2, 1), bname: np.array( [["", ""], ["b0_str", "b1_str"], ["b1", ""], ["", ""]], dtype=bytes).reshape(4, 2, 1, 1, 1), cname: np.array([2, 0, 0, 3], dtype=np.int64).reshape(4, 1), dname: np.empty(shape=(4, 0), dtype=bytes), } self._test({ "example_names": example_names, "serialized": ops.convert_to_tensor(serialized), "features": { aname: parsing_ops.FixedLenSequenceFeature( (2, 1), dtype=dtypes.float32, allow_missing=True), bname: parsing_ops.FixedLenSequenceFeature( (1, 1, 1), dtype=dtypes.string, allow_missing=True), cname: parsing_ops.FixedLenSequenceFeature( shape=[], dtype=dtypes.int64, allow_missing=True), dname: parsing_ops.FixedLenSequenceFeature( shape=[], dtype=dtypes.string, allow_missing=True), } }, expected_output) # Test with padding values. expected_output_custom_padding = dict(expected_output) expected_output_custom_padding[aname] = np.array( [ [-2, -2, -2, -2], [1, 1, -2, -2], [-1, -1, 2, 2], [-2, -2, -2, -2], ], dtype=np.float32).reshape(4, 2, 2, 1) self._test({ "example_names": example_names, "serialized": ops.convert_to_tensor(serialized), "features": { aname: parsing_ops.FixedLenSequenceFeature( (2, 1), dtype=dtypes.float32, allow_missing=True, default_value=-2.0), bname: parsing_ops.FixedLenSequenceFeature( (1, 1, 1), dtype=dtypes.string, allow_missing=True), cname: parsing_ops.FixedLenSequenceFeature( shape=[], dtype=dtypes.int64, allow_missing=True), dname: parsing_ops.FixedLenSequenceFeature( shape=[], dtype=dtypes.string, allow_missing=True), } }, expected_output_custom_padding) # Change number of required values so the inputs are not a # multiple of this size. self._test( { "example_names": example_names, "serialized": ops.convert_to_tensor(serialized), "features": { aname: parsing_ops.FixedLenSequenceFeature( (2, 1), dtype=dtypes.float32, allow_missing=True), bname: parsing_ops.FixedLenSequenceFeature( (2, 1, 1), dtype=dtypes.string, allow_missing=True), } }, expected_err=( errors_impl.OpError, "Name: in3, Key: b, Index: 2. 
" "Number of bytes values is not a multiple of stride length.")) self._test( { "example_names": example_names, "serialized": ops.convert_to_tensor(serialized), "features": { aname: parsing_ops.FixedLenSequenceFeature( (2, 1), dtype=dtypes.float32, allow_missing=True, default_value=[]), bname: parsing_ops.FixedLenSequenceFeature( (2, 1, 1), dtype=dtypes.string, allow_missing=True), } }, expected_err=(ValueError, "Cannot reshape a tensor with 0 elements to shape")) self._test( { "example_names": example_names, "serialized": ops.convert_to_tensor(serialized), "features": { aname: parsing_ops.FixedLenFeature( (None, 2, 1), dtype=dtypes.float32), bname: parsing_ops.FixedLenSequenceFeature( (2, 1, 1), dtype=dtypes.string, allow_missing=True), } }, expected_err=(ValueError, "First dimension of shape for feature a unknown. " "Consider using FixedLenSequenceFeature.")) self._test( { "example_names": example_names, "serialized": ops.convert_to_tensor(serialized), "features": { cname: parsing_ops.FixedLenFeature( (1, None), dtype=dtypes.int64, default_value=[[1]]), } }, expected_err=(ValueError, "All dimensions of shape for feature c need to be known " r"but received \(1, None\).")) self._test( { "example_names": example_names, "serialized": ops.convert_to_tensor(serialized), "features": { aname: parsing_ops.FixedLenSequenceFeature( (2, 1), dtype=dtypes.float32, allow_missing=True), bname: parsing_ops.FixedLenSequenceFeature( (1, 1, 1), dtype=dtypes.string, allow_missing=True), cname: parsing_ops.FixedLenSequenceFeature( shape=[], dtype=dtypes.int64, allow_missing=False), dname: parsing_ops.FixedLenSequenceFeature( shape=[], dtype=dtypes.string, allow_missing=True), } }, expected_err=(ValueError, "Unsupported: FixedLenSequenceFeature requires " "allow_missing to be True.")) class ParseSingleExampleTest(test.TestCase): def _test(self, kwargs, expected_values=None, expected_err=None): with self.cached_session() as sess: if expected_err: with self.assertRaisesWithPredicateMatch(expected_err[0], expected_err[1]): out = parsing_ops.parse_single_example(**kwargs) sess.run(flatten_values_tensors_or_sparse(out.values())) else: # Returns dict w/ Tensors and SparseTensors. out = parsing_ops.parse_single_example(**kwargs) # Check values. tf_result = sess.run(flatten_values_tensors_or_sparse(out.values())) _compare_output_to_expected(self, out, expected_values, tf_result) # Check shapes. 
for k, f in kwargs["features"].items(): if isinstance(f, parsing_ops.FixedLenFeature) and f.shape is not None: self.assertEqual( tuple(out[k].get_shape()), tensor_shape.as_shape(f.shape)) elif isinstance(f, parsing_ops.VarLenFeature): self.assertEqual( tuple(out[k].indices.get_shape().as_list()), (None, 1)) self.assertEqual(tuple(out[k].values.get_shape().as_list()), (None,)) self.assertEqual( tuple(out[k].dense_shape.get_shape().as_list()), (1,)) @test_util.run_deprecated_v1 def testSingleExampleWithSparseAndSparseFeatureAndDense(self): original = example( features=features({ "c": float_feature([3, 4]), "d": float_feature([0.0, 1.0]), "val": bytes_feature([b"a", b"b"]), "idx": int64_feature([0, 3]), "st_a": float_feature([3.0, 4.0]) })) serialized = original.SerializeToString() expected_st_a = ( np.array([[0], [1]], dtype=np.int64), # indices np.array([3.0, 4.0], dtype=np.float32), # values np.array([2], dtype=np.int64)) # shape: max_values = 2 expected_sp = ( # indices, values, shape np.array([[0], [3]], dtype=np.int64), np.array(["a", "b"], dtype="|S"), np.array([13], dtype=np.int64)) # max_values = 13 a_default = [1, 2, 3] b_default = np.random.rand(3, 3).astype(bytes) expected_output = { "st_a": expected_st_a, "sp": expected_sp, "a": [a_default], "b": b_default, "c": np.array([3, 4], dtype=np.float32), "d": np.array([0.0, 1.0], dtype=np.float32), } self._test( { "example_names": ops.convert_to_tensor("in1"), "serialized": ops.convert_to_tensor(serialized), "features": { "st_a": parsing_ops.VarLenFeature(dtypes.float32), "sp": parsing_ops.SparseFeature(["idx"], "val", dtypes.string, [13]), "a": parsing_ops.FixedLenFeature( (1, 3), dtypes.int64, default_value=a_default), "b": parsing_ops.FixedLenFeature( (3, 3), dtypes.string, default_value=b_default), # Feature "c" must be provided, since it has no default_value. "c": parsing_ops.FixedLenFeature(2, dtypes.float32), "d": parsing_ops.FixedLenSequenceFeature( [], dtypes.float32, allow_missing=True) } }, expected_output) class ParseSequenceExampleTest(test.TestCase): def testCreateSequenceExample(self): value = sequence_example( context=features({ "global_feature": float_feature([1, 2, 3]), }), feature_lists=feature_lists({ "repeated_feature_2_frames": feature_list([ bytes_feature([b"a", b"b", b"c"]), bytes_feature([b"a", b"d", b"e"]) ]), "repeated_feature_3_frames": feature_list([ int64_feature([3, 4, 5, 6, 7]), int64_feature([-1, 0, 0, 0, 0]), int64_feature([1, 2, 3, 4, 5]) ]) })) value.SerializeToString() # Smoke test def _test(self, kwargs, expected_context_values=None, expected_feat_list_values=None, expected_length_values=None, expected_err=None, batch=False): expected_context_values = expected_context_values or {} expected_feat_list_values = expected_feat_list_values or {} expected_length_values = expected_length_values or {} with self.cached_session() as sess: if expected_err: with self.assertRaisesWithPredicateMatch(expected_err[0], expected_err[1]): if batch: c_out, fl_out, _ = parsing_ops.parse_sequence_example(**kwargs) else: c_out, fl_out = parsing_ops.parse_single_sequence_example(**kwargs) if c_out: sess.run(flatten_values_tensors_or_sparse(c_out.values())) if fl_out: sess.run(flatten_values_tensors_or_sparse(fl_out.values())) else: # Returns dicts w/ Tensors and SparseTensors. 
if batch: (context_out, feat_list_out, lengths_out) = parsing_ops.parse_sequence_example(**kwargs) else: (context_out, feat_list_out) = parsing_ops.parse_single_sequence_example(**kwargs) lengths_out = {} context_result = sess.run( flatten_values_tensors_or_sparse( context_out.values())) if context_out else [] feat_list_result = sess.run( flatten_values_tensors_or_sparse( feat_list_out.values())) if feat_list_out else [] lengths_result = sess.run( flatten_values_tensors_or_sparse( lengths_out.values())) if lengths_out else [] # Check values. _compare_output_to_expected(self, context_out, expected_context_values, context_result) _compare_output_to_expected(self, feat_list_out, expected_feat_list_values, feat_list_result) _compare_output_to_expected(self, lengths_out, expected_length_values, lengths_result) # Check shapes; if serialized is a Tensor we need its size to # properly check. if "context_features" in kwargs: for k, f in kwargs["context_features"].items(): if isinstance(f, parsing_ops.FixedLenFeature) and f.shape is not None: if batch: self.assertEqual( tuple(context_out[k].get_shape().as_list()[1:]), f.shape) else: self.assertEqual( tuple(context_out[k].get_shape().as_list()), f.shape) elif isinstance(f, parsing_ops.VarLenFeature) and batch: self.assertEqual( tuple(context_out[k].indices.get_shape().as_list()), (None, 2)) self.assertEqual( tuple(context_out[k].values.get_shape().as_list()), (None,)) self.assertEqual( tuple(context_out[k].dense_shape.get_shape().as_list()), (2,)) elif isinstance(f, parsing_ops.VarLenFeature) and not batch: self.assertEqual( tuple(context_out[k].indices.get_shape().as_list()), (None, 1)) self.assertEqual( tuple(context_out[k].values.get_shape().as_list()), (None,)) self.assertEqual( tuple(context_out[k].dense_shape.get_shape().as_list()), (1,)) def _testBoth(self, kwargs, expected_context_values=None, expected_feat_list_values=None, expected_err=None): # Test using tf.io.parse_single_sequence_example self._test( kwargs, expected_context_values=expected_context_values, expected_feat_list_values=expected_feat_list_values, expected_err=expected_err, batch=False) # Convert the input to a batch of size 1, and test using # tf.parse_sequence_example. # Some replacements are needed for the batch version. kwargs["serialized"] = [kwargs.pop("serialized")] kwargs["example_names"] = [kwargs.pop("example_name") ] if "example_name" in kwargs else None # Disable error string matching; it's not consistent for batch mode. if expected_err: expected_err = (expected_err[0], "") # Add a batch dimension to expected output if expected_context_values: new_values = {} for k in expected_context_values: v = expected_context_values[k] if isinstance(kwargs["context_features"][k], parsing_ops.FixedLenFeature): new_values[k] = np.expand_dims(v, axis=0) else: # Sparse tensor. new_values[k] = (np.insert(v[0], 0, 0, axis=1), v[1], np.insert(v[2], 0, 1)) expected_context_values = new_values expected_length_values = {} if expected_feat_list_values: new_values = {} for k in expected_feat_list_values: v = expected_feat_list_values[k] if isinstance(kwargs["sequence_features"][k], parsing_ops.FixedLenSequenceFeature): expected_length_values[k] = [np.shape(v)[0]] new_values[k] = np.expand_dims(v, axis=0) else: # Sparse tensor. 
new_values[k] = (np.insert(v[0], 0, 0, axis=1), v[1], np.insert(v[2], 0, 1)) expected_feat_list_values = new_values self._test( kwargs, expected_context_values=expected_context_values, expected_feat_list_values=expected_feat_list_values, expected_length_values=expected_length_values, expected_err=expected_err, batch=True) @test_util.run_deprecated_v1 def testSequenceExampleWithSparseAndDenseContext(self): original = sequence_example( context=features({ "c": float_feature([3, 4]), "st_a": float_feature([3.0, 4.0]) })) serialized = original.SerializeToString() expected_st_a = ( np.array([[0], [1]], dtype=np.int64), # indices np.array([3.0, 4.0], dtype=np.float32), # values np.array([2], dtype=np.int64)) # shape: num_features = 2 a_default = [[1, 2, 3]] b_default = np.random.rand(3, 3).astype(bytes) expected_context_output = { "st_a": expected_st_a, "a": a_default, "b": b_default, "c": np.array([3, 4], dtype=np.float32), } self._testBoth( { "example_name": "in1", "serialized": ops.convert_to_tensor(serialized), "context_features": { "st_a": parsing_ops.VarLenFeature(dtypes.float32), "a": parsing_ops.FixedLenFeature( (1, 3), dtypes.int64, default_value=a_default), "b": parsing_ops.FixedLenFeature( (3, 3), dtypes.string, default_value=b_default), # Feature "c" must be provided, since it has no default_value. "c": parsing_ops.FixedLenFeature((2,), dtypes.float32), } }, expected_context_values=expected_context_output) @test_util.run_deprecated_v1 def testSequenceExampleWithMultipleSizeFeatureLists(self): original = sequence_example( feature_lists=feature_lists({ "a": feature_list([ int64_feature([-1, 0, 1]), int64_feature([2, 3, 4]), int64_feature([5, 6, 7]), int64_feature([8, 9, 10]), ]), "b": feature_list([bytes_feature([b"r00", b"r01", b"r10", b"r11"])]), "c": feature_list([float_feature([3, 4]), float_feature([-1, 2])]), })) serialized = original.SerializeToString() expected_feature_list_output = { "a": np.array( [ # outer dimension is time. 
                    [[-1, 0, 1]],  # inside are 1x3 matrices
                    [[2, 3, 4]],
                    [[5, 6, 7]],
                    [[8, 9, 10]]
                ],
                dtype=np.int64),
        "b":
            np.array(
                [  # outer dimension is time, inside are 2x2 matrices
                    [[b"r00", b"r01"], [b"r10", b"r11"]]
                ],
                dtype=bytes),
        "c":
            np.array(
                [  # outer dimension is time, inside are 2-vectors
                    [3, 4], [-1, 2]
                ],
                dtype=np.float32),
        "d":
            np.empty(shape=(0, 5), dtype=np.float32),  # empty_allowed_missing
    }

    self._testBoth(
        {
            "example_name": "in1",
            "serialized": ops.convert_to_tensor(serialized),
            "sequence_features": {
                "a": parsing_ops.FixedLenSequenceFeature((1, 3), dtypes.int64),
                "b": parsing_ops.FixedLenSequenceFeature((2, 2), dtypes.string),
                "c": parsing_ops.FixedLenSequenceFeature(2, dtypes.float32),
                "d": parsing_ops.FixedLenSequenceFeature(
                    (5,), dtypes.float32, allow_missing=True),
            }
        },
        expected_feat_list_values=expected_feature_list_output)

  @test_util.run_deprecated_v1
  def testSequenceExampleWithoutDebugName(self):
    original = sequence_example(
        feature_lists=feature_lists({
            "a": feature_list([int64_feature([3, 4]), int64_feature([1, 0])]),
            "st_a": feature_list([
                float_feature([3.0, 4.0]),
                float_feature([5.0]),
                float_feature([])
            ]),
            "st_b": feature_list([
                bytes_feature([b"a"]),
                bytes_feature([]),
                bytes_feature([]),
                bytes_feature([b"b", b"c"])
            ])
        }))

    serialized = original.SerializeToString()

    expected_st_a = (
        np.array([[0, 0], [0, 1], [1, 0]], dtype=np.int64),  # indices
        np.array([3.0, 4.0, 5.0], dtype=np.float32),  # values
        np.array([3, 2], dtype=np.int64))  # shape: num_time = 3, max_feat = 2

    expected_st_b = (
        np.array([[0, 0], [3, 0], [3, 1]], dtype=np.int64),  # indices
        np.array(["a", "b", "c"], dtype="|S"),  # values
        np.array([4, 2], dtype=np.int64))  # shape: num_time = 4, max_feat = 2

    expected_st_c = (
        np.empty((0, 2), dtype=np.int64),  # indices
        np.empty((0,), dtype=np.int64),  # values
        np.array([0, 0], dtype=np.int64))  # shape: num_time = 0, max_feat = 0

    expected_feature_list_output = {
        "a": np.array([[3, 4], [1, 0]], dtype=np.int64),
        "st_a": expected_st_a,
        "st_b": expected_st_b,
        "st_c": expected_st_c,
    }

    self._testBoth(
        {
            "serialized": ops.convert_to_tensor(serialized),
            "sequence_features": {
                "st_a": parsing_ops.VarLenFeature(dtypes.float32),
                "st_b": parsing_ops.VarLenFeature(dtypes.string),
                "st_c": parsing_ops.VarLenFeature(dtypes.int64),
                "a": parsing_ops.FixedLenSequenceFeature((2,), dtypes.int64),
            }
        },
        expected_feat_list_values=expected_feature_list_output)

  @test_util.run_deprecated_v1
  def testSequenceExampleWithSparseAndDenseFeatureLists(self):
    original = sequence_example(
        feature_lists=feature_lists({
            "a": feature_list([int64_feature([3, 4]), int64_feature([1, 0])]),
            "st_a": feature_list([
                float_feature([3.0, 4.0]),
                float_feature([5.0]),
                float_feature([])
            ]),
            "st_b": feature_list([
                bytes_feature([b"a"]),
                bytes_feature([]),
                bytes_feature([]),
                bytes_feature([b"b", b"c"])
            ])
        }))

    serialized = original.SerializeToString()

    expected_st_a = (
        np.array([[0, 0], [0, 1], [1, 0]], dtype=np.int64),  # indices
        np.array([3.0, 4.0, 5.0], dtype=np.float32),  # values
        np.array([3, 2], dtype=np.int64))  # shape: num_time = 3, max_feat = 2

    expected_st_b = (
        np.array([[0, 0], [3, 0], [3, 1]], dtype=np.int64),  # indices
        np.array(["a", "b", "c"], dtype="|S"),  # values
        np.array([4, 2], dtype=np.int64))  # shape: num_time = 4, max_feat = 2

    expected_st_c = (
        np.empty((0, 2), dtype=np.int64),  # indices
        np.empty((0,), dtype=np.int64),  # values
        np.array([0, 0], dtype=np.int64))  # shape: num_time = 0, max_feat = 0

    expected_feature_list_output = {
        "a": np.array([[3, 4], [1, 0]], dtype=np.int64),
        "st_a": expected_st_a,
        "st_b": expected_st_b,
        "st_c": expected_st_c,
    }

    self._testBoth(
        {
            "example_name": "in1",
            "serialized": ops.convert_to_tensor(serialized),
            "sequence_features": {
                "st_a": parsing_ops.VarLenFeature(dtypes.float32),
                "st_b": parsing_ops.VarLenFeature(dtypes.string),
                "st_c": parsing_ops.VarLenFeature(dtypes.int64),
                "a": parsing_ops.FixedLenSequenceFeature((2,), dtypes.int64),
            }
        },
        expected_feat_list_values=expected_feature_list_output)

  @test_util.run_deprecated_v1
  def testSequenceExampleWithEmptyFeatureInFeatureLists(self):
    original = sequence_example(
        feature_lists=feature_lists({
            "st_a": feature_list([
                float_feature([3.0, 4.0]),
                feature(),
                float_feature([5.0]),
            ]),
        }))

    serialized = original.SerializeToString()

    expected_st_a = (
        np.array([[0, 0], [0, 1], [2, 0]], dtype=np.int64),  # indices
        np.array([3.0, 4.0, 5.0], dtype=np.float32),  # values
        np.array([3, 2], dtype=np.int64))  # shape: num_time = 3, max_feat = 2

    expected_feature_list_output = {
        "st_a": expected_st_a,
    }

    self._testBoth(
        {
            "example_name": "in1",
            "serialized": ops.convert_to_tensor(serialized),
            "sequence_features": {
                "st_a": parsing_ops.VarLenFeature(dtypes.float32),
            }
        },
        expected_feat_list_values=expected_feature_list_output)

  def testSequenceExampleListWithInconsistentDataFails(self):
    original = sequence_example(
        feature_lists=feature_lists({
            "a": feature_list([int64_feature([-1, 0]), float_feature([2, 3])])
        }))

    serialized = original.SerializeToString()

    self._testBoth(
        {
            "example_name": "in1",
            "serialized": ops.convert_to_tensor(serialized),
            "sequence_features": {
                "a": parsing_ops.FixedLenSequenceFeature((2,), dtypes.int64)
            }
        },
        expected_err=(errors_impl.OpError, "Feature list: a, Index: 1."
                      " Data types don't match. Expected type: int64"))

  def testSequenceExampleListWithWrongDataTypeFails(self):
    original = sequence_example(
        feature_lists=feature_lists({
            "a": feature_list([float_feature([2, 3])])
        }))

    serialized = original.SerializeToString()

    self._testBoth(
        {
            "example_name": "in1",
            "serialized": ops.convert_to_tensor(serialized),
            "sequence_features": {
                "a": parsing_ops.FixedLenSequenceFeature((2,), dtypes.int64)
            }
        },
        expected_err=(errors_impl.OpError,
                      "Feature list: a, Index: 0. Data types don't match."
                      " Expected type: int64"))

  def testSequenceExampleListWithWrongSparseDataTypeFails(self):
    original = sequence_example(
        feature_lists=feature_lists({
            "a": feature_list([
                int64_feature([3, 4]),
                int64_feature([1, 2]),
                float_feature([2.0, 3.0])
            ])
        }))

    serialized = original.SerializeToString()

    self._testBoth(
        {
            "example_name": "in1",
            "serialized": ops.convert_to_tensor(serialized),
            "sequence_features": {
                "a": parsing_ops.FixedLenSequenceFeature((2,), dtypes.int64)
            }
        },
        expected_err=(errors_impl.OpError,
                      "Name: in1, Feature list: a, Index: 2."
                      " Data types don't match. Expected type: int64"
                      " Feature is: float_list"))

  def testSequenceExampleListWithWrongShapeFails(self):
    original = sequence_example(
        feature_lists=feature_lists({
            "a": feature_list([int64_feature([2, 3]), int64_feature([2, 3, 4])]),
        }))

    serialized = original.SerializeToString()

    self._testBoth(
        {
            "example_name": "in1",
            "serialized": ops.convert_to_tensor(serialized),
            "sequence_features": {
                "a": parsing_ops.FixedLenSequenceFeature((2,), dtypes.int64)
            }
        },
        expected_err=(errors_impl.OpError, r"Name: in1, Key: a, Index: 1."
                      r" Number of int64 values != expected."
                      r" values size: 3 but output shape: \[2\]"))

  def testSequenceExampleWithMissingFeatureListFails(self):
    original = sequence_example(feature_lists=feature_lists({}))

    # Test fails because we didn't add:
    # feature_list_dense_defaults = {"a": None}
    self._testBoth(
        {
            "example_name": "in1",
            "serialized": ops.convert_to_tensor(original.SerializeToString()),
            "sequence_features": {
                "a": parsing_ops.FixedLenSequenceFeature((2,), dtypes.int64)
            }
        },
        expected_err=(
            errors_impl.OpError,
            "Name: in1, Feature list 'a' is required but could not be found."
            " Did you mean to include it in"
            " feature_list_dense_missing_assumed_empty or"
            " feature_list_dense_defaults?"))

  @test_util.run_deprecated_v1
  def testSequenceExampleBatch(self):
    first = sequence_example(
        feature_lists=feature_lists({
            "a": feature_list([
                int64_feature([-1, 0, 1]),
                int64_feature([2, 3, 4]),
                int64_feature([5, 6, 7]),
                int64_feature([8, 9, 10]),
            ])
        }))
    second = sequence_example(
        feature_lists=feature_lists({
            "a": feature_list([
                int64_feature([21, 2, 11]),
            ])
        }))

    serialized = [first.SerializeToString(), second.SerializeToString()]

    expected_feature_list_output = {
        "a":
            np.array(
                [  # outermost dimension is example id
                    [  # middle dimension is time.
                        [[-1, 0, 1]],  # inside are 1x3 matrices
                        [[2, 3, 4]],
                        [[5, 6, 7]],
                        [[8, 9, 10]]
                    ],
                    [  # middle dimension is time.
                        [[21, 2, 11]],  # inside are 1x3 matrices
                        [[0, 0, 0]],  # additional entries are padded with 0
                        [[0, 0, 0]],
                        [[0, 0, 0]]
                    ]
                ],
                dtype=np.int64),
        "d":
            np.empty(shape=(2, 0, 5), dtype=np.float32),  # allowed_missing
    }

    self._test(
        {
            "example_names": ops.convert_to_tensor(["in1", "in2"]),
            "serialized": ops.convert_to_tensor(serialized),
            "sequence_features": {
                "a": parsing_ops.FixedLenSequenceFeature((1, 3), dtypes.int64),
                "d": parsing_ops.FixedLenSequenceFeature(
                    (5,), dtypes.float32, allow_missing=True),
            }
        },
        expected_feat_list_values=expected_feature_list_output,
        expected_length_values={
            "a": [4, 1],
            "d": [0, 0]
        },
        batch=True)


class DecodeRawTest(test.TestCase):

  def _decode_v1(self, words):
    with self.cached_session():
      examples = np.array(words)
      example_tensor = constant_op.constant(
          examples, shape=examples.shape, dtype=dtypes.string)
      byte_tensor = parsing_ops.decode_raw_v1(example_tensor, dtypes.uint8)
      return self.evaluate(byte_tensor)

  def _decode_v2(self, words, fixed_length=None):
    with self.cached_session():
      examples = np.array(words)
      byte_tensor = parsing_ops.decode_raw(
          examples, dtypes.uint8, fixed_length=fixed_length)
      return self.evaluate(byte_tensor)

  def _ordinalize(self, words, fixed_length=None):
    outputs = []
    if fixed_length is None:
      fixed_length = len(words[0])

    for word in words:
      output = []
      for i in range(fixed_length):
        if i < len(word):
          output.append(ord(word[i]))
        else:
          output.append(0)
      outputs.append(output)
    return np.array(outputs)

  def testDecodeRawV1EqualLength(self):
    words = ["string1", "string2"]

    observed = self._decode_v1(words)
    expected = self._ordinalize(words)

    self.assertAllEqual(expected.shape, observed.shape)
    self.assertAllEqual(expected, observed)

  def testDecodeRawV2FallbackEqualLength(self):
    words = ["string1", "string2"]

    observed = self._decode_v2(words)
    expected = self._ordinalize(words)

    self.assertAllEqual(expected.shape, observed.shape)
    self.assertAllEqual(expected, observed)

  def testDecodeRawV1VariableLength(self):
    words = ["string", "longer_string"]
    with self.assertRaises(errors_impl.InvalidArgumentError):
      self._decode_v1(words)

  def testDecodeRawV2FallbackVariableLength(self):
    words = ["string", "longer_string"]
    with self.assertRaises(errors_impl.InvalidArgumentError):
      self._decode_v2(words)

  def testDecodeRawV2VariableLength(self):
    words = ["string", "longer_string"]

    observed = self._decode_v2(words, fixed_length=8)
    expected = self._ordinalize(words, fixed_length=8)

    self.assertAllEqual(expected.shape, observed.shape)
    self.assertAllEqual(expected, observed)


class DecodeJSONExampleTest(test.TestCase):

  def _testRoundTrip(self, examples):
    with self.cached_session() as sess:
      examples = np.array(examples, dtype=np.object)

      json_tensor = constant_op.constant(
          [json_format.MessageToJson(m) for m in examples.flatten()],
          shape=examples.shape,
          dtype=dtypes.string)
      binary_tensor = parsing_ops.decode_json_example(json_tensor)
      binary_val = self.evaluate(binary_tensor)

      if examples.shape:
        self.assertShapeEqual(binary_val, json_tensor)
        for input_example, output_binary in zip(
            np.array(examples).flatten(), binary_val.flatten()):
          output_example = example_pb2.Example()
          output_example.ParseFromString(output_binary)
          self.assertProtoEquals(input_example, output_example)
      else:
        output_example = example_pb2.Example()
        output_example.ParseFromString(binary_val)
        self.assertProtoEquals(examples.item(), output_example)

  def testEmptyTensor(self):
    self._testRoundTrip([])
    self._testRoundTrip([[], [], []])

  def testEmptyExamples(self):
    self._testRoundTrip([example(), example(), example()])

  def testDenseFeaturesScalar(self):
    self._testRoundTrip(
        example(features=features({
            "a": float_feature([1, 1, 3])
        })))

  def testDenseFeaturesVector(self):
    self._testRoundTrip([
        example(features=features({
            "a": float_feature([1, 1, 3])
        })),
        example(features=features({
            "a": float_feature([-1, -1, 2])
        })),
    ])

  def testDenseFeaturesMatrix(self):
    self._testRoundTrip([
        [example(features=features({
            "a": float_feature([1, 1, 3])
        }))],
        [example(features=features({
            "a": float_feature([-1, -1, 2])
        }))],
    ])

  def testSparseFeatures(self):
    self._testRoundTrip([
        example(features=features({
            "st_c": float_feature([3, 4])
        })),
        example(features=features({
            "st_c": float_feature([])
        })),
        example(features=features({
            "st_d": feature()
        })),
        example(
            features=features({
                "st_c": float_feature([1, 2, -1]),
                "st_d": bytes_feature([b"hi"])
            })),
    ])

  def testSerializedContainingBytes(self):
    aname = "a"
    bname = "b*has+a:tricky_name"
    self._testRoundTrip([
        example(
            features=features({
                aname: float_feature([1, 1]),
                bname: bytes_feature([b"b0_str"])
            })),
        example(
            features=features({
                aname: float_feature([-1, -1]),
                bname: bytes_feature([b"b1"])
            })),
    ])

  @test_util.run_deprecated_v1
  def testInvalidSyntax(self):
    with self.cached_session() as sess:
      json_tensor = constant_op.constant(["{]"])
      binary_tensor = parsing_ops.decode_json_example(json_tensor)
      with self.assertRaisesOpError("Error while parsing JSON"):
        self.evaluate(binary_tensor)


class ParseTensorOpTest(test.TestCase):

  @test_util.run_deprecated_v1
  def testToFloat32(self):
    with self.cached_session():
      expected = np.random.rand(3, 4, 5).astype(np.float32)
      tensor_proto = tensor_util.make_tensor_proto(expected)

      serialized = array_ops.placeholder(dtypes.string)
      tensor = parsing_ops.parse_tensor(serialized, dtypes.float32)

      result = tensor.eval(
          feed_dict={serialized: tensor_proto.SerializeToString()})

      self.assertAllEqual(expected, result)

  @test_util.run_deprecated_v1
  def testToUint8(self):
    with self.cached_session():
      expected = np.random.rand(3, 4, 5).astype(np.uint8)
      tensor_proto = tensor_util.make_tensor_proto(expected)

      serialized = array_ops.placeholder(dtypes.string)
      tensor = parsing_ops.parse_tensor(serialized, dtypes.uint8)

      result = tensor.eval(
          feed_dict={serialized: tensor_proto.SerializeToString()})

      self.assertAllEqual(expected, result)

  @test_util.run_deprecated_v1
  def testTypeMismatch(self):
    with self.cached_session():
      expected = np.random.rand(3, 4, 5).astype(np.uint8)
      tensor_proto = tensor_util.make_tensor_proto(expected)

      serialized = array_ops.placeholder(dtypes.string)
      tensor = parsing_ops.parse_tensor(serialized, dtypes.uint16)

      with self.assertRaisesOpError(
          r"Type mismatch between parsed tensor \(uint8\) and dtype "
          r"\(uint16\)"):
        tensor.eval(feed_dict={serialized: tensor_proto.SerializeToString()})

  @test_util.run_deprecated_v1
  def testInvalidInput(self):
    with self.cached_session():
      serialized = array_ops.placeholder(dtypes.string)
      tensor = parsing_ops.parse_tensor(serialized, dtypes.uint16)

      with self.assertRaisesOpError(
          "Could not parse `serialized` as TensorProto: 'bogus'"):
        tensor.eval(feed_dict={serialized: "bogus"})

      with self.assertRaisesOpError(
          r"Expected `serialized` to be a scalar, got shape: \[1\]"):
        tensor.eval(feed_dict={serialized: ["bogus"]})


if __name__ == "__main__":
  test.main()
{ "content_hash": "7c84fd5ca2c9a3a6ac77808a3f645b61", "timestamp": "", "source": "github", "line_count": 1836, "max_line_length": 80, "avg_line_length": 35.007625272331154, "alnum_prop": 0.524566698820674, "repo_name": "ghchinoy/tensorflow", "id": "672f571fb96b42e274a70d376bea425c40f98ca7", "size": "64963", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "tensorflow/python/kernel_tests/parsing_ops_test.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "3568" }, { "name": "Batchfile", "bytes": "15317" }, { "name": "C", "bytes": "699905" }, { "name": "C#", "bytes": "8446" }, { "name": "C++", "bytes": "67022491" }, { "name": "CMake", "bytes": "206499" }, { "name": "Dockerfile", "bytes": "73602" }, { "name": "Go", "bytes": "1585039" }, { "name": "HTML", "bytes": "4680118" }, { "name": "Java", "bytes": "836400" }, { "name": "Jupyter Notebook", "bytes": "1665583" }, { "name": "LLVM", "bytes": "6536" }, { "name": "Makefile", "bytes": "98194" }, { "name": "Objective-C", "bytes": "94022" }, { "name": "Objective-C++", "bytes": "175222" }, { "name": "PHP", "bytes": "17600" }, { "name": "Pascal", "bytes": "3239" }, { "name": "Perl", "bytes": "7536" }, { "name": "Python", "bytes": "48407007" }, { "name": "RobotFramework", "bytes": "891" }, { "name": "Ruby", "bytes": "4733" }, { "name": "Shell", "bytes": "476920" }, { "name": "Smarty", "bytes": "27495" }, { "name": "Swift", "bytes": "56155" } ], "symlink_target": "" }
from passrotate.provider import Provider, ProviderOption, PromptType, register_provider
from passrotate.forms import get_form, get_form_data
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import requests


class Linode(Provider):
    """
    [linode.com]
    username=Your Linode username
    expires=Optional, months till new password expires (0, 1, 3, 6, 12)
    """
    name = "Linode"
    domains = [
        "linode.com",
    ]
    options = {
        "username": ProviderOption(str, "Your Linode username"),
        "expires": ProviderOption({
            "Never": "0",
            "1 month": "1",
            "3 months": "3",
            "6 months": "6",
            "12 months": "12",
        }, "Password expiry")
    }

    def __init__(self, options):
        self.username = options["username"]
        self.expiry = options.get("expires") or "0"

    def prepare(self, old_password):
        self._session = requests.Session()
        r = self._session.get("https://manager.linode.com")
        form = get_form(r.text, id="CFForm_1")
        form.update({
            "auth_username": self.username,
            "auth_password": old_password,
        })
        r = self._session.post("https://manager.linode.com/session/login",
                data=form)
        soup = BeautifulSoup(r.text, "html.parser")
        title = soup.find("title")
        if title.text != "Session Engaged!":
            raise Exception("Unable to log into Linode with your current password")
        r = self._session.get("https://manager.linode.com/linodes")
        url = urlparse(r.url)
        if url.path.startswith("/session/twofactor"):
            code = self.prompt("Enter your two-factor (TOTP) code",
                    PromptType.totp)
            soup = BeautifulSoup(r.text, "html.parser")
            form = soup.find("form", attrs={ "id": "CFForm_1" })
            action = form.get("action", "")
            r = self._session.post(action, data={ "auth_code": code })
        r = self._session.get("https://manager.linode.com/profile/index")
        # Linode has a weird form on this page
        soup = BeautifulSoup(r.text, "html.parser")
        inputs = soup.find_all("input")
        form = get_form_data(inputs)
        form.update({ "auth_password": old_password })
        r = self._session.post("https://manager.linode.com/profile/reauth",
                data=form)
        r = self._session.get("https://manager.linode.com/profile/auth")
        # This form is also weird. Why you gotta be weird, Linode?
        soup = BeautifulSoup(r.text, "html.parser")
        self._form = {
            "authenticity_token": soup.find("input", attrs={
                "name": "authenticity_token"
            }).get("value", "")
        }

    def execute(self, old_password, new_password):
        self._form.update({
            "password": new_password,
            "password2": new_password,
            "expires": self.expiry
        })
        r = self._session.post("https://manager.linode.com/profile/password",
                data=self._form)
        if r.status_code != 200:
            raise Exception("Failed to update Linode password")


register_provider(Linode)
{ "content_hash": "72bdf6a0d138241f16122b1cc4f1708b", "timestamp": "", "source": "github", "line_count": 79, "max_line_length": 109, "avg_line_length": 39.68354430379747, "alnum_prop": 0.5830940988835726, "repo_name": "SirCmpwn/pass-rotate", "id": "e38c822917b7b3347c03287d6c2cc59f6f631309", "size": "3135", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "passrotate/providers/linode.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "44610" } ], "symlink_target": "" }